avbridge 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +120 -0
- package/LICENSE +21 -0
- package/README.md +415 -0
- package/dist/avi-M5B4SHRM.cjs +164 -0
- package/dist/avi-M5B4SHRM.cjs.map +1 -0
- package/dist/avi-POCGZ4JX.js +162 -0
- package/dist/avi-POCGZ4JX.js.map +1 -0
- package/dist/chunk-5ISVAODK.js +80 -0
- package/dist/chunk-5ISVAODK.js.map +1 -0
- package/dist/chunk-F7YS2XOA.cjs +2966 -0
- package/dist/chunk-F7YS2XOA.cjs.map +1 -0
- package/dist/chunk-FKM7QBZU.js +2957 -0
- package/dist/chunk-FKM7QBZU.js.map +1 -0
- package/dist/chunk-J5MCMN3S.js +27 -0
- package/dist/chunk-J5MCMN3S.js.map +1 -0
- package/dist/chunk-L4NPOJ36.cjs +180 -0
- package/dist/chunk-L4NPOJ36.cjs.map +1 -0
- package/dist/chunk-NZU7W256.cjs +29 -0
- package/dist/chunk-NZU7W256.cjs.map +1 -0
- package/dist/chunk-PQTZS7OA.js +147 -0
- package/dist/chunk-PQTZS7OA.js.map +1 -0
- package/dist/chunk-WD2ZNQA7.js +177 -0
- package/dist/chunk-WD2ZNQA7.js.map +1 -0
- package/dist/chunk-Y5FYF5KG.cjs +153 -0
- package/dist/chunk-Y5FYF5KG.cjs.map +1 -0
- package/dist/chunk-Z2FJ5TJC.cjs +82 -0
- package/dist/chunk-Z2FJ5TJC.cjs.map +1 -0
- package/dist/element.cjs +433 -0
- package/dist/element.cjs.map +1 -0
- package/dist/element.d.cts +158 -0
- package/dist/element.d.ts +158 -0
- package/dist/element.js +431 -0
- package/dist/element.js.map +1 -0
- package/dist/index.cjs +576 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +80 -0
- package/dist/index.d.ts +80 -0
- package/dist/index.js +554 -0
- package/dist/index.js.map +1 -0
- package/dist/libav-http-reader-FPYDBMYK.cjs +16 -0
- package/dist/libav-http-reader-FPYDBMYK.cjs.map +1 -0
- package/dist/libav-http-reader-NQJVY273.js +3 -0
- package/dist/libav-http-reader-NQJVY273.js.map +1 -0
- package/dist/libav-import-2JURFHEW.js +8 -0
- package/dist/libav-import-2JURFHEW.js.map +1 -0
- package/dist/libav-import-GST2AMPL.cjs +30 -0
- package/dist/libav-import-GST2AMPL.cjs.map +1 -0
- package/dist/libav-loader-KA2MAWLM.js +3 -0
- package/dist/libav-loader-KA2MAWLM.js.map +1 -0
- package/dist/libav-loader-ZHOERPHW.cjs +12 -0
- package/dist/libav-loader-ZHOERPHW.cjs.map +1 -0
- package/dist/player-BBwbCkdL.d.cts +365 -0
- package/dist/player-BBwbCkdL.d.ts +365 -0
- package/dist/source-SC6ZEQYR.cjs +28 -0
- package/dist/source-SC6ZEQYR.cjs.map +1 -0
- package/dist/source-ZFS4H7J3.js +3 -0
- package/dist/source-ZFS4H7J3.js.map +1 -0
- package/dist/variant-routing-GOHB2RZN.cjs +12 -0
- package/dist/variant-routing-GOHB2RZN.cjs.map +1 -0
- package/dist/variant-routing-JOBWXYKD.js +3 -0
- package/dist/variant-routing-JOBWXYKD.js.map +1 -0
- package/package.json +95 -0
- package/src/classify/index.ts +1 -0
- package/src/classify/rules.ts +214 -0
- package/src/convert/index.ts +2 -0
- package/src/convert/remux.ts +522 -0
- package/src/convert/transcode.ts +329 -0
- package/src/diagnostics.ts +99 -0
- package/src/element/avbridge-player.ts +576 -0
- package/src/element.ts +19 -0
- package/src/events.ts +71 -0
- package/src/index.ts +42 -0
- package/src/libav-stubs.d.ts +24 -0
- package/src/player.ts +455 -0
- package/src/plugins/builtin.ts +37 -0
- package/src/plugins/registry.ts +32 -0
- package/src/probe/avi.ts +242 -0
- package/src/probe/index.ts +59 -0
- package/src/probe/mediabunny.ts +194 -0
- package/src/strategies/fallback/audio-output.ts +293 -0
- package/src/strategies/fallback/clock.ts +7 -0
- package/src/strategies/fallback/decoder.ts +660 -0
- package/src/strategies/fallback/index.ts +170 -0
- package/src/strategies/fallback/libav-import.ts +27 -0
- package/src/strategies/fallback/libav-loader.ts +190 -0
- package/src/strategies/fallback/variant-routing.ts +43 -0
- package/src/strategies/fallback/video-renderer.ts +216 -0
- package/src/strategies/hybrid/decoder.ts +641 -0
- package/src/strategies/hybrid/index.ts +139 -0
- package/src/strategies/native.ts +107 -0
- package/src/strategies/remux/annexb.ts +112 -0
- package/src/strategies/remux/index.ts +79 -0
- package/src/strategies/remux/mse.ts +234 -0
- package/src/strategies/remux/pipeline.ts +254 -0
- package/src/subtitles/index.ts +91 -0
- package/src/subtitles/render.ts +62 -0
- package/src/subtitles/srt.ts +62 -0
- package/src/subtitles/vtt.ts +5 -0
- package/src/types-shim.d.ts +3 -0
- package/src/types.ts +360 -0
- package/src/util/codec-strings.ts +86 -0
- package/src/util/libav-http-reader.ts +315 -0
- package/src/util/source.ts +274 -0
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
// src/strategies/fallback/variant-routing.ts

// Containers only the custom "avbridge" libav build can demux.
var LEGACY_CONTAINERS = /* @__PURE__ */ new Set(["avi", "asf", "flv"]);

// Video codecs missing from the npm "webcodecs" libav variant.
var LEGACY_VIDEO_CODECS = /* @__PURE__ */ new Set([
  "wmv3",
  "vc1",
  "mpeg4",
  // MPEG-4 Part 2 / DivX / Xvid
  "rv40",
  "mpeg2",
  "mpeg1",
  "theora"
]);

// Audio codecs missing from the npm "webcodecs" libav variant.
var LEGACY_AUDIO_CODECS = /* @__PURE__ */ new Set(["wmav2", "wmapro", "ac3", "eac3"]);

/**
 * Pick which libav.js variant to load for a media context: "avbridge"
 * (custom build with legacy demuxers/decoders) when the container or any
 * track codec is legacy-only, otherwise the lighter "webcodecs" variant.
 */
function pickLibavVariant(ctx) {
  if (LEGACY_CONTAINERS.has(ctx.container)) return "avbridge";
  const hasLegacyVideo = ctx.videoTracks.some((t) => LEGACY_VIDEO_CODECS.has(t.codec));
  const hasLegacyAudio = ctx.audioTracks.some((t) => LEGACY_AUDIO_CODECS.has(t.codec));
  return hasLegacyVideo || hasLegacyAudio ? "avbridge" : "webcodecs";
}

export { pickLibavVariant };
//# sourceMappingURL=chunk-J5MCMN3S.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/strategies/fallback/variant-routing.ts"],"names":[],"mappings":";AAmBA,IAAM,oCAAoB,IAAI,GAAA,CAAI,CAAC,KAAA,EAAO,KAAA,EAAO,KAAK,CAAC,CAAA;AAEvD,IAAM,mBAAA,uBAA0B,GAAA,CAAgB;AAAA,EAC9C,MAAA;AAAA,EACA,KAAA;AAAA,EACA,OAAA;AAAA;AAAA,EACA,MAAA;AAAA,EACA,OAAA;AAAA,EACA,OAAA;AAAA,EACA;AACF,CAAC,CAAA;AAED,IAAM,mBAAA,uBAA0B,GAAA,CAAgB,CAAC,SAAS,QAAA,EAAU,KAAA,EAAO,MAAM,CAAC,CAAA;AAE3E,SAAS,iBAAiB,GAAA,EAAiC;AAChE,EAAA,IAAI,iBAAA,CAAkB,GAAA,CAAI,GAAA,CAAI,SAAS,GAAG,OAAO,UAAA;AACjD,EAAA,KAAA,MAAW,CAAA,IAAK,IAAI,WAAA,EAAa;AAC/B,IAAA,IAAI,mBAAA,CAAoB,GAAA,CAAI,CAAA,CAAE,KAAK,GAAG,OAAO,UAAA;AAAA,EAC/C;AACA,EAAA,KAAA,MAAW,CAAA,IAAK,IAAI,WAAA,EAAa;AAC/B,IAAA,IAAI,mBAAA,CAAoB,GAAA,CAAI,CAAA,CAAE,KAAK,GAAG,OAAO,UAAA;AAAA,EAC/C;AACA,EAAA,OAAO,WAAA;AACT","file":"chunk-J5MCMN3S.js","sourcesContent":["import type { MediaContext, AudioCodec, VideoCodec } from \"../../types.js\";\nimport type { LibavVariant } from \"./libav-loader.js\";\n\n/**\n * Decide which libav.js variant to load for a given media context.\n *\n * - **webcodecs** (~5 MB, npm) — modern formats only, designed for the\n * WebCodecs bridge. Used when the codec is browser-supported and we just\n * need libav.js for demuxing or as a parser source.\n *\n * - **avbridge** (custom build, vendor/libav/) — has the AVI/ASF/FLV demuxers\n * and the legacy decoders (WMV3, MPEG-4 Part 2, VC-1, MS-MPEG4 v1/2/3,\n * AC-3, WMA*). Required for any of those formats; the npm variants ship\n * none of them.\n *\n * Rule: pick \"avbridge\" if either the container or any codec is one only the\n * custom build can handle. 
Otherwise pick \"webcodecs\".\n */\n\nconst LEGACY_CONTAINERS = new Set([\"avi\", \"asf\", \"flv\"]);\n\nconst LEGACY_VIDEO_CODECS = new Set<VideoCodec>([\n \"wmv3\",\n \"vc1\",\n \"mpeg4\", // MPEG-4 Part 2 / DivX / Xvid\n \"rv40\",\n \"mpeg2\",\n \"mpeg1\",\n \"theora\",\n]);\n\nconst LEGACY_AUDIO_CODECS = new Set<AudioCodec>([\"wmav2\", \"wmapro\", \"ac3\", \"eac3\"]);\n\nexport function pickLibavVariant(ctx: MediaContext): LibavVariant {\n if (LEGACY_CONTAINERS.has(ctx.container)) return \"avbridge\";\n for (const v of ctx.videoTracks) {\n if (LEGACY_VIDEO_CODECS.has(v.codec)) return \"avbridge\";\n }\n for (const a of ctx.audioTracks) {\n if (LEGACY_AUDIO_CODECS.has(a.codec)) return \"avbridge\";\n }\n return \"webcodecs\";\n}\n"]}
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
'use strict';

// src/util/libav-http-reader.ts
// Read-ahead clamp bounds: each libav read request is doubled, then clamped
// to [MIN_READ, MAX_READ] (see clampReadLength in attachLibavHttpReader) so
// small reads are amortized and pathological large requests stay bounded.
var MIN_READ = 256 * 1024;
var MAX_READ = 1 * 1024 * 1024;
|
|
6
|
+
/**
 * Prepare a libav-readable virtual file from a normalized source.
 *
 * Blob sources are mounted in-memory via `mkreadaheadfile`; URL sources get
 * the streaming HTTP Range reader. The returned handle exposes the virtual
 * filename, transport kind, known size, and a `detach()` that tears the
 * virtual file back down.
 */
async function prepareLibavInput(libav, filename, source) {
  const isUrl = source.kind === "url";
  if (!isUrl) {
    await libav.mkreadaheadfile(filename, source.blob);
    const detach = async () => {
      try {
        await libav.unlinkreadaheadfile(filename);
      } catch {
        // best-effort cleanup — ignore unlink failures
      }
    };
    return { filename, transport: "blob", size: source.byteLength, detach };
  }
  const handle = await attachLibavHttpReader(libav, filename, source.url);
  return {
    filename,
    transport: "http-range",
    size: handle.size,
    detach: () => handle.detach()
  };
}
|
|
29
|
+
/**
 * Attach an HTTP block reader to a libav.js instance so the demuxer can
 * pull bytes via HTTP Range requests instead of needing the whole file
 * in memory. Fails fast (before any libav setup) if the server does not
 * answer a `Range: bytes=0-0` probe with 206 Partial Content.
 *
 * @param libav    libav.js instance (mkblockreaderdev / onblockread /
 *                 ff_block_reader_dev_send / unlinkreadaheadfile).
 * @param filename Virtual filename libav will read from.
 * @param url      Remote media URL.
 * @param options  Optional { requestInit, fetchFn } — fetchFn overrides
 *                 global fetch (used in tests).
 * @returns Handle with { size, transport: "http-range", detach() }.
 * @throws Error if the URL is unreachable, Range is unsupported, or the
 *         Content-Range header / file size is unusable.
 */
async function attachLibavHttpReader(libav, filename, url, options = {}) {
  const fetchFn = options.fetchFn ?? fetch;
  // 1. Probe with a single-byte Range request. The probe — not any
  //    Accept-Ranges header — is treated as the canonical signal of
  //    Range support.
  let probeRes;
  try {
    probeRes = await fetchFn(url, {
      ...options.requestInit,
      headers: {
        ...options.requestInit?.headers ?? {},
        Range: "bytes=0-0"
      }
    });
  } catch (err) {
    throw new Error(
      `libav HTTP reader: failed to reach ${url}: ${err.message}`
    );
  }
  // A 200 here means the server ignored Range and would send the whole
  // file; we refuse rather than silently downloading everything.
  if (probeRes.status !== 206) {
    throw new Error(
      `libav HTTP reader: ${url} does not support HTTP Range requests (server returned ${probeRes.status} for a Range probe; need 206 Partial Content). Remote AVI/ASF/FLV playback requires a server that honors byte-range requests.`
    );
  }
  // 2. Parse the total file size out of `Content-Range: bytes 0-0/12345`.
  const contentRange = probeRes.headers.get("content-range") ?? "";
  const sizeMatch = contentRange.match(/\/(\d+)$/);
  if (!sizeMatch) {
    throw new Error(
      `libav HTTP reader: ${url} returned 206 but no parseable Content-Range header (got: "${contentRange}")`
    );
  }
  const size = parseInt(sizeMatch[1], 10);
  if (!Number.isFinite(size) || size <= 0) {
    throw new Error(
      `libav HTTP reader: ${url} reported invalid file size ${size}`
    );
  }
  // Drain the probe body so the connection can be reused.
  try {
    await probeRes.arrayBuffer();
  } catch {
  }
  // 3. Create the virtual file libav will read from.
  await libav.mkblockreaderdev(filename, size);
  // Set once by detach(); every async boundary below re-checks it so a
  // torn-down reader never writes back into libav.
  let detached = false;
  // Most-recently fetched block only (no LRU). Covers the common
  // "demuxer re-walks the header" pattern without a second request.
  let cached = null;
  // Currently in-flight fetch, used both to serialize reads and to
  // dedupe repeated requests for the same region.
  let inflight = null;
  // Double the requested length, clamped to [MIN_READ, MAX_READ].
  function clampReadLength(requested) {
    const doubled = requested * 2;
    if (doubled < MIN_READ) return MIN_READ;
    if (doubled > MAX_READ) return MAX_READ;
    return doubled;
  }
  // True if the cached block fully covers [pos, pos + length).
  function cacheCovers(pos, length) {
    if (!cached) return false;
    return pos >= cached.pos && pos + length <= cached.pos + cached.bytes.byteLength;
  }
  // Slice the requested window out of the cached block (caller must have
  // checked cacheCovers first).
  function sliceFromCache(pos, length) {
    if (!cached) throw new Error("sliceFromCache called with no cache");
    const offset = pos - cached.pos;
    return cached.bytes.subarray(offset, offset + length);
  }
  // Fetch one Range and replace the cache with the result.
  async function fetchRange(pos, length) {
    const end = Math.min(pos + length - 1, size - 1);
    const res = await fetchFn(url, {
      ...options.requestInit,
      headers: {
        ...options.requestInit?.headers ?? {},
        Range: `bytes=${pos}-${end}`
      }
    });
    // NOTE(review): a 200 response is accepted here but its body starts at
    // byte 0, not `pos` — caching it as { pos, ... } looks wrong for
    // pos > 0. Probably unreachable after the 206 probe; confirm.
    if (res.status !== 206 && res.status !== 200) {
      throw new Error(
        `libav HTTP reader: Range request bytes=${pos}-${end} returned ${res.status}`
      );
    }
    const buf = new Uint8Array(await res.arrayBuffer());
    cached = { pos, bytes: buf };
    return buf;
  }
  // Handle one libav read request. Serializes against any in-flight read,
  // answers from cache when possible, otherwise fetches with read-ahead.
  async function handleRead(name, pos, length) {
    // Wait out the preceding read so sends stay well-ordered; its own
    // caller handles any failure.
    if (inflight) {
      try {
        await inflight;
      } catch {
      }
    }
    if (detached) return;
    // Cache hit — reply without a network round-trip.
    if (cacheCovers(pos, length)) {
      const data = sliceFromCache(pos, length);
      try {
        await libav.ff_block_reader_dev_send(name, pos, data);
      } catch {
      }
      return;
    }
    // Cache miss — fetch via Range with read-ahead, then reply with
    // exactly the window libav asked for.
    const fetchLen = clampReadLength(length);
    const fetched = (async () => {
      try {
        const buf = await fetchRange(pos, fetchLen);
        if (detached) return;
        const reply = buf.subarray(0, Math.min(length, buf.byteLength));
        try {
          await libav.ff_block_reader_dev_send(name, pos, reply);
        } catch {
        }
      } catch (err) {
        if (detached) return;
        // Surface the failure to libav so the demuxer sees an error
        // instead of hanging on a read that will never arrive.
        try {
          await libav.ff_block_reader_dev_send(name, pos, null, {
            error: err
          });
        } catch {
        }
      }
    })();
    inflight = fetched;
    try {
      await fetched;
    } finally {
      if (inflight === fetched) inflight = null;
    }
  }
  // 4. Wire the callback. Reads for other filenames (or after detach) are
  //    forwarded to any previously installed callback so multiple readers
  //    can share one libav instance. handleRead is deliberately not
  //    awaited — its internal queue serializes the work.
  const previousCallback = libav.onblockread;
  libav.onblockread = (name, pos, length) => {
    if (detached || name !== filename) {
      previousCallback?.(name, pos, length);
      return;
    }
    void handleRead(name, pos, length);
  };
  return {
    size,
    transport: "http-range",
    // Stop serving reads: restore the previous callback, let the last
    // in-flight read settle, then drop the cache and the virtual file.
    async detach() {
      if (detached) return;
      detached = true;
      libav.onblockread = previousCallback;
      if (inflight) {
        try {
          await inflight;
        } catch {
        }
      }
      cached = null;
      try {
        await libav.unlinkreadaheadfile(filename);
      } catch {
      }
    }
  };
}
|
|
176
|
+
|
|
177
|
+
// Public CJS surface of this chunk.
exports.attachLibavHttpReader = attachLibavHttpReader;
exports.prepareLibavInput = prepareLibavInput;
//# sourceMappingURL=chunk-L4NPOJ36.cjs.map
//# sourceMappingURL=chunk-L4NPOJ36.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/util/libav-http-reader.ts"],"names":[],"mappings":";;;AA0CA,IAAM,WAAW,GAAA,GAAM,IAAA;AACvB,IAAM,QAAA,GAAW,IAAI,IAAA,GAAO,IAAA;AAyD5B,eAAsB,iBAAA,CACpB,KAAA,EACA,QAAA,EACA,MAAA,EAC2B;AAC3B,EAAA,IAAI,MAAA,CAAO,SAAS,KAAA,EAAO;AACzB,IAAA,MAAM,SAAS,MAAM,qBAAA,CAAsB,KAAA,EAAO,QAAA,EAAU,OAAO,GAAG,CAAA;AACtE,IAAA,OAAO;AAAA,MACL,QAAA;AAAA,MACA,SAAA,EAAW,YAAA;AAAA,MACX,MAAM,MAAA,CAAO,IAAA;AAAA,MACb,MAAA,EAAQ,MAAM,MAAA,CAAO,MAAA;AAAO,KAC9B;AAAA,EACF;AACA,EAAA,MAAM,KAAA,CAAM,eAAA,CAAgB,QAAA,EAAU,MAAA,CAAO,IAAI,CAAA;AACjD,EAAA,OAAO;AAAA,IACL,QAAA;AAAA,IACA,SAAA,EAAW,MAAA;AAAA,IACX,MAAM,MAAA,CAAO,UAAA;AAAA,IACb,QAAQ,YAAY;AAClB,MAAA,IAAI;AAAE,QAAA,MAAM,KAAA,CAAM,oBAAoB,QAAQ,CAAA;AAAA,MAAG,CAAA,CAAA,MAAQ;AAAA,MAAe;AAAA,IAC1E;AAAA,GACF;AACF;AAUA,eAAsB,sBACpB,KAAA,EACA,QAAA,EACA,GAAA,EACA,OAAA,GAAwC,EAAC,EACT;AAChC,EAAA,MAAM,OAAA,GAAU,QAAQ,OAAA,IAAW,KAAA;AAGnC,EAAA,IAAI,QAAA;AACJ,EAAA,IAAI;AACF,IAAA,QAAA,GAAW,MAAM,QAAQ,GAAA,EAAK;AAAA,MAC5B,GAAG,OAAA,CAAQ,WAAA;AAAA,MACX,OAAA,EAAS;AAAA,QACP,GAAI,OAAA,CAAQ,WAAA,EAAa,OAAA,IAAW,EAAC;AAAA,QACrC,KAAA,EAAO;AAAA;AACT,KACD,CAAA;AAAA,EACH,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA,mCAAA,EAAsC,GAAG,CAAA,EAAA,EAAM,GAAA,CAAc,OAAO,CAAA;AAAA,KACtE;AAAA,EACF;AACA,EAAA,IAAI,QAAA,CAAS,WAAW,GAAA,EAAK;AAG3B,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA,mBAAA,EAAsB,GAAG,CAAA,uDAAA,EACL,QAAA,CAAS,MAAM,CAAA,6HAAA;AAAA,KAErC;AAAA,EACF;AAGA,EAAA,MAAM,YAAA,GAAe,QAAA,CAAS,OAAA,CAAQ,GAAA,CAAI,eAAe,CAAA,IAAK,EAAA;AAC9D,EAAA,MAAM,SAAA,GAAY,YAAA,CAAa,KAAA,CAAM,UAAU,CAAA;AAC/C,EAAA,IAAI,CAAC,SAAA,EAAW;AACd,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA,mBAAA,EAAsB,GAAG,CAAA,2DAAA,EAA8D,YAAY,CAAA,EAAA;AAAA,KACrG;AAAA,EACF;AACA,EAAA,MAAM,IAAA,GAAO,QAAA,CAAS,SAAA,CAAU,CAAC,GAAG,EAAE,CAAA;AACtC,EAAA,IAAI,CAAC,MAAA,CAAO,QAAA,CAAS,IAAI,CAAA,IAAK,QAAQ,CAAA,EAAG;AACvC,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA,mBAAA,EAAsB,GAAG,CAAA,4BAAA,EAA+B,IAAI,CAAA;AAAA,KAC9D;AAAA,EACF;AAGA,EAAA,IAAI;AAAE,IAAA,MAAM,SAAS,WAAA,EAAY;AAAA,EAAG,CAAA,CAAA,MAAQ;AAAA,EAAe;AAG3D,EA
AA,MAAM,KAAA,CAAM,gBAAA,CAAiB,QAAA,EAAU,IAAI,CAAA;AAI3C,EAAA,IAAI,QAAA,GAAW,KAAA;AAGf,EAAA,IAAI,MAAA,GAAoD,IAAA;AAGxD,EAAA,IAAI,QAAA,GAAiC,IAAA;AAErC,EAAA,SAAS,gBAAgB,SAAA,EAA2B;AAClD,IAAA,MAAM,UAAU,SAAA,GAAY,CAAA;AAC5B,IAAA,IAAI,OAAA,GAAU,UAAU,OAAO,QAAA;AAC/B,IAAA,IAAI,OAAA,GAAU,UAAU,OAAO,QAAA;AAC/B,IAAA,OAAO,OAAA;AAAA,EACT;AAGA,EAAA,SAAS,WAAA,CAAY,KAAa,MAAA,EAAyB;AACzD,IAAA,IAAI,CAAC,QAAQ,OAAO,KAAA;AACpB,IAAA,OAAO,GAAA,IAAO,OAAO,GAAA,IAAO,GAAA,GAAM,UAAU,MAAA,CAAO,GAAA,GAAM,OAAO,KAAA,CAAM,UAAA;AAAA,EACxE;AAGA,EAAA,SAAS,cAAA,CAAe,KAAa,MAAA,EAA4B;AAC/D,IAAA,IAAI,CAAC,MAAA,EAAQ,MAAM,IAAI,MAAM,qCAAqC,CAAA;AAClE,IAAA,MAAM,MAAA,GAAS,MAAM,MAAA,CAAO,GAAA;AAC5B,IAAA,OAAO,MAAA,CAAO,KAAA,CAAM,QAAA,CAAS,MAAA,EAAQ,SAAS,MAAM,CAAA;AAAA,EACtD;AAGA,EAAA,eAAe,UAAA,CAAW,KAAa,MAAA,EAAqC;AAC1E,IAAA,MAAM,MAAM,IAAA,CAAK,GAAA,CAAI,MAAM,MAAA,GAAS,CAAA,EAAG,OAAO,CAAC,CAAA;AAC/C,IAAA,MAAM,GAAA,GAAM,MAAM,OAAA,CAAQ,GAAA,EAAK;AAAA,MAC7B,GAAG,OAAA,CAAQ,WAAA;AAAA,MACX,OAAA,EAAS;AAAA,QACP,GAAI,OAAA,CAAQ,WAAA,EAAa,OAAA,IAAW,EAAC;AAAA,QACrC,KAAA,EAAO,CAAA,MAAA,EAAS,GAAG,CAAA,CAAA,EAAI,GAAG,CAAA;AAAA;AAC5B,KACD,CAAA;AACD,IAAA,IAAI,GAAA,CAAI,MAAA,KAAW,GAAA,IAAO,GAAA,CAAI,WAAW,GAAA,EAAK;AAC5C,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,0CAA0C,GAAG,CAAA,CAAA,EAAI,GAAG,CAAA,UAAA,EAAa,IAAI,MAAM,CAAA;AAAA,OAC7E;AAAA,IACF;AACA,IAAA,MAAM,MAAM,IAAI,UAAA,CAAW,MAAM,GAAA,CAAI,aAAa,CAAA;AAClD,IAAA,MAAA,GAAS,EAAE,GAAA,EAAK,KAAA,EAAO,GAAA,EAAI;AAC3B,IAAA,OAAO,GAAA;AAAA,EACT;AAOA,EAAA,eAAe,UAAA,CAAW,IAAA,EAAc,GAAA,EAAa,MAAA,EAA+B;AAElF,IAAA,IAAI,QAAA,EAAU;AACZ,MAAA,IAAI;AAAE,QAAA,MAAM,QAAA;AAAA,MAAU,CAAA,CAAA,MAAQ;AAAA,MAAmD;AAAA,IACnF;AACA,IAAA,IAAI,QAAA,EAAU;AAGd,IAAA,IAAI,WAAA,CAAY,GAAA,EAAK,MAAM,CAAA,EAAG;AAC5B,MAAA,MAAM,IAAA,GAAO,cAAA,CAAe,GAAA,EAAK,MAAM,CAAA;AACvC,MAAA,IAAI;AAAE,QAAA,MAAM,KAAA,CAAM,wBAAA,CAAyB,IAAA,EAAM,GAAA,EAAK,IAAI,CAAA;AAAA,MAAG,CAAA,CAAA,MAAQ;AAAA,MAA0C;AAC/G,MAAA;AAAA,IACF;AAGA,IAAA,MAAM,QAAA,GAAW,gBAAgB,MAAM,CAAA;AACvC,IAAA,MAAM,WAAW,YAAY;AAC3B,MAAA,IAAI;AACF,QAAA,MAAM,GAAA,GAAM,MAAM,UAAA,CAAW,GAAA,E
AAK,QAAQ,CAAA;AAC1C,QAAA,IAAI,QAAA,EAAU;AAEd,QAAA,MAAM,KAAA,GAAQ,IAAI,QAAA,CAAS,CAAA,EAAG,KAAK,GAAA,CAAI,MAAA,EAAQ,GAAA,CAAI,UAAU,CAAC,CAAA;AAC9D,QAAA,IAAI;AAAE,UAAA,MAAM,KAAA,CAAM,wBAAA,CAAyB,IAAA,EAAM,GAAA,EAAK,KAAK,CAAA;AAAA,QAAG,CAAA,CAAA,MAAQ;AAAA,QAAe;AAAA,MACvF,SAAS,GAAA,EAAK;AACZ,QAAA,IAAI,QAAA,EAAU;AAEd,QAAA,IAAI;AACF,UAAA,MAAM,KAAA,CAAM,wBAAA,CAAyB,IAAA,EAAM,GAAA,EAAK,IAAA,EAAM;AAAA,YACpD,KAAA,EAAO;AAAA,WACR,CAAA;AAAA,QACH,CAAA,CAAA,MAAQ;AAAA,QAAe;AAAA,MACzB;AAAA,IACF,CAAA,GAAG;AACH,IAAA,QAAA,GAAW,OAAA;AACX,IAAA,IAAI;AAAE,MAAA,MAAM,OAAA;AAAA,IAAS,CAAA,SAAE;AAAU,MAAA,IAAI,QAAA,KAAa,SAAS,QAAA,GAAW,IAAA;AAAA,IAAM;AAAA,EAC9E;AAOA,EAAA,MAAM,mBAAmB,KAAA,CAAM,WAAA;AAC/B,EAAA,KAAA,CAAM,WAAA,GAAc,CAAC,IAAA,EAAc,GAAA,EAAa,MAAA,KAAmB;AACjE,IAAA,IAAI,QAAA,IAAY,SAAS,QAAA,EAAU;AAGjC,MAAA,gBAAA,GAAmB,IAAA,EAAM,KAAK,MAAM,CAAA;AACpC,MAAA;AAAA,IACF;AACA,IAAA,KAAK,UAAA,CAAW,IAAA,EAAM,GAAA,EAAK,MAAM,CAAA;AAAA,EACnC,CAAA;AAEA,EAAA,OAAO;AAAA,IACL,IAAA;AAAA,IACA,SAAA,EAAW,YAAA;AAAA,IACX,MAAM,MAAA,GAAS;AACb,MAAA,IAAI,QAAA,EAAU;AACd,MAAA,QAAA,GAAW,IAAA;AAGX,MAAA,KAAA,CAAM,WAAA,GAAc,gBAAA;AAGpB,MAAA,IAAI,QAAA,EAAU;AACZ,QAAA,IAAI;AAAE,UAAA,MAAM,QAAA;AAAA,QAAU,CAAA,CAAA,MAAQ;AAAA,QAAe;AAAA,MAC/C;AAEA,MAAA,MAAA,GAAS,IAAA;AACT,MAAA,IAAI;AAAE,QAAA,MAAM,KAAA,CAAM,oBAAoB,QAAQ,CAAA;AAAA,MAAG,CAAA,CAAA,MAAQ;AAAA,MAAe;AAAA,IAC1E;AAAA,GACF;AACF","file":"chunk-L4NPOJ36.cjs","sourcesContent":["/**\n * libav.js HTTP block reader.\n *\n * Wraps `libav.mkblockreaderdev` + `libav.onblockread` +\n * `libav.ff_block_reader_dev_send` so that libav can demux a remote file\n * via HTTP Range requests instead of needing the entire file in memory.\n *\n * Used by the AVI/ASF/FLV probe path and the libav-backed playback /\n * conversion strategies whenever the source is a URL.\n *\n * Design notes:\n *\n * - **Range support detection** is done by issuing a `Range: bytes=0-0`\n * probe request. 
We do NOT trust `Accept-Ranges` headers — some servers\n * support ranges but don't advertise them, others advertise but don't.\n * The probe request is the canonical signal: a `206 Partial Content`\n * response means we can stream; anything else fails fast with a clear\n * error. We never silently fall back to a full download.\n *\n * - **Sequential reads.** libav can issue overlapping `onblockread`\n * callbacks. The reader serializes them through a single async queue\n * so a) `ff_block_reader_dev_send` calls are well-ordered and b) we\n * never have two in-flight fetches for unrelated reads. Throughput\n * for v1 is \"good enough\"; correctness > parallelism.\n *\n * - **In-flight dedup.** If libav asks for `(pos=1000, len=4096)` twice\n * in a row before the first request resolves, the second call awaits\n * the first instead of issuing a duplicate fetch. This handles the\n * \"demuxer re-reads the same header\" pattern cheaply.\n *\n * - **Read-ahead clamp.** libav's requested length is doubled, then\n * clamped to `[256 KB, 1 MB]`. Small reads get amortized; pathological\n * large requests don't OOM us.\n *\n * - **Last-block cache.** Only the most-recent fetched block is kept.\n * Re-fetches via Range are cheap; an LRU cache is post-1.0.\n *\n * - **Safe detach.** `detach()` clears `libav.onblockread`, sets a\n * destroyed flag, and ignores any in-flight fetch resolutions so we\n * never write into a torn-down demuxer.\n */\n\nconst MIN_READ = 256 * 1024;\nconst MAX_READ = 1 * 1024 * 1024;\n\ninterface LibavLike {\n mkblockreaderdev(name: string, size: number): Promise<void>;\n unlinkreadaheadfile(name: string): Promise<void>;\n ff_block_reader_dev_send(\n name: string,\n pos: number,\n data: Uint8Array | null,\n opts?: { errorCode?: number; error?: unknown },\n ): Promise<void>;\n onblockread?: (filename: string, pos: number, length: number) => void;\n}\n\nexport interface LibavHttpReaderHandle {\n /** Total file size (bytes) reported by the server. 
*/\n readonly size: number;\n /** Always `\"http-range\"` for now. Reserved for future transports. */\n readonly transport: \"http-range\";\n /** Stop serving reads, clear the libav callback, and ignore late fetches. */\n detach(): Promise<void>;\n}\n\nexport interface AttachLibavHttpReaderOptions {\n /** Optional `RequestInit` extras (mode, credentials, headers, etc.). */\n requestInit?: RequestInit;\n /** Override fetch (for testing). Defaults to globalThis.fetch. */\n fetchFn?: typeof fetch;\n}\n\n/**\n * Result of preparing a libav-readable file from a normalized source.\n * Either an in-memory Blob (created via `mkreadaheadfile`) or a streaming\n * HTTP reader (created via `attachLibavHttpReader`). Callers should\n * `await detach()` when done so resources are cleaned up symmetrically.\n */\nexport interface LibavInputHandle {\n /** The virtual filename libav sees — pass to `ff_init_demuxer_file`. */\n readonly filename: string;\n /** \"blob\" for in-memory, \"http-range\" for streaming URL. */\n readonly transport: \"blob\" | \"http-range\";\n /** Total file size in bytes if known, otherwise undefined. */\n readonly size: number | undefined;\n /** Tear down the virtual file (and any HTTP reader state). */\n detach(): Promise<void>;\n}\n\ninterface LibavLikeWithBlob extends LibavLike {\n mkreadaheadfile(name: string, blob: Blob): Promise<void>;\n}\n\n/**\n * Convenience for the libav-backed strategies. Given a normalized source,\n * either creates an in-memory readahead file (for Blob inputs) or attaches\n * the HTTP block reader (for URL inputs). 
Returns a handle the caller\n * should detach when done.\n */\nexport async function prepareLibavInput(\n libav: LibavLikeWithBlob,\n filename: string,\n source: import(\"./source.js\").NormalizedSource,\n): Promise<LibavInputHandle> {\n if (source.kind === \"url\") {\n const handle = await attachLibavHttpReader(libav, filename, source.url);\n return {\n filename,\n transport: \"http-range\",\n size: handle.size,\n detach: () => handle.detach(),\n };\n }\n await libav.mkreadaheadfile(filename, source.blob);\n return {\n filename,\n transport: \"blob\",\n size: source.byteLength,\n detach: async () => {\n try { await libav.unlinkreadaheadfile(filename); } catch { /* ignore */ }\n },\n };\n}\n\n/**\n * Attach an HTTP block reader to a libav.js instance. After this resolves,\n * libav can `ff_init_demuxer_file(filename)` and the demuxer will pull\n * bytes via Range requests instead of needing a Blob.\n *\n * Fails fast (before any libav setup) if the server doesn't support\n * Range requests.\n */\nexport async function attachLibavHttpReader(\n libav: LibavLike,\n filename: string,\n url: string,\n options: AttachLibavHttpReaderOptions = {},\n): Promise<LibavHttpReaderHandle> {\n const fetchFn = options.fetchFn ?? fetch;\n\n // 1. Probe the server with a single-byte Range request.\n let probeRes: Response;\n try {\n probeRes = await fetchFn(url, {\n ...options.requestInit,\n headers: {\n ...(options.requestInit?.headers ?? {}),\n Range: \"bytes=0-0\",\n },\n });\n } catch (err) {\n throw new Error(\n `libav HTTP reader: failed to reach ${url}: ${(err as Error).message}`,\n );\n }\n if (probeRes.status !== 206) {\n // 200 means the server ignored Range and would have sent the whole\n // file. We refuse to silently slurp gigabytes.\n throw new Error(\n `libav HTTP reader: ${url} does not support HTTP Range requests ` +\n `(server returned ${probeRes.status} for a Range probe; need 206 Partial Content). 
` +\n `Remote AVI/ASF/FLV playback requires a server that honors byte-range requests.`,\n );\n }\n\n // 2. Parse total file size from Content-Range: \"bytes 0-0/12345678\"\n const contentRange = probeRes.headers.get(\"content-range\") ?? \"\";\n const sizeMatch = contentRange.match(/\\/(\\d+)$/);\n if (!sizeMatch) {\n throw new Error(\n `libav HTTP reader: ${url} returned 206 but no parseable Content-Range header (got: \"${contentRange}\")`,\n );\n }\n const size = parseInt(sizeMatch[1], 10);\n if (!Number.isFinite(size) || size <= 0) {\n throw new Error(\n `libav HTTP reader: ${url} reported invalid file size ${size}`,\n );\n }\n\n // Drain the probe body so the connection can be reused.\n try { await probeRes.arrayBuffer(); } catch { /* ignore */ }\n\n // 3. Create the virtual file libav will read from.\n await libav.mkblockreaderdev(filename, size);\n\n // ── State ───────────────────────────────────────────────────────────────\n\n let detached = false;\n // Most-recently fetched block. Cached so re-reads of the same region\n // (e.g. demuxer re-walks the header) don't issue another HTTP request.\n let cached: { pos: number; bytes: Uint8Array } | null = null;\n // The currently in-flight fetch, if any. Used both for serialization\n // (we await this before starting another) and for in-flight dedup.\n let inflight: Promise<void> | null = null;\n\n function clampReadLength(requested: number): number {\n const doubled = requested * 2;\n if (doubled < MIN_READ) return MIN_READ;\n if (doubled > MAX_READ) return MAX_READ;\n return doubled;\n }\n\n /** True if the cached block fully covers `[pos, pos+length)`. */\n function cacheCovers(pos: number, length: number): boolean {\n if (!cached) return false;\n return pos >= cached.pos && pos + length <= cached.pos + cached.bytes.byteLength;\n }\n\n /** Slice the requested window out of the cached block. 
*/\n function sliceFromCache(pos: number, length: number): Uint8Array {\n if (!cached) throw new Error(\"sliceFromCache called with no cache\");\n const offset = pos - cached.pos;\n return cached.bytes.subarray(offset, offset + length);\n }\n\n /** Fetch one Range and update the cache. */\n async function fetchRange(pos: number, length: number): Promise<Uint8Array> {\n const end = Math.min(pos + length - 1, size - 1);\n const res = await fetchFn(url, {\n ...options.requestInit,\n headers: {\n ...(options.requestInit?.headers ?? {}),\n Range: `bytes=${pos}-${end}`,\n },\n });\n if (res.status !== 206 && res.status !== 200) {\n throw new Error(\n `libav HTTP reader: Range request bytes=${pos}-${end} returned ${res.status}`,\n );\n }\n const buf = new Uint8Array(await res.arrayBuffer());\n cached = { pos, bytes: buf };\n return buf;\n }\n\n /**\n * Handle a single libav read request. Serializes against any in-flight\n * read by chaining off `inflight`. Honors `detached` at every async\n * boundary so a torn-down reader never writes back into libav.\n */\n async function handleRead(name: string, pos: number, length: number): Promise<void> {\n // Wait for any preceding read to finish so we don't interleave.\n if (inflight) {\n try { await inflight; } catch { /* ignore — that read's own caller handled it */ }\n }\n if (detached) return;\n\n // Cache hit — reply directly without a network round-trip.\n if (cacheCovers(pos, length)) {\n const data = sliceFromCache(pos, length);\n try { await libav.ff_block_reader_dev_send(name, pos, data); } catch { /* ignore — libav may have torn down */ }\n return;\n }\n\n // Cache miss — fetch via Range. 
Read-ahead amortizes small reads.\n const fetchLen = clampReadLength(length);\n const fetched = (async () => {\n try {\n const buf = await fetchRange(pos, fetchLen);\n if (detached) return;\n // Slice exactly what libav asked for and send it back.\n const reply = buf.subarray(0, Math.min(length, buf.byteLength));\n try { await libav.ff_block_reader_dev_send(name, pos, reply); } catch { /* ignore */ }\n } catch (err) {\n if (detached) return;\n // Signal EOF + error code to libav so the demuxer surfaces it.\n try {\n await libav.ff_block_reader_dev_send(name, pos, null, {\n error: err,\n });\n } catch { /* ignore */ }\n }\n })();\n inflight = fetched;\n try { await fetched; } finally { if (inflight === fetched) inflight = null; }\n }\n\n // 4. Wire the callback. The signature accepts `(name, pos, length)` and\n // we hand it to handleRead which does all the work asynchronously.\n // Note: libav.js dispatches this synchronously from a worker message,\n // so we kick off handleRead but don't await — the queue inside handleRead\n // serializes things.\n const previousCallback = libav.onblockread;\n libav.onblockread = (name: string, pos: number, length: number) => {\n if (detached || name !== filename) {\n // Forward to any previous callback (e.g. another reader on the same\n // libav instance). 
This is rare in practice but cheap to support.\n previousCallback?.(name, pos, length);\n return;\n }\n void handleRead(name, pos, length);\n };\n\n return {\n size,\n transport: \"http-range\",\n async detach() {\n if (detached) return;\n detached = true;\n // Restore the previous callback (if any) so we don't break unrelated\n // readers on the same libav instance.\n libav.onblockread = previousCallback;\n // Wait for the last in-flight read to settle so we don't tear down\n // the virtual file while libav is still expecting a response.\n if (inflight) {\n try { await inflight; } catch { /* ignore */ }\n }\n // Drop the cache and unlink the virtual file.\n cached = null;\n try { await libav.unlinkreadaheadfile(filename); } catch { /* ignore */ }\n },\n };\n}\n"]}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
'use strict';

// src/strategies/fallback/variant-routing.ts

// Containers only the custom "avbridge" libav build can demux.
var LEGACY_CONTAINERS = /* @__PURE__ */ new Set(["avi", "asf", "flv"]);

// Video codecs missing from the npm "webcodecs" libav variant.
var LEGACY_VIDEO_CODECS = /* @__PURE__ */ new Set([
  "wmv3",
  "vc1",
  "mpeg4",
  // MPEG-4 Part 2 / DivX / Xvid
  "rv40",
  "mpeg2",
  "mpeg1",
  "theora"
]);

// Audio codecs missing from the npm "webcodecs" libav variant.
var LEGACY_AUDIO_CODECS = /* @__PURE__ */ new Set(["wmav2", "wmapro", "ac3", "eac3"]);

/**
 * Pick which libav.js variant to load for a media context: "avbridge"
 * (custom build with legacy demuxers/decoders) when the container or any
 * track codec is legacy-only, otherwise the lighter "webcodecs" variant.
 */
function pickLibavVariant(ctx) {
  if (LEGACY_CONTAINERS.has(ctx.container)) return "avbridge";
  const hasLegacyVideo = ctx.videoTracks.some((t) => LEGACY_VIDEO_CODECS.has(t.codec));
  const hasLegacyAudio = ctx.audioTracks.some((t) => LEGACY_AUDIO_CODECS.has(t.codec));
  return hasLegacyVideo || hasLegacyAudio ? "avbridge" : "webcodecs";
}

exports.pickLibavVariant = pickLibavVariant;
//# sourceMappingURL=chunk-NZU7W256.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/strategies/fallback/variant-routing.ts"],"names":[],"mappings":";;;AAmBA,IAAM,oCAAoB,IAAI,GAAA,CAAI,CAAC,KAAA,EAAO,KAAA,EAAO,KAAK,CAAC,CAAA;AAEvD,IAAM,mBAAA,uBAA0B,GAAA,CAAgB;AAAA,EAC9C,MAAA;AAAA,EACA,KAAA;AAAA,EACA,OAAA;AAAA;AAAA,EACA,MAAA;AAAA,EACA,OAAA;AAAA,EACA,OAAA;AAAA,EACA;AACF,CAAC,CAAA;AAED,IAAM,mBAAA,uBAA0B,GAAA,CAAgB,CAAC,SAAS,QAAA,EAAU,KAAA,EAAO,MAAM,CAAC,CAAA;AAE3E,SAAS,iBAAiB,GAAA,EAAiC;AAChE,EAAA,IAAI,iBAAA,CAAkB,GAAA,CAAI,GAAA,CAAI,SAAS,GAAG,OAAO,UAAA;AACjD,EAAA,KAAA,MAAW,CAAA,IAAK,IAAI,WAAA,EAAa;AAC/B,IAAA,IAAI,mBAAA,CAAoB,GAAA,CAAI,CAAA,CAAE,KAAK,GAAG,OAAO,UAAA;AAAA,EAC/C;AACA,EAAA,KAAA,MAAW,CAAA,IAAK,IAAI,WAAA,EAAa;AAC/B,IAAA,IAAI,mBAAA,CAAoB,GAAA,CAAI,CAAA,CAAE,KAAK,GAAG,OAAO,UAAA;AAAA,EAC/C;AACA,EAAA,OAAO,WAAA;AACT","file":"chunk-NZU7W256.cjs","sourcesContent":["import type { MediaContext, AudioCodec, VideoCodec } from \"../../types.js\";\nimport type { LibavVariant } from \"./libav-loader.js\";\n\n/**\n * Decide which libav.js variant to load for a given media context.\n *\n * - **webcodecs** (~5 MB, npm) — modern formats only, designed for the\n * WebCodecs bridge. Used when the codec is browser-supported and we just\n * need libav.js for demuxing or as a parser source.\n *\n * - **avbridge** (custom build, vendor/libav/) — has the AVI/ASF/FLV demuxers\n * and the legacy decoders (WMV3, MPEG-4 Part 2, VC-1, MS-MPEG4 v1/2/3,\n * AC-3, WMA*). Required for any of those formats; the npm variants ship\n * none of them.\n *\n * Rule: pick \"avbridge\" if either the container or any codec is one only the\n * custom build can handle. 
Otherwise pick \"webcodecs\".\n */\n\nconst LEGACY_CONTAINERS = new Set([\"avi\", \"asf\", \"flv\"]);\n\nconst LEGACY_VIDEO_CODECS = new Set<VideoCodec>([\n \"wmv3\",\n \"vc1\",\n \"mpeg4\", // MPEG-4 Part 2 / DivX / Xvid\n \"rv40\",\n \"mpeg2\",\n \"mpeg1\",\n \"theora\",\n]);\n\nconst LEGACY_AUDIO_CODECS = new Set<AudioCodec>([\"wmav2\", \"wmapro\", \"ac3\", \"eac3\"]);\n\nexport function pickLibavVariant(ctx: MediaContext): LibavVariant {\n if (LEGACY_CONTAINERS.has(ctx.container)) return \"avbridge\";\n for (const v of ctx.videoTracks) {\n if (LEGACY_VIDEO_CODECS.has(v.codec)) return \"avbridge\";\n }\n for (const a of ctx.audioTracks) {\n if (LEGACY_AUDIO_CODECS.has(a.codec)) return \"avbridge\";\n }\n return \"webcodecs\";\n}\n"]}
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
// src/util/source.ts

// Bytes the sniffer needs to identify every supported container. MPEG-TS
// needs the most: a sync byte at offset 0 and at offset 188 (one TS packet
// apart), plus headroom for the M2TS variant (offsets 4/192).
var SNIFF_BYTES_NEEDED = 380;

// Range-request window for URL sources — larger than the sniffer strictly
// needs so the cache has headroom for the demuxer's first read.
var URL_SNIFF_RANGE_BYTES = 32 * 1024;

/**
 * True when the normalized source already holds the entire file's bytes in
 * memory (the "blob" variant), as opposed to a streaming "url" source.
 */
function isInMemorySource(source) {
  const { kind } = source;
  return kind === "blob";
}
|
|
7
|
+
async function normalizeSource(source) {
|
|
8
|
+
if (source instanceof File) {
|
|
9
|
+
return {
|
|
10
|
+
kind: "blob",
|
|
11
|
+
blob: source,
|
|
12
|
+
name: source.name,
|
|
13
|
+
byteLength: source.size,
|
|
14
|
+
original: source
|
|
15
|
+
};
|
|
16
|
+
}
|
|
17
|
+
if (source instanceof Blob) {
|
|
18
|
+
return { kind: "blob", blob: source, byteLength: source.size, original: source };
|
|
19
|
+
}
|
|
20
|
+
if (source instanceof ArrayBuffer) {
|
|
21
|
+
const blob = new Blob([source]);
|
|
22
|
+
return { kind: "blob", blob, byteLength: blob.size, original: source };
|
|
23
|
+
}
|
|
24
|
+
if (source instanceof Uint8Array) {
|
|
25
|
+
const blob = new Blob([source]);
|
|
26
|
+
return { kind: "blob", blob, byteLength: blob.size, original: source };
|
|
27
|
+
}
|
|
28
|
+
if (typeof source === "string" || source instanceof URL) {
|
|
29
|
+
const url = source instanceof URL ? source.toString() : source;
|
|
30
|
+
return await fetchUrlForSniff(url, source);
|
|
31
|
+
}
|
|
32
|
+
throw new TypeError("unsupported source type");
|
|
33
|
+
}
|
|
34
|
+
/**
 * Fetch the first ~32 KB of a URL via an HTTP Range request so the container
 * sniffer has bytes to inspect, without downloading the whole file. If the
 * server ignores Range and streams the full body (200), only the sniff
 * window is read and the rest of the response is cancelled.
 *
 * @param {string} url - resolved URL string.
 * @param {*} originalSource - the caller's original input, passed through.
 * @returns {Promise<object>} a "url"-kind normalized source with sniffBytes.
 * @throws {Error} when the fetch fails (with `cause` set) or the server
 *   returns a non-OK, non-206 status.
 */
async function fetchUrlForSniff(url, originalSource) {
  // Best-effort display name: last path segment with query string stripped.
  const name = url.split("/").pop()?.split("?")[0] ?? void 0;
  let res;
  try {
    res = await fetch(url, {
      headers: { Range: `bytes=0-${URL_SNIFF_RANGE_BYTES - 1}` }
    });
  } catch (err) {
    // Preserve the underlying network failure for callers via err.cause.
    throw new Error(`failed to fetch source ${url}: ${err.message}`, { cause: err });
  }
  if (!res.ok && res.status !== 206) {
    throw new Error(`failed to fetch source ${url}: ${res.status} ${res.statusText}`);
  }
  // Total file size: Content-Range ("bytes 0-N/total") is authoritative.
  let byteLength;
  const contentRange = res.headers.get("content-range");
  if (contentRange) {
    const m = contentRange.match(/\/(\d+)$/);
    if (m) byteLength = parseInt(m[1], 10);
  }
  if (byteLength === void 0) {
    const cl = res.headers.get("content-length");
    if (cl) {
      const n = parseInt(cl, 10);
      if (Number.isFinite(n)) {
        // 200 → Content-Length is the whole file. 206 without a parseable
        // Content-Range → non-standard server; fall back to the chunk length.
        if (res.status === 200) byteLength = n;
        else if (res.status === 206 && !contentRange) byteLength = n;
      }
    }
  }
  // Stream only the sniff window; stop early if the server ignored Range.
  const reader = res.body?.getReader();
  if (!reader) {
    // No streamable body (some test environments): buffer and slice.
    const buf = new Uint8Array(await res.arrayBuffer());
    const sniffBytes2 = buf.slice(0, URL_SNIFF_RANGE_BYTES);
    return { kind: "url", url, sniffBytes: sniffBytes2, name, byteLength, original: originalSource };
  }
  const chunks = [];
  let collected = 0;
  while (collected < URL_SNIFF_RANGE_BYTES) {
    const { done, value } = await reader.read();
    if (done) break;
    chunks.push(value);
    collected += value.byteLength;
  }
  // Cancel the rest of the response so we don't keep downloading.
  await reader.cancel().catch(() => {
  });
  // Concatenate at most URL_SNIFF_RANGE_BYTES into a single buffer.
  const total = Math.min(collected, URL_SNIFF_RANGE_BYTES);
  const sniffBytes = new Uint8Array(total);
  let offset = 0;
  for (const chunk of chunks) {
    if (offset >= total) break;
    // Advance by the bytes actually written so `offset` never overshoots
    // `total` (the previous code added the full chunk length).
    const take = Math.min(chunk.byteLength, total - offset);
    sniffBytes.set(chunk.subarray(0, take), offset);
    offset += take;
  }
  return { kind: "url", url, sniffBytes, name, byteLength, original: originalSource };
}
|
|
90
|
+
/**
 * Identify a container format from the leading bytes of a file.
 * Matches magic numbers only — file extensions are never consulted.
 * Returns "unknown" when no signature matches.
 *
 * @param {Uint8Array} head - at least SNIFF_BYTES_NEEDED bytes ideally;
 *   shorter buffers simply fail the checks that reach past the end.
 */
function sniffContainerFromBytes(head) {
  // True when every expected byte matches starting at `i` (out-of-range
  // reads yield undefined and fail the comparison, like the direct checks).
  const at = (i, ...expect) => expect.every((b, k) => head[i + k] === b);
  // MPEG-TS: 0x47 sync bytes one packet (188 bytes) apart; the M2TS
  // variant has a 4-byte prefix, shifting the sync bytes to offsets 4/192.
  if (head.length >= 376 && head[0] === 71 && head[188] === 71) return "mpegts";
  if (head.length >= 380 && head[4] === 71 && head[192] === 71) return "mpegts";
  // RIFF containers: "RIFF" then the form type ("AVI"/"WAVE") at offset 8.
  if (at(0, 82, 73, 70, 70)) {
    if (at(8, 65, 86, 73)) return "avi";
    if (at(8, 87, 65, 86, 69)) return "wav";
  }
  // EBML header 1A 45 DF A3 → Matroska/WebM (DocType distinguishes later).
  if (at(0, 26, 69, 223, 163)) return "mkv";
  // ISO BMFF: "ftyp" at offset 4; a "qt" brand means QuickTime MOV.
  if (at(4, 102, 116, 121, 112)) {
    const brand = String.fromCharCode(head[8], head[9], head[10], head[11]);
    return brand.startsWith("qt") ? "mov" : "mp4";
  }
  // ASF/WMV header GUID prefix 30 26 B2 75 8E 66 CF 11.
  if (at(0, 48, 38, 178, 117, 142, 102, 207, 17)) return "asf";
  // "FLV"
  if (at(0, 70, 76, 86)) return "flv";
  // "OggS"
  if (at(0, 79, 103, 103, 83)) return "ogg";
  // "fLaC"
  if (at(0, 102, 76, 97, 67)) return "flac";
  // "ID3" → MP3 with an ID3v2 tag.
  if (at(0, 73, 68, 51)) return "mp3";
  // MPEG audio frame sync FF Ex; the ADTS AAC pattern narrows to FF F1/F9.
  if (head[0] === 255 && (head[1] & 224) === 224) {
    return (head[1] & 246) === 240 ? "adts" : "mp3";
  }
  return "unknown";
}
|
|
118
|
+
/**
 * Sniff the container of any NormalizedSource, regardless of kind.
 * URL sources already carry a pre-fetched sniff window; blob sources read
 * just the SNIFF_BYTES_NEEDED leading bytes from the blob.
 */
async function sniffNormalizedSource(source) {
  if (source.kind === "url") {
    return sniffContainerFromBytes(source.sniffBytes);
  }
  const head = await readBlobBytes(source.blob, SNIFF_BYTES_NEEDED);
  return sniffContainerFromBytes(new Uint8Array(head));
}
|
|
125
|
+
/**
 * Backwards-compatible entry point for callers that still hold a raw Blob.
 * Prefer sniffNormalizedSource for new code.
 */
async function sniffContainer(blob) {
  const head = new Uint8Array(await readBlobBytes(blob, SNIFF_BYTES_NEEDED));
  return sniffContainerFromBytes(head);
}
|
|
129
|
+
/**
 * Read at most `limit` leading bytes of a Blob as an ArrayBuffer.
 * Tries Blob.prototype.arrayBuffer first (modern environments); on absence
 * or failure falls back to FileReader so jsdom-style environments work.
 */
async function readBlobBytes(blob, limit) {
  const head = blob.slice(0, limit);
  if (typeof head.arrayBuffer === "function") {
    try {
      return await head.arrayBuffer();
    } catch {
      // fall through to the FileReader path below
    }
  }
  return new Promise((resolve, reject) => {
    const fr = new FileReader();
    fr.onload = () => resolve(fr.result);
    fr.onerror = () => reject(fr.error ?? new Error("FileReader failed"));
    fr.readAsArrayBuffer(head);
  });
}
|
|
144
|
+
|
|
145
|
+
export { isInMemorySource, normalizeSource, sniffContainer, sniffContainerFromBytes, sniffNormalizedSource };
|
|
146
|
+
//# sourceMappingURL=chunk-PQTZS7OA.js.map
|
|
147
|
+
//# sourceMappingURL=chunk-PQTZS7OA.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/util/source.ts"],"names":["sniffBytes"],"mappings":";AAOA,IAAM,kBAAA,GAAqB,GAAA;AAQ3B,IAAM,wBAAwB,EAAA,GAAK,IAAA;AAgC5B,SAAS,iBAAiB,MAAA,EAAiF;AAChH,EAAA,OAAO,OAAO,IAAA,KAAS,MAAA;AACzB;AAYA,eAAsB,gBAAgB,MAAA,EAA+C;AACnF,EAAA,IAAI,kBAAkB,IAAA,EAAM;AAC1B,IAAA,OAAO;AAAA,MACL,IAAA,EAAM,MAAA;AAAA,MACN,IAAA,EAAM,MAAA;AAAA,MACN,MAAM,MAAA,CAAO,IAAA;AAAA,MACb,YAAY,MAAA,CAAO,IAAA;AAAA,MACnB,QAAA,EAAU;AAAA,KACZ;AAAA,EACF;AACA,EAAA,IAAI,kBAAkB,IAAA,EAAM;AAC1B,IAAA,OAAO,EAAE,MAAM,MAAA,EAAQ,IAAA,EAAM,QAAQ,UAAA,EAAY,MAAA,CAAO,IAAA,EAAM,QAAA,EAAU,MAAA,EAAO;AAAA,EACjF;AACA,EAAA,IAAI,kBAAkB,WAAA,EAAa;AACjC,IAAA,MAAM,IAAA,GAAO,IAAI,IAAA,CAAK,CAAC,MAAM,CAAC,CAAA;AAC9B,IAAA,OAAO,EAAE,MAAM,MAAA,EAAQ,IAAA,EAAM,YAAY,IAAA,CAAK,IAAA,EAAM,UAAU,MAAA,EAAO;AAAA,EACvE;AACA,EAAA,IAAI,kBAAkB,UAAA,EAAY;AAChC,IAAA,MAAM,IAAA,GAAO,IAAI,IAAA,CAAK,CAAC,MAAkB,CAAC,CAAA;AAC1C,IAAA,OAAO,EAAE,MAAM,MAAA,EAAQ,IAAA,EAAM,YAAY,IAAA,CAAK,IAAA,EAAM,UAAU,MAAA,EAAO;AAAA,EACvE;AACA,EAAA,IAAI,OAAO,MAAA,KAAW,QAAA,IAAY,MAAA,YAAkB,GAAA,EAAK;AACvD,IAAA,MAAM,GAAA,GAAM,MAAA,YAAkB,GAAA,GAAM,MAAA,CAAO,UAAS,GAAI,MAAA;AACxD,IAAA,OAAO,MAAM,gBAAA,CAAiB,GAAA,EAAK,MAAM,CAAA;AAAA,EAC3C;AACA,EAAA,MAAM,IAAI,UAAU,yBAAyB,CAAA;AAC/C;AAQA,eAAe,gBAAA,CAAiB,KAAa,cAAA,EAAuD;AAClG,EAAA,MAAM,IAAA,GAAO,GAAA,CAAI,KAAA,CAAM,GAAG,CAAA,CAAE,GAAA,EAAI,EAAG,KAAA,CAAM,GAAG,CAAA,CAAE,CAAC,CAAA,IAAK,MAAA;AAGpD,EAAA,IAAI,GAAA;AACJ,EAAA,IAAI;AACF,IAAA,GAAA,GAAM,MAAM,MAAM,GAAA,EAAK;AAAA,MACrB,SAAS,EAAE,KAAA,EAAO,CAAA,QAAA,EAAW,qBAAA,GAAwB,CAAC,CAAA,CAAA;AAAG,KAC1D,CAAA;AAAA,EACH,SAAS,GAAA,EAAK;AACZ,IAAA,MAAM,IAAI,KAAA,CAAM,CAAA,uBAAA,EAA0B,GAAG,CAAA,EAAA,EAAM,GAAA,CAAc,OAAO,CAAA,CAAE,CAAA;AAAA,EAC5E;AACA,EAAA,IAAI,CAAC,GAAA,CAAI,EAAA,IAAM,GAAA,CAAI,WAAW,GAAA,EAAK;AACjC,IAAA,MAAM,IAAI,KAAA,CAAM,CAAA,uBAAA,EAA0B,GAAG,CAAA,EAAA,EAAK,IAAI,MAAM,CAAA,CAAA,EAAI,GAAA,CAAI,UAAU,CAAA,CAAE,CAAA;AAAA,EAClF;AAGA,EAAA,IAAI,UAAA;AACJ,EAAA,MAAM,YAAA,GAAe,GAAA,CAAI,OAAA,CAAQ,GAAA,CAAI,eAAe,CAAA;AACpD,EAAA,IAAI,YAAA,EAAc;AAEhB,IAAA,MAAM,CAAA,G
AAI,YAAA,CAAa,KAAA,CAAM,UAAU,CAAA;AACvC,IAAA,IAAI,GAAG,UAAA,GAAa,QAAA,CAAS,CAAA,CAAE,CAAC,GAAG,EAAE,CAAA;AAAA,EACvC;AACA,EAAA,IAAI,eAAe,MAAA,EAAW;AAC5B,IAAA,MAAM,EAAA,GAAK,GAAA,CAAI,OAAA,CAAQ,GAAA,CAAI,gBAAgB,CAAA;AAC3C,IAAA,IAAI,EAAA,EAAI;AACN,MAAA,MAAM,CAAA,GAAI,QAAA,CAAS,EAAA,EAAI,EAAE,CAAA;AACzB,MAAA,IAAI,MAAA,CAAO,QAAA,CAAS,CAAC,CAAA,EAAG;AAKtB,QAAA,IAAI,GAAA,CAAI,MAAA,KAAW,GAAA,EAAK,UAAA,GAAa,CAAA;AAAA,aAAA,IAC5B,GAAA,CAAI,MAAA,KAAW,GAAA,IAAO,CAAC,cAAc,UAAA,GAAa,CAAA;AAAA,MAC7D;AAAA,IACF;AAAA,EACF;AAKA,EAAA,MAAM,MAAA,GAAS,GAAA,CAAI,IAAA,EAAM,SAAA,EAAU;AACnC,EAAA,IAAI,CAAC,MAAA,EAAQ;AAIX,IAAA,MAAM,MAAM,IAAI,UAAA,CAAW,MAAM,GAAA,CAAI,aAAa,CAAA;AAClD,IAAA,MAAMA,WAAAA,GAAa,GAAA,CAAI,KAAA,CAAM,CAAA,EAAG,qBAAqB,CAAA;AACrD,IAAA,OAAO,EAAE,MAAM,KAAA,EAAO,GAAA,EAAK,YAAAA,WAAAA,EAAY,IAAA,EAAM,UAAA,EAAY,QAAA,EAAU,cAAA,EAAe;AAAA,EACpF;AAEA,EAAA,MAAM,SAAuB,EAAC;AAC9B,EAAA,IAAI,SAAA,GAAY,CAAA;AAChB,EAAA,OAAO,YAAY,qBAAA,EAAuB;AACxC,IAAA,MAAM,EAAE,IAAA,EAAM,KAAA,EAAM,GAAI,MAAM,OAAO,IAAA,EAAK;AAC1C,IAAA,IAAI,IAAA,EAAM;AACV,IAAA,MAAA,CAAO,KAAK,KAAK,CAAA;AACjB,IAAA,SAAA,IAAa,KAAA,CAAM,UAAA;AAAA,EACrB;AAEA,EAAA,MAAM,MAAA,CAAO,MAAA,EAAO,CAAE,KAAA,CAAM,MAAM;AAAA,EAAe,CAAC,CAAA;AAGlD,EAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,GAAA,CAAI,SAAA,EAAW,qBAAqB,CAAA;AACvD,EAAA,MAAM,UAAA,GAAa,IAAI,UAAA,CAAW,KAAK,CAAA;AACvC,EAAA,IAAI,MAAA,GAAS,CAAA;AACb,EAAA,KAAA,MAAW,SAAS,MAAA,EAAQ;AAC1B,IAAA,IAAI,UAAU,KAAA,EAAO;AACrB,IAAA,MAAM,OAAO,KAAA,GAAQ,MAAA;AACrB,IAAA,UAAA,CAAW,GAAA,CAAI,KAAA,CAAM,QAAA,CAAS,CAAA,EAAG,IAAA,CAAK,GAAA,CAAI,KAAA,CAAM,UAAA,EAAY,IAAI,CAAC,CAAA,EAAG,MAAM,CAAA;AAC1E,IAAA,MAAA,IAAU,KAAA,CAAM,UAAA;AAAA,EAClB;AAEA,EAAA,OAAO,EAAE,MAAM,KAAA,EAAO,GAAA,EAAK,YAAY,IAAA,EAAM,UAAA,EAAY,UAAU,cAAA,EAAe;AACpF;AASO,SAAS,wBAAwB,IAAA,EAAiC;AAIvE,EAAA,IAAI,IAAA,CAAK,MAAA,IAAU,GAAA,IAAO,IAAA,CAAK,CAAC,MAAM,EAAA,IAAQ,IAAA,CAAK,GAAG,CAAA,KAAM,EAAA,EAAM;AAChE,IAAA,OAAO,QAAA;AAAA,EACT;AACA,EAAA,IAAI,IAAA,CAAK,MAAA,IAAU,GAAA,IAAO,IAAA,CAAK,CAAC,MAAM,EAAA,IAAQ,IAAA,CAAK,GAAG,CAAA,KAAM,EAAA,EAAM;AAChE,IAAA,OAAO,QAAA;
AAAA,EACT;AAEA,EAAA,IACE,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IACxE,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,EAAE,CAAA,KAAM,EAAA,EACrD,OAAO,KAAA;AAET,EAAA,IACE,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,KAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IACxE,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,MAAM,EAAA,IAAQ,IAAA,CAAK,EAAE,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,EAAE,CAAA,KAAM,IAC1E,OAAO,KAAA;AAET,EAAA,IAAI,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,KAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,GAAA,IAAQ,IAAA,CAAK,CAAC,MAAM,GAAA,EAAM;AAChF,IAAA,OAAO,KAAA;AAAA,EACT;AAEA,EAAA,IAAI,IAAA,CAAK,CAAC,CAAA,KAAM,GAAA,IAAQ,KAAK,CAAC,CAAA,KAAM,GAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,GAAA,IAAQ,IAAA,CAAK,CAAC,MAAM,GAAA,EAAM;AAEhF,IAAA,MAAM,KAAA,GAAQ,MAAA,CAAO,YAAA,CAAa,IAAA,CAAK,CAAC,CAAA,EAAG,IAAA,CAAK,CAAC,CAAA,EAAG,IAAA,CAAK,EAAE,CAAA,EAAG,IAAA,CAAK,EAAE,CAAC,CAAA;AACtE,IAAA,IAAI,KAAA,CAAM,UAAA,CAAW,IAAI,CAAA,EAAG,OAAO,KAAA;AACnC,IAAA,OAAO,KAAA;AAAA,EACT;AAEA,EAAA,IACE,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,KAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,GAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,GAAA,IACxE,IAAA,CAAK,CAAC,CAAA,KAAM,GAAA,IAAQ,IAAA,CAAK,CAAC,MAAM,GAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,GAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,IACxE,OAAO,KAAA;AAET,EAAA,IAAI,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,EAAM,OAAO,KAAA;AAErE,EAAA,IAAI,KAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,GAAA,IAAQ,IAAA,CAAK,CAAC,MAAM,GAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,IAAM,OAAO,KAAA;AAEzF,EAAA,IAAI,KAAK,CAAC,CAAA,KAAM,GAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,MAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,IAAM,OAAO,MAAA;AAEzF,EAAA,IAAI,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,IAAQ,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,EAAM,OAAO,KAAA;AAErE,EAAA,I
AAI,IAAA,CAAK,CAAC,CAAA,KAAM,GAAA,IAAA,CAAS,KAAK,CAAC,CAAA,GAAI,SAAU,GAAA,EAAM;AAEjD,IAAA,IAAA,CAAK,IAAA,CAAK,CAAC,CAAA,GAAI,GAAA,MAAU,KAAM,OAAO,MAAA;AACtC,IAAA,OAAO,KAAA;AAAA,EACT;AACA,EAAA,OAAO,SAAA;AACT;AAOA,eAAsB,sBAAsB,MAAA,EAAkD;AAC5F,EAAA,IAAI,MAAA,CAAO,SAAS,KAAA,EAAO;AACzB,IAAA,OAAO,uBAAA,CAAwB,OAAO,UAAU,CAAA;AAAA,EAClD;AACA,EAAA,MAAM,GAAA,GAAM,MAAM,aAAA,CAAc,MAAA,CAAO,MAAM,kBAAkB,CAAA;AAC/D,EAAA,OAAO,uBAAA,CAAwB,IAAI,UAAA,CAAW,GAAG,CAAC,CAAA;AACpD;AAMA,eAAsB,eAAe,IAAA,EAAoC;AACvE,EAAA,MAAM,GAAA,GAAM,MAAM,aAAA,CAAc,IAAA,EAAM,kBAAkB,CAAA;AACxD,EAAA,OAAO,uBAAA,CAAwB,IAAI,UAAA,CAAW,GAAG,CAAC,CAAA;AACpD;AAMA,eAAe,aAAA,CAAc,MAAY,KAAA,EAAqC;AAC5E,EAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,CAAA,EAAG,KAAK,CAAA;AACjC,EAAA,IAAI,OAAQ,KAAA,CAA8D,WAAA,KAAgB,UAAA,EAAY;AACpG,IAAA,IAAI;AACF,MAAA,OAAO,MAAO,MAA6D,WAAA,EAAY;AAAA,IACzF,CAAA,CAAA,MAAQ;AAAA,IAER;AAAA,EACF;AACA,EAAA,OAAO,IAAI,OAAA,CAAQ,CAAC,OAAA,EAAS,MAAA,KAAW;AACtC,IAAA,MAAM,MAAA,GAAS,IAAI,UAAA,EAAW;AAC9B,IAAA,MAAA,CAAO,MAAA,GAAS,MAAM,OAAA,CAAQ,MAAA,CAAO,MAAqB,CAAA;AAC1D,IAAA,MAAA,CAAO,OAAA,GAAU,MAAM,MAAA,CAAO,MAAA,CAAO,SAAS,IAAI,KAAA,CAAM,mBAAmB,CAAC,CAAA;AAC5E,IAAA,MAAA,CAAO,kBAAkB,KAAK,CAAA;AAAA,EAChC,CAAC,CAAA;AACH","file":"chunk-PQTZS7OA.js","sourcesContent":["import type { ContainerKind, MediaInput } from \"../types.js\";\n\n/**\n * Bytes needed by the sniffer to identify every container we recognize.\n * MPEG-TS needs the most: a sync byte at offset 0 *and* offset 188 (one TS\n * packet apart). Allow a little extra for the M2TS variant (offset 4/192).\n */\nconst SNIFF_BYTES_NEEDED = 380;\n\n/**\n * Bytes to fetch from a URL during the initial sniff. We grab a slightly\n * larger range than `SNIFF_BYTES_NEEDED` so the cache has some headroom for\n * the demuxer's first read after sniffing, in case it wants to look at\n * a few extra bytes (e.g. 
mp4 ftyp + first moov box).\n */\nconst URL_SNIFF_RANGE_BYTES = 32 * 1024;\n\n/**\n * `NormalizedSource` is a discriminated union: every consumer (probe,\n * strategies) decides what to do based on `kind`. URL sources are NOT\n * fetched eagerly; we only do a Range request for the first ~32 KB so the\n * sniffer has bytes to look at. The strategies are then handed the URL\n * directly so they can stream the rest via Range requests.\n *\n * For File / Blob / ArrayBuffer / Uint8Array sources, the bytes are\n * already in memory, so we wrap them as a `blob` variant.\n */\nexport type NormalizedSource =\n | {\n kind: \"blob\";\n blob: Blob;\n name?: string;\n byteLength: number;\n original: MediaInput;\n }\n | {\n kind: \"url\";\n url: string;\n /** Bytes pulled via Range request for the sniffer. NOT the full file. */\n sniffBytes: Uint8Array;\n name?: string;\n /** Total file size from Content-Length / Content-Range. May be undefined. */\n byteLength: number | undefined;\n original: MediaInput;\n };\n\n/** True if this source carries the entire file's bytes (vs. streaming). */\nexport function isInMemorySource(source: NormalizedSource): source is Extract<NormalizedSource, { kind: \"blob\" }> {\n return source.kind === \"blob\";\n}\n\n\n/**\n * Normalize a `MediaInput` for the probe + strategy layers. **Does not**\n * download URL sources in full — only fetches the first ~32 KB via a\n * Range request, which is enough for the sniffer to identify the\n * container. 
The strategies are then expected to stream the rest via\n * mediabunny's `UrlSource` (Range requests, prefetch, parallelism, cache).\n *\n * For non-URL inputs, the bytes are already in memory and we just wrap them.\n */\nexport async function normalizeSource(source: MediaInput): Promise<NormalizedSource> {\n if (source instanceof File) {\n return {\n kind: \"blob\",\n blob: source,\n name: source.name,\n byteLength: source.size,\n original: source,\n };\n }\n if (source instanceof Blob) {\n return { kind: \"blob\", blob: source, byteLength: source.size, original: source };\n }\n if (source instanceof ArrayBuffer) {\n const blob = new Blob([source]);\n return { kind: \"blob\", blob, byteLength: blob.size, original: source };\n }\n if (source instanceof Uint8Array) {\n const blob = new Blob([source as BlobPart]);\n return { kind: \"blob\", blob, byteLength: blob.size, original: source };\n }\n if (typeof source === \"string\" || source instanceof URL) {\n const url = source instanceof URL ? source.toString() : source;\n return await fetchUrlForSniff(url, source);\n }\n throw new TypeError(\"unsupported source type\");\n}\n\n/**\n * Fetch the first ~32 KB of a URL via a Range request. Falls back to a\n * full GET if the server doesn't support range requests, but in that case\n * we only read the first 32 KB and abort the rest of the response so we\n * don't accidentally buffer a large file.\n */\nasync function fetchUrlForSniff(url: string, originalSource: MediaInput): Promise<NormalizedSource> {\n const name = url.split(\"/\").pop()?.split(\"?\")[0] ?? 
undefined;\n\n // First attempt: Range request for the sniff window.\n let res: Response;\n try {\n res = await fetch(url, {\n headers: { Range: `bytes=0-${URL_SNIFF_RANGE_BYTES - 1}` },\n });\n } catch (err) {\n throw new Error(`failed to fetch source ${url}: ${(err as Error).message}`);\n }\n if (!res.ok && res.status !== 206) {\n throw new Error(`failed to fetch source ${url}: ${res.status} ${res.statusText}`);\n }\n\n // Determine the total file size from Content-Range (preferred) or Content-Length.\n let byteLength: number | undefined;\n const contentRange = res.headers.get(\"content-range\");\n if (contentRange) {\n // \"bytes 0-32767/12345678\" — parse the part after the slash\n const m = contentRange.match(/\\/(\\d+)$/);\n if (m) byteLength = parseInt(m[1], 10);\n }\n if (byteLength === undefined) {\n const cl = res.headers.get(\"content-length\");\n if (cl) {\n const n = parseInt(cl, 10);\n if (Number.isFinite(n)) {\n // If the server returned 200 (full body), Content-Length is the\n // FILE size. If 206 (partial), it's the chunk size — only use it\n // as a total if no Content-Range was present (server doesn't do\n // ranges) AND the full response is smaller than our sniff window.\n if (res.status === 200) byteLength = n;\n else if (res.status === 206 && !contentRange) byteLength = n;\n }\n }\n }\n\n // Read the sniff bytes. If the server ignored the Range header and is\n // streaming the full file, only read the first window and let the rest\n // be GC'd. We use a reader so we can stop early.\n const reader = res.body?.getReader();\n if (!reader) {\n // No streamed body (some test environments). 
Fall back to .arrayBuffer()\n // and slice — this might pull more than we wanted, but only for the\n // initial sniff, not the full file.\n const buf = new Uint8Array(await res.arrayBuffer());\n const sniffBytes = buf.slice(0, URL_SNIFF_RANGE_BYTES);\n return { kind: \"url\", url, sniffBytes, name, byteLength, original: originalSource };\n }\n\n const chunks: Uint8Array[] = [];\n let collected = 0;\n while (collected < URL_SNIFF_RANGE_BYTES) {\n const { done, value } = await reader.read();\n if (done) break;\n chunks.push(value);\n collected += value.byteLength;\n }\n // Cancel the response so we don't keep downloading.\n await reader.cancel().catch(() => { /* ignore */ });\n\n // Concatenate up to URL_SNIFF_RANGE_BYTES.\n const total = Math.min(collected, URL_SNIFF_RANGE_BYTES);\n const sniffBytes = new Uint8Array(total);\n let offset = 0;\n for (const chunk of chunks) {\n if (offset >= total) break;\n const room = total - offset;\n sniffBytes.set(chunk.subarray(0, Math.min(chunk.byteLength, room)), offset);\n offset += chunk.byteLength;\n }\n\n return { kind: \"url\", url, sniffBytes, name, byteLength, original: originalSource };\n}\n\n/**\n * Identify the container family from a small byte buffer. Used by the\n * probe layer for both file (Blob → first 380 bytes) and URL (Range\n * request → first 32 KB) inputs.\n *\n * Sniffing intentionally does not trust file extensions.\n */\nexport function sniffContainerFromBytes(head: Uint8Array): ContainerKind {\n // MPEG-TS: sync byte 0x47 every 188 bytes. Verify at least two sync\n // bytes in the right places to avoid false positives. 
Some captures\n // start with a few junk bytes — also try offsets 4 and 192 (M2TS).\n if (head.length >= 376 && head[0] === 0x47 && head[188] === 0x47) {\n return \"mpegts\";\n }\n if (head.length >= 380 && head[4] === 0x47 && head[192] === 0x47) {\n return \"mpegts\"; // M2TS — 4-byte timestamp prefix per packet\n }\n // RIFF....AVI → AVI\n if (\n head[0] === 0x52 && head[1] === 0x49 && head[2] === 0x46 && head[3] === 0x46 &&\n head[8] === 0x41 && head[9] === 0x56 && head[10] === 0x49\n ) return \"avi\";\n // RIFF....WAVE → WAV\n if (\n head[0] === 0x52 && head[1] === 0x49 && head[2] === 0x46 && head[3] === 0x46 &&\n head[8] === 0x57 && head[9] === 0x41 && head[10] === 0x56 && head[11] === 0x45\n ) return \"wav\";\n // EBML start: 1A 45 DF A3 → MKV/WebM. Distinguish later via DocType.\n if (head[0] === 0x1a && head[1] === 0x45 && head[2] === 0xdf && head[3] === 0xa3) {\n return \"mkv\";\n }\n // ftyp at offset 4 → MP4 family\n if (head[4] === 0x66 && head[5] === 0x74 && head[6] === 0x79 && head[7] === 0x70) {\n // brand at bytes 8..11\n const brand = String.fromCharCode(head[8], head[9], head[10], head[11]);\n if (brand.startsWith(\"qt\")) return \"mov\";\n return \"mp4\";\n }\n // ASF / WMV: 30 26 B2 75 8E 66 CF 11\n if (\n head[0] === 0x30 && head[1] === 0x26 && head[2] === 0xb2 && head[3] === 0x75 &&\n head[4] === 0x8e && head[5] === 0x66 && head[6] === 0xcf && head[7] === 0x11\n ) return \"asf\";\n // FLV: 46 4C 56\n if (head[0] === 0x46 && head[1] === 0x4c && head[2] === 0x56) return \"flv\";\n // OggS: 4F 67 67 53\n if (head[0] === 0x4f && head[1] === 0x67 && head[2] === 0x67 && head[3] === 0x53) return \"ogg\";\n // FLAC: 66 4C 61 43\n if (head[0] === 0x66 && head[1] === 0x4c && head[2] === 0x61 && head[3] === 0x43) return \"flac\";\n // ID3v2: 49 44 33 → MP3 (with id3)\n if (head[0] === 0x49 && head[1] === 0x44 && head[2] === 0x33) return \"mp3\";\n // MPEG audio frame sync: FF Fx\n if (head[0] === 0xff && (head[1] & 0xe0) === 0xe0) {\n // ADTS: FF F1 / FF 
F9\n if ((head[1] & 0xf6) === 0xf0) return \"adts\";\n return \"mp3\";\n }\n return \"unknown\";\n}\n\n/**\n * Convenience: sniff a `NormalizedSource` regardless of kind. For URL\n * sources, uses the pre-fetched `sniffBytes`. For blob sources, reads the\n * first 380 bytes.\n */\nexport async function sniffNormalizedSource(source: NormalizedSource): Promise<ContainerKind> {\n if (source.kind === \"url\") {\n return sniffContainerFromBytes(source.sniffBytes);\n }\n const buf = await readBlobBytes(source.blob, SNIFF_BYTES_NEEDED);\n return sniffContainerFromBytes(new Uint8Array(buf));\n}\n\n/**\n * Backwards-compatible wrapper for code that still passes a Blob directly.\n * Prefer `sniffNormalizedSource` going forward.\n */\nexport async function sniffContainer(blob: Blob): Promise<ContainerKind> {\n const buf = await readBlobBytes(blob, SNIFF_BYTES_NEEDED);\n return sniffContainerFromBytes(new Uint8Array(buf));\n}\n\n/**\n * Read up to `limit` bytes from a Blob. Tries `Blob.arrayBuffer()` first\n * (modern browsers), then falls back to `FileReader` (works under jsdom).\n */\nasync function readBlobBytes(blob: Blob, limit: number): Promise<ArrayBuffer> {\n const slice = blob.slice(0, limit);\n if (typeof (slice as Blob & { arrayBuffer?: () => Promise<ArrayBuffer> }).arrayBuffer === \"function\") {\n try {\n return await (slice as Blob & { arrayBuffer: () => Promise<ArrayBuffer> }).arrayBuffer();\n } catch {\n /* fall through to FileReader */\n }\n }\n return new Promise((resolve, reject) => {\n const reader = new FileReader();\n reader.onload = () => resolve(reader.result as ArrayBuffer);\n reader.onerror = () => reject(reader.error ?? new Error(\"FileReader failed\"));\n reader.readAsArrayBuffer(slice);\n });\n}\n"]}
|
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
// src/util/libav-http-reader.ts

// Per-request clamps for the HTTP block reader's read-ahead window.
var MIN_READ = 256 * 1024;
var MAX_READ = 1 * 1024 * 1024;

/**
 * Expose a NormalizedSource to a libav instance as a virtual file named
 * `filename`. URL sources stream via HTTP Range requests (see
 * attachLibavHttpReader); blob sources use libav's read-ahead file.
 *
 * @returns { filename, transport, size, detach } where detach() releases
 *   the virtual file (best-effort — cleanup errors are swallowed).
 */
async function prepareLibavInput(libav, filename, source) {
  if (source.kind === "url") {
    const handle = await attachLibavHttpReader(libav, filename, source.url);
    return {
      filename,
      transport: "http-range",
      size: handle.size,
      detach: () => handle.detach()
    };
  }
  await libav.mkreadaheadfile(filename, source.blob);
  const detach = async () => {
    try {
      await libav.unlinkreadaheadfile(filename);
    } catch {
      // best-effort cleanup; the libav worker may already be gone
    }
  };
  return { filename, transport: "blob", size: source.byteLength, detach };
}
|
|
27
|
+
async function attachLibavHttpReader(libav, filename, url, options = {}) {
|
|
28
|
+
const fetchFn = options.fetchFn ?? fetch;
|
|
29
|
+
let probeRes;
|
|
30
|
+
try {
|
|
31
|
+
probeRes = await fetchFn(url, {
|
|
32
|
+
...options.requestInit,
|
|
33
|
+
headers: {
|
|
34
|
+
...options.requestInit?.headers ?? {},
|
|
35
|
+
Range: "bytes=0-0"
|
|
36
|
+
}
|
|
37
|
+
});
|
|
38
|
+
} catch (err) {
|
|
39
|
+
throw new Error(
|
|
40
|
+
`libav HTTP reader: failed to reach ${url}: ${err.message}`
|
|
41
|
+
);
|
|
42
|
+
}
|
|
43
|
+
if (probeRes.status !== 206) {
|
|
44
|
+
throw new Error(
|
|
45
|
+
`libav HTTP reader: ${url} does not support HTTP Range requests (server returned ${probeRes.status} for a Range probe; need 206 Partial Content). Remote AVI/ASF/FLV playback requires a server that honors byte-range requests.`
|
|
46
|
+
);
|
|
47
|
+
}
|
|
48
|
+
const contentRange = probeRes.headers.get("content-range") ?? "";
|
|
49
|
+
const sizeMatch = contentRange.match(/\/(\d+)$/);
|
|
50
|
+
if (!sizeMatch) {
|
|
51
|
+
throw new Error(
|
|
52
|
+
`libav HTTP reader: ${url} returned 206 but no parseable Content-Range header (got: "${contentRange}")`
|
|
53
|
+
);
|
|
54
|
+
}
|
|
55
|
+
const size = parseInt(sizeMatch[1], 10);
|
|
56
|
+
if (!Number.isFinite(size) || size <= 0) {
|
|
57
|
+
throw new Error(
|
|
58
|
+
`libav HTTP reader: ${url} reported invalid file size ${size}`
|
|
59
|
+
);
|
|
60
|
+
}
|
|
61
|
+
try {
|
|
62
|
+
await probeRes.arrayBuffer();
|
|
63
|
+
} catch {
|
|
64
|
+
}
|
|
65
|
+
await libav.mkblockreaderdev(filename, size);
|
|
66
|
+
let detached = false;
|
|
67
|
+
let cached = null;
|
|
68
|
+
let inflight = null;
|
|
69
|
+
function clampReadLength(requested) {
|
|
70
|
+
const doubled = requested * 2;
|
|
71
|
+
if (doubled < MIN_READ) return MIN_READ;
|
|
72
|
+
if (doubled > MAX_READ) return MAX_READ;
|
|
73
|
+
return doubled;
|
|
74
|
+
}
|
|
75
|
+
function cacheCovers(pos, length) {
|
|
76
|
+
if (!cached) return false;
|
|
77
|
+
return pos >= cached.pos && pos + length <= cached.pos + cached.bytes.byteLength;
|
|
78
|
+
}
|
|
79
|
+
function sliceFromCache(pos, length) {
|
|
80
|
+
if (!cached) throw new Error("sliceFromCache called with no cache");
|
|
81
|
+
const offset = pos - cached.pos;
|
|
82
|
+
return cached.bytes.subarray(offset, offset + length);
|
|
83
|
+
}
|
|
84
|
+
async function fetchRange(pos, length) {
|
|
85
|
+
const end = Math.min(pos + length - 1, size - 1);
|
|
86
|
+
const res = await fetchFn(url, {
|
|
87
|
+
...options.requestInit,
|
|
88
|
+
headers: {
|
|
89
|
+
...options.requestInit?.headers ?? {},
|
|
90
|
+
Range: `bytes=${pos}-${end}`
|
|
91
|
+
}
|
|
92
|
+
});
|
|
93
|
+
if (res.status !== 206 && res.status !== 200) {
|
|
94
|
+
throw new Error(
|
|
95
|
+
`libav HTTP reader: Range request bytes=${pos}-${end} returned ${res.status}`
|
|
96
|
+
);
|
|
97
|
+
}
|
|
98
|
+
const buf = new Uint8Array(await res.arrayBuffer());
|
|
99
|
+
cached = { pos, bytes: buf };
|
|
100
|
+
return buf;
|
|
101
|
+
}
|
|
102
|
+
// Serve one block-read request from libav: answer from the cached window if
// possible, otherwise fetch over HTTP and reply. At most one HTTP fetch runs
// at a time; later reads queue behind `inflight`.
async function handleRead(name, pos, length) {
  // Wait for any fetch already in progress so requests are serialized and
  // the cache it populates can satisfy this read.
  if (inflight) {
    try {
      await inflight;
    } catch {
      // The in-flight read reports its own error to libav; ignore here.
    }
  }
  // detach() may have run while we were waiting — never send after detach.
  if (detached) return;
  // Fast path: the cached window already covers this read.
  if (cacheCovers(pos, length)) {
    const data = sliceFromCache(pos, length);
    try {
      await libav.ff_block_reader_dev_send(name, pos, data);
    } catch {
      // Best-effort: the device may already be gone; nothing to recover.
    }
    return;
  }
  // Slow path: fetch (with read-ahead) and reply inside a tracked promise so
  // concurrent reads and detach() can await its completion.
  const fetchLen = clampReadLength(length);
  const fetched = (async () => {
    try {
      const buf = await fetchRange(pos, fetchLen);
      if (detached) return;
      // Reply with at most `length` bytes even though we fetched more.
      const reply = buf.subarray(0, Math.min(length, buf.byteLength));
      try {
        await libav.ff_block_reader_dev_send(name, pos, reply);
      } catch {
        // Best-effort send; device may have been torn down meanwhile.
      }
    } catch (err) {
      if (detached) return;
      // Surface the fetch failure to libav as a read error for this offset.
      try {
        await libav.ff_block_reader_dev_send(name, pos, null, {
          error: err
        });
      } catch {
        // Even the error report is best-effort.
      }
    }
  })();
  inflight = fetched;
  try {
    await fetched;
  } finally {
    // Only clear if no newer read replaced the in-flight slot.
    if (inflight === fetched) inflight = null;
  }
}
|
|
145
|
+
// Chain onto any existing onblockread handler so other devices keep working;
// restored on detach().
const previousCallback = libav.onblockread;
libav.onblockread = (name, pos, length) => {
  // Forward reads for other files (or after detach) to the prior handler.
  if (detached || name !== filename) {
    previousCallback?.(name, pos, length);
    return;
  }
  // Fire-and-forget: handleRead replies (or reports errors) via
  // ff_block_reader_dev_send, so there is no result to await here.
  void handleRead(name, pos, length);
};
// Handle returned to the caller.
return {
  size,
  transport: "http-range",
  // Tear down: stop serving reads, restore the previous callback, wait out
  // any in-flight fetch, drop the cache, and remove the device file.
  async detach() {
    if (detached) return;
    detached = true;
    // NOTE(review): this unconditionally restores `previousCallback`; if
    // something else replaced libav.onblockread after attach, that handler
    // is clobbered here — confirm attach/detach nesting is always LIFO.
    libav.onblockread = previousCallback;
    if (inflight) {
      try {
        await inflight;
      } catch {
        // In-flight errors were already reported to libav; ignore.
      }
    }
    cached = null;
    try {
      // NOTE(review): the device was created with mkblockreaderdev but is
      // removed with unlinkreadaheadfile — verify this is the intended
      // cleanup call for block-reader devices in the libav.js API.
      await libav.unlinkreadaheadfile(filename);
    } catch {
      // Best-effort cleanup; the worker may already be terminated.
    }
  }
};
|
|
174
|
+
|
|
175
|
+
export { attachLibavHttpReader, prepareLibavInput };
|
|
176
|
+
//# sourceMappingURL=chunk-WD2ZNQA7.js.map
|
|
177
|
+
//# sourceMappingURL=chunk-WD2ZNQA7.js.map
|