avbridge 2.2.1 → 2.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +153 -1
- package/NOTICE.md +2 -2
- package/README.md +2 -3
- package/THIRD_PARTY_LICENSES.md +2 -2
- package/dist/avi-2JPBSHGA.js +183 -0
- package/dist/avi-2JPBSHGA.js.map +1 -0
- package/dist/avi-F6WZJK5T.cjs +185 -0
- package/dist/avi-F6WZJK5T.cjs.map +1 -0
- package/dist/{avi-GCGM7OJI.js → avi-NJXAXUXK.js} +9 -3
- package/dist/avi-NJXAXUXK.js.map +1 -0
- package/dist/{avi-6SJLWIWW.cjs → avi-W6L3BTWU.cjs} +10 -4
- package/dist/avi-W6L3BTWU.cjs.map +1 -0
- package/dist/chunk-2IJ66NTD.cjs +212 -0
- package/dist/chunk-2IJ66NTD.cjs.map +1 -0
- package/dist/{chunk-ILKDNBSE.js → chunk-2XW2O3YI.cjs} +55 -10
- package/dist/chunk-2XW2O3YI.cjs.map +1 -0
- package/dist/chunk-5KVLE6YI.js +167 -0
- package/dist/chunk-5KVLE6YI.js.map +1 -0
- package/dist/chunk-5YAWWKA3.js +18 -0
- package/dist/chunk-5YAWWKA3.js.map +1 -0
- package/dist/chunk-CPJLFFCC.js +189 -0
- package/dist/chunk-CPJLFFCC.js.map +1 -0
- package/dist/chunk-CPZ7PXAM.cjs +240 -0
- package/dist/chunk-CPZ7PXAM.cjs.map +1 -0
- package/dist/{chunk-WD2ZNQA7.js → chunk-DCSOQH2N.js} +7 -4
- package/dist/chunk-DCSOQH2N.js.map +1 -0
- package/dist/{chunk-HZLQNKFN.cjs → chunk-E76AMWI4.js} +40 -15
- package/dist/chunk-E76AMWI4.js.map +1 -0
- package/dist/chunk-F3LQJKXK.cjs +20 -0
- package/dist/chunk-F3LQJKXK.cjs.map +1 -0
- package/dist/chunk-IAYKFGFG.js +200 -0
- package/dist/chunk-IAYKFGFG.js.map +1 -0
- package/dist/{chunk-DMWARSEF.js → chunk-KY2GPCT7.js} +788 -697
- package/dist/chunk-KY2GPCT7.js.map +1 -0
- package/dist/chunk-LUFA47FP.js +19 -0
- package/dist/chunk-LUFA47FP.js.map +1 -0
- package/dist/chunk-NNVOHKXJ.cjs +204 -0
- package/dist/chunk-NNVOHKXJ.cjs.map +1 -0
- package/dist/chunk-Q2VUO52Z.cjs +374 -0
- package/dist/chunk-Q2VUO52Z.cjs.map +1 -0
- package/dist/chunk-QDJLQR53.cjs +22 -0
- package/dist/chunk-QDJLQR53.cjs.map +1 -0
- package/dist/chunk-S4WAZC2T.cjs +173 -0
- package/dist/chunk-S4WAZC2T.cjs.map +1 -0
- package/dist/chunk-SMH6IOP2.js +368 -0
- package/dist/chunk-SMH6IOP2.js.map +1 -0
- package/dist/chunk-SR3MPV4D.js +237 -0
- package/dist/chunk-SR3MPV4D.js.map +1 -0
- package/dist/{chunk-UF2N5L63.cjs → chunk-TBW26OPP.cjs} +800 -710
- package/dist/chunk-TBW26OPP.cjs.map +1 -0
- package/dist/chunk-X2K3GIWE.js +235 -0
- package/dist/chunk-X2K3GIWE.js.map +1 -0
- package/dist/{chunk-L4NPOJ36.cjs → chunk-Z33SBWL5.cjs} +7 -4
- package/dist/chunk-Z33SBWL5.cjs.map +1 -0
- package/dist/chunk-ZCUXHW55.cjs +242 -0
- package/dist/chunk-ZCUXHW55.cjs.map +1 -0
- package/dist/element-browser.js +1282 -503
- package/dist/element-browser.js.map +1 -1
- package/dist/element.cjs +59 -5
- package/dist/element.cjs.map +1 -1
- package/dist/element.d.cts +39 -1
- package/dist/element.d.ts +39 -1
- package/dist/element.js +58 -4
- package/dist/element.js.map +1 -1
- package/dist/index.cjs +605 -327
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +48 -4
- package/dist/index.d.ts +48 -4
- package/dist/index.js +528 -319
- package/dist/index.js.map +1 -1
- package/dist/libav-demux-H2GS46GH.cjs +27 -0
- package/dist/{libav-http-reader-NQJVY273.js.map → libav-demux-H2GS46GH.cjs.map} +1 -1
- package/dist/libav-demux-OWZ4T2YW.js +6 -0
- package/dist/{libav-http-reader-FPYDBMYK.cjs.map → libav-demux-OWZ4T2YW.js.map} +1 -1
- package/dist/libav-http-reader-AZLE7YFS.cjs +16 -0
- package/dist/libav-http-reader-AZLE7YFS.cjs.map +1 -0
- package/dist/libav-http-reader-WXG3Z7AI.js +3 -0
- package/dist/libav-http-reader-WXG3Z7AI.js.map +1 -0
- package/dist/{libav-import-GST2AMPL.cjs → libav-import-2ZVKV2E7.cjs} +2 -2
- package/dist/{libav-import-GST2AMPL.cjs.map → libav-import-2ZVKV2E7.cjs.map} +1 -1
- package/dist/{libav-import-2JURFHEW.js → libav-import-6MGLCXVQ.js} +2 -2
- package/dist/{libav-import-2JURFHEW.js.map → libav-import-6MGLCXVQ.js.map} +1 -1
- package/dist/{player-U2NPmFvA.d.cts → player-B6WB74RD.d.cts} +62 -3
- package/dist/{player-U2NPmFvA.d.ts → player-B6WB74RD.d.ts} +62 -3
- package/dist/player.cjs +5631 -0
- package/dist/player.cjs.map +1 -0
- package/dist/player.d.cts +699 -0
- package/dist/player.d.ts +699 -0
- package/dist/player.js +5629 -0
- package/dist/player.js.map +1 -0
- package/dist/remux-OBSMIENG.cjs +35 -0
- package/dist/remux-OBSMIENG.cjs.map +1 -0
- package/dist/remux-WBYIZBBX.js +10 -0
- package/dist/remux-WBYIZBBX.js.map +1 -0
- package/dist/source-4TZ6KMNV.js +4 -0
- package/dist/{source-FFZ7TW2B.js.map → source-4TZ6KMNV.js.map} +1 -1
- package/dist/source-7YLO6E7X.cjs +29 -0
- package/dist/{source-CN43EI7Z.cjs.map → source-7YLO6E7X.cjs.map} +1 -1
- package/dist/source-MTX5ELUZ.js +4 -0
- package/dist/source-MTX5ELUZ.js.map +1 -0
- package/dist/source-VFLXLOCN.cjs +29 -0
- package/dist/source-VFLXLOCN.cjs.map +1 -0
- package/dist/subtitles-4T74JRGT.js +4 -0
- package/dist/subtitles-4T74JRGT.js.map +1 -0
- package/dist/subtitles-QUH4LPI4.cjs +29 -0
- package/dist/subtitles-QUH4LPI4.cjs.map +1 -0
- package/dist/variant-routing-434STYAB.js +3 -0
- package/dist/{variant-routing-JOBWXYKD.js.map → variant-routing-434STYAB.js.map} +1 -1
- package/dist/variant-routing-HONNAA6R.cjs +12 -0
- package/dist/{variant-routing-GOHB2RZN.cjs.map → variant-routing-HONNAA6R.cjs.map} +1 -1
- package/package.json +9 -1
- package/src/classify/rules.ts +27 -5
- package/src/convert/remux.ts +9 -35
- package/src/convert/transcode-libav.ts +691 -0
- package/src/convert/transcode.ts +53 -12
- package/src/element/avbridge-player.ts +861 -0
- package/src/element/avbridge-video.ts +54 -0
- package/src/element/player-icons.ts +25 -0
- package/src/element/player-styles.ts +472 -0
- package/src/errors.ts +53 -0
- package/src/index.ts +23 -0
- package/src/player-element.ts +18 -0
- package/src/player.ts +118 -27
- package/src/plugins/builtin.ts +2 -2
- package/src/probe/avi.ts +4 -0
- package/src/probe/index.ts +40 -10
- package/src/strategies/fallback/audio-output.ts +31 -0
- package/src/strategies/fallback/decoder.ts +179 -175
- package/src/strategies/fallback/index.ts +48 -6
- package/src/strategies/fallback/libav-import.ts +9 -1
- package/src/strategies/fallback/variant-routing.ts +7 -13
- package/src/strategies/fallback/video-renderer.ts +231 -32
- package/src/strategies/hybrid/decoder.ts +219 -200
- package/src/strategies/hybrid/index.ts +48 -7
- package/src/strategies/native.ts +6 -3
- package/src/strategies/remux/index.ts +14 -2
- package/src/strategies/remux/mse.ts +12 -2
- package/src/strategies/remux/pipeline.ts +72 -12
- package/src/subtitles/index.ts +7 -3
- package/src/subtitles/render.ts +8 -0
- package/src/types.ts +53 -1
- package/src/util/libav-demux.ts +405 -0
- package/src/util/libav-http-reader.ts +5 -1
- package/src/util/source.ts +28 -8
- package/src/util/transport.ts +26 -0
- package/vendor/libav/avbridge/libav-6.8.8.0-avbridge.wasm.mjs +1 -1
- package/vendor/libav/avbridge/libav-6.8.8.0-avbridge.wasm.wasm +0 -0
- package/dist/avi-6SJLWIWW.cjs.map +0 -1
- package/dist/avi-GCGM7OJI.js.map +0 -1
- package/dist/chunk-DMWARSEF.js.map +0 -1
- package/dist/chunk-HZLQNKFN.cjs.map +0 -1
- package/dist/chunk-ILKDNBSE.js.map +0 -1
- package/dist/chunk-J5MCMN3S.js +0 -27
- package/dist/chunk-J5MCMN3S.js.map +0 -1
- package/dist/chunk-L4NPOJ36.cjs.map +0 -1
- package/dist/chunk-NZU7W256.cjs +0 -29
- package/dist/chunk-NZU7W256.cjs.map +0 -1
- package/dist/chunk-UF2N5L63.cjs.map +0 -1
- package/dist/chunk-WD2ZNQA7.js.map +0 -1
- package/dist/libav-http-reader-FPYDBMYK.cjs +0 -16
- package/dist/libav-http-reader-NQJVY273.js +0 -3
- package/dist/source-CN43EI7Z.cjs +0 -28
- package/dist/source-FFZ7TW2B.js +0 -3
- package/dist/variant-routing-GOHB2RZN.cjs +0 -12
- package/dist/variant-routing-JOBWXYKD.js +0 -3
package/dist/element-browser.js
CHANGED
@@ -31,6 +31,51 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 mod
 ));
 
+// src/util/transport.ts
+function mergeFetchInit(base, extra) {
+if (!base && !extra) return void 0;
+return {
+...base,
+...extra,
+headers: {
+...base?.headers ?? {},
+...extra?.headers ?? {}
+}
+};
+}
+function fetchWith(transport) {
+return transport?.fetchFn ?? globalThis.fetch;
+}
+var init_transport = __esm({
+"src/util/transport.ts"() {
+}
+});
+
+// src/errors.ts
+var AvbridgeError, ERR_PROBE_FAILED, ERR_PROBE_UNKNOWN_CONTAINER, ERR_PROBE_FETCH_FAILED, ERR_ALL_STRATEGIES_EXHAUSTED, ERR_PLAYER_NOT_READY, ERR_LIBAV_NOT_REACHABLE, ERR_MSE_NOT_SUPPORTED, ERR_MSE_CODEC_NOT_SUPPORTED;
+var init_errors = __esm({
+"src/errors.ts"() {
+AvbridgeError = class extends Error {
+constructor(code, message, recovery, options) {
+super(message, options);
+this.code = code;
+this.recovery = recovery;
+}
+code;
+recovery;
+name = "AvbridgeError";
+};
+ERR_PROBE_FAILED = "ERR_AVBRIDGE_PROBE_FAILED";
+ERR_PROBE_UNKNOWN_CONTAINER = "ERR_AVBRIDGE_PROBE_UNKNOWN_CONTAINER";
+ERR_PROBE_FETCH_FAILED = "ERR_AVBRIDGE_PROBE_FETCH_FAILED";
+ERR_ALL_STRATEGIES_EXHAUSTED = "ERR_AVBRIDGE_ALL_STRATEGIES_EXHAUSTED";
+ERR_PLAYER_NOT_READY = "ERR_AVBRIDGE_PLAYER_NOT_READY";
+ERR_LIBAV_NOT_REACHABLE = "ERR_AVBRIDGE_LIBAV_NOT_REACHABLE";
+ERR_MSE_NOT_SUPPORTED = "ERR_AVBRIDGE_MSE_NOT_SUPPORTED";
+ERR_MSE_CODEC_NOT_SUPPORTED = "ERR_AVBRIDGE_MSE_CODEC_NOT_SUPPORTED";
+}
+});
+
 // src/util/source.ts
 var source_exports = {};
 __export(source_exports, {
@@ -43,7 +88,7 @@ __export(source_exports, {
 function isInMemorySource(source) {
 return source.kind === "blob";
 }
-async function normalizeSource(source) {
+async function normalizeSource(source, transport) {
 if (source instanceof File) {
 return {
 kind: "blob",
@@ -66,22 +111,31 @@ async function normalizeSource(source) {
 }
 if (typeof source === "string" || source instanceof URL) {
 const url2 = source instanceof URL ? source.toString() : source;
-return await fetchUrlForSniff(url2, source);
+return await fetchUrlForSniff(url2, source, transport);
 }
 throw new TypeError("unsupported source type");
 }
-async function fetchUrlForSniff(url2, originalSource) {
+async function fetchUrlForSniff(url2, originalSource, transport) {
 const name = url2.split("/").pop()?.split("?")[0] ?? void 0;
+const doFetch = fetchWith(transport);
 let res;
 try {
-res = await
+res = await doFetch(url2, mergeFetchInit(transport?.requestInit, {
 headers: { Range: `bytes=0-${URL_SNIFF_RANGE_BYTES - 1}` }
-});
+}));
 } catch (err) {
-throw new
+throw new AvbridgeError(
+ERR_PROBE_FETCH_FAILED,
+`Failed to fetch source ${url2}: ${err.message}`,
+"Check that the URL is reachable and CORS is configured. If the source requires authentication, pass requestInit with credentials/headers."
+);
 }
 if (!res.ok && res.status !== 206) {
-throw new
+throw new AvbridgeError(
+ERR_PROBE_FETCH_FAILED,
+`Failed to fetch source ${url2}: ${res.status} ${res.statusText}`,
+res.status === 403 || res.status === 401 ? "The server rejected the request. Pass requestInit with the required Authorization header or credentials." : "Check that the URL is correct and the server is reachable."
+);
 }
 let byteLength;
 const contentRange = res.headers.get("content-range");
@@ -186,6 +240,8 @@ async function readBlobBytes(blob, limit) {
 var SNIFF_BYTES_NEEDED, URL_SNIFF_RANGE_BYTES;
 var init_source = __esm({
 "src/util/source.ts"() {
+init_transport();
+init_errors();
 SNIFF_BYTES_NEEDED = 380;
 URL_SNIFF_RANGE_BYTES = 32 * 1024;
 }
@@ -29453,9 +29509,12 @@ __export(libav_http_reader_exports, {
 attachLibavHttpReader: () => attachLibavHttpReader,
 prepareLibavInput: () => prepareLibavInput
 });
-async function prepareLibavInput(libav, filename, source) {
+async function prepareLibavInput(libav, filename, source, transport) {
 if (source.kind === "url") {
-const handle = await attachLibavHttpReader(libav, filename, source.url
+const handle = await attachLibavHttpReader(libav, filename, source.url, {
+requestInit: transport?.requestInit,
+fetchFn: transport?.fetchFn
+});
 return {
 filename,
 transport: "http-range",
@@ -30009,6 +30068,12 @@ function ffmpegToAvbridgeAudio(name) {
 return "sipr";
 case "atrac3":
 return "atrac3";
+case "dca":
+case "dts":
+return "dts";
+case "truehd":
+case "mlp":
+return "truehd";
 default:
 return name;
 }
@@ -30020,6 +30085,60 @@ var init_avi = __esm({
 }
 });
 
+// src/subtitles/render.ts
+function parseVtt(text) {
+const cues = [];
+const blocks = text.replace(/\r\n/g, "\n").split(/\n{2,}/);
+for (const block of blocks) {
+const lines = block.split("\n").filter(Boolean);
+if (lines.length === 0 || lines[0] === "WEBVTT") continue;
+const timingIdx = lines.findIndex((l) => l.includes("-->"));
+if (timingIdx < 0) continue;
+const m = /(\d{2}):(\d{2}):(\d{2})\.(\d{3})\s*-->\s*(\d{2}):(\d{2}):(\d{2})\.(\d{3})/.exec(
+lines[timingIdx]
+);
+if (!m) continue;
+const t = (h, mm, s, ms) => Number(h) * 3600 + Number(mm) * 60 + Number(s) + Number(ms) / 1e3;
+cues.push({
+start: t(m[1], m[2], m[3], m[4]),
+end: t(m[5], m[6], m[7], m[8]),
+text: lines.slice(timingIdx + 1).join("\n")
+});
+}
+return cues;
+}
+var SubtitleOverlay;
+var init_render = __esm({
+"src/subtitles/render.ts"() {
+SubtitleOverlay = class {
+el;
+cues = [];
+constructor(parent) {
+this.el = document.createElement("div");
+this.el.style.cssText = "position:absolute;left:0;right:0;bottom:8%;text-align:center;color:white;text-shadow:0 0 4px black;font-family:sans-serif;font-size:1.4em;pointer-events:none;";
+parent.appendChild(this.el);
+}
+loadVtt(text) {
+this.cues = parseVtt(text);
+}
+update(currentTime) {
+const active = this.cues.find((c) => currentTime >= c.start && currentTime <= c.end);
+this.el.textContent = active?.text ?? "";
+}
+/** Set the currently-displayed text directly (bypasses loadVtt/update). */
+setText(text) {
+if (this.el.textContent !== text) {
+this.el.textContent = text;
+}
+}
+destroy() {
+this.el.remove();
+this.cues = [];
+}
+};
+}
+});
+
 // node_modules/libavjs-webcodecs-bridge/dist/libavjs-webcodecs-bridge.mjs
 var libavjs_webcodecs_bridge_exports = {};
 __export(libavjs_webcodecs_bridge_exports, {
@@ -30937,6 +31056,145 @@ var init_libav_import = __esm({
 }
 });
 
+// src/subtitles/srt.ts
+function srtToVtt(srt) {
+if (srt.charCodeAt(0) === 65279) srt = srt.slice(1);
+const normalized = srt.replace(/\r\n/g, "\n").replace(/\r/g, "\n").trim();
+const blocks = normalized.split(/\n{2,}/);
+const out = ["WEBVTT", ""];
+for (const block of blocks) {
+const lines = block.split("\n");
+if (lines.length > 0 && /^\d+$/.test(lines[0].trim())) {
+lines.shift();
+}
+if (lines.length === 0) continue;
+const timing = lines.shift();
+const vttTiming = convertTiming(timing);
+if (!vttTiming) continue;
+out.push(vttTiming);
+for (const l of lines) out.push(l);
+out.push("");
+}
+return out.join("\n");
+}
+function convertTiming(line) {
+const m = /^(\d{1,2}):(\d{2}):(\d{2})[,.](\d{1,3})\s*-->\s*(\d{1,2}):(\d{2}):(\d{2})[,.](\d{1,3})(.*)$/.exec(
+line.trim()
+);
+if (!m) return null;
+const fmt2 = (h, mm, s, ms) => `${h.padStart(2, "0")}:${mm}:${s}.${ms.padEnd(3, "0").slice(0, 3)}`;
+return `${fmt2(m[1], m[2], m[3], m[4])} --> ${fmt2(m[5], m[6], m[7], m[8])}${m[9] ?? ""}`;
+}
+var init_srt = __esm({
+"src/subtitles/srt.ts"() {
+}
+});
+
+// src/subtitles/vtt.ts
+function isVtt(text) {
+const trimmed = text.replace(/^\ufeff/, "").trimStart();
+return trimmed.startsWith("WEBVTT");
+}
+var init_vtt = __esm({
+"src/subtitles/vtt.ts"() {
+}
+});
+
+// src/subtitles/index.ts
+var subtitles_exports = {};
+__export(subtitles_exports, {
+SubtitleOverlay: () => SubtitleOverlay,
+SubtitleResourceBag: () => SubtitleResourceBag,
+attachSubtitleTracks: () => attachSubtitleTracks,
+discoverSidecars: () => discoverSidecars,
+srtToVtt: () => srtToVtt
+});
+async function discoverSidecars(file, directory) {
+const baseName = file.name.replace(/\.[^.]+$/, "");
+const found = [];
+for await (const [name, handle] of directory) {
+if (handle.kind !== "file") continue;
+if (!name.startsWith(baseName)) continue;
+const lower = name.toLowerCase();
+let format = null;
+if (lower.endsWith(".srt")) format = "srt";
+else if (lower.endsWith(".vtt")) format = "vtt";
+if (!format) continue;
+const sidecarFile = await handle.getFile();
+const url2 = URL.createObjectURL(sidecarFile);
+const langMatch = name.slice(baseName.length).match(/[._-]([a-z]{2,3})(?:[._-]|\.)/i);
+found.push({
+url: url2,
+format,
+language: langMatch?.[1]
+});
+}
+return found;
+}
+async function attachSubtitleTracks(video, tracks, bag, onError, transport) {
+const doFetch = fetchWith(transport);
+for (const t of Array.from(video.querySelectorAll("track[data-avbridge]"))) {
+t.remove();
+}
+for (const t of tracks) {
+if (!t.sidecarUrl) continue;
+try {
+let url2 = t.sidecarUrl;
+if (t.format === "srt") {
+const res = await doFetch(t.sidecarUrl, transport?.requestInit);
+const text = await res.text();
+const vtt = srtToVtt(text);
+const blob = new Blob([vtt], { type: "text/vtt" });
+url2 = bag ? bag.createObjectURL(blob) : URL.createObjectURL(blob);
+} else if (t.format === "vtt") {
+const res = await doFetch(t.sidecarUrl, transport?.requestInit);
+const text = await res.text();
+if (!isVtt(text)) {
+console.warn("[avbridge] subtitle missing WEBVTT header:", t.sidecarUrl);
+}
+}
+const trackEl = document.createElement("track");
+trackEl.kind = "subtitles";
+trackEl.src = url2;
+trackEl.srclang = t.language ?? "und";
+trackEl.label = t.language ?? `Subtitle ${t.id}`;
+trackEl.dataset.avbridge = "true";
+video.appendChild(trackEl);
+} catch (err) {
+const e = err instanceof Error ? err : new Error(String(err));
+onError?.(e, t);
+}
+}
+}
+var SubtitleResourceBag;
+var init_subtitles2 = __esm({
+"src/subtitles/index.ts"() {
+init_transport();
+init_srt();
+init_vtt();
+init_srt();
+init_render();
+SubtitleResourceBag = class {
+urls = /* @__PURE__ */ new Set();
+/** Track an externally-created blob URL (e.g. from `discoverSidecars`). */
+track(url2) {
+this.urls.add(url2);
+}
+/** Convenience: create a blob URL and track it in one call. */
+createObjectURL(blob) {
+const url2 = URL.createObjectURL(blob);
+this.urls.add(url2);
+return url2;
+}
+/** Revoke every tracked URL. Idempotent — safe to call multiple times. */
+revokeAll() {
+for (const u of this.urls) URL.revokeObjectURL(u);
+this.urls.clear();
+}
+};
+}
+});
+
 // src/events.ts
 var TypedEmitter = class {
 listeners = {};
@@ -31152,6 +31410,7 @@ async function safe(fn) {
 }
 
 // src/probe/index.ts
+init_errors();
 var MEDIABUNNY_CONTAINERS = /* @__PURE__ */ new Set([
 "mp4",
 "mov",
@@ -31164,12 +31423,22 @@ var MEDIABUNNY_CONTAINERS = /* @__PURE__ */ new Set([
 "adts",
 "mpegts"
 ]);
-async function probe(source) {
-const normalized = await normalizeSource(source);
+async function probe(source, transport) {
+const normalized = await normalizeSource(source, transport);
 const sniffed = await sniffNormalizedSource(normalized);
 if (MEDIABUNNY_CONTAINERS.has(sniffed)) {
 try {
-
+const result = await probeWithMediabunny(normalized, sniffed);
+const hasUnknownCodec = result.videoTracks.some((t) => t.codec === "unknown") || result.audioTracks.some((t) => t.codec === "unknown");
+if (hasUnknownCodec) {
+try {
+const { probeWithLibav: probeWithLibav2 } = await Promise.resolve().then(() => (init_avi(), avi_exports));
+return await probeWithLibav2(normalized, sniffed);
+} catch {
+return result;
+}
+}
+return result;
 } catch (mediabunnyErr) {
 console.warn(
 `[avbridge] mediabunny rejected ${sniffed} file, falling back to libav:`,
@@ -31181,8 +31450,10 @@ async function probe(source) {
 } catch (libavErr) {
 const mbMsg = mediabunnyErr.message || String(mediabunnyErr);
 const lvMsg = libavErr instanceof Error ? libavErr.message : String(libavErr);
-throw new
-
+throw new AvbridgeError(
+ERR_PROBE_FAILED,
+`Failed to probe ${sniffed.toUpperCase()} file. mediabunny: ${mbMsg}. libav: ${lvMsg}.`,
+"The file may be corrupt, truncated, or in an unsupported format. Enable AVBRIDGE_DEBUG for detailed logs."
 );
 }
 }
@@ -31193,8 +31464,17 @@ async function probe(source) {
 } catch (err) {
 const inner = err instanceof Error ? err.message : String(err);
 console.error("[avbridge] libav probe failed for", sniffed, "file:", err);
-
-
+if (sniffed === "unknown") {
+throw new AvbridgeError(
+ERR_PROBE_UNKNOWN_CONTAINER,
+`Unable to probe source: container format could not be identified. libav fallback: ${inner || "(no details)"}`,
+"The file may be corrupt or in a format avbridge doesn't recognize. Check the file plays in VLC or ffprobe."
+);
+}
+throw new AvbridgeError(
+ERR_LIBAV_NOT_REACHABLE,
+`${sniffed.toUpperCase()} files require libav.js, which failed to load: ${inner || "(no details)"}`,
+"Install @libav.js/variant-webcodecs, or check that AVBRIDGE_LIBAV_BASE points to the correct path."
 );
 }
 }
@@ -31295,7 +31575,9 @@ var FALLBACK_AUDIO_CODECS = /* @__PURE__ */ new Set([
 "ra_144",
 "ra_288",
 "sipr",
-"atrac3"
+"atrac3",
+"dts",
+"truehd"
 ]);
 var NATIVE_CONTAINERS = /* @__PURE__ */ new Set([
 "mp4",
@@ -31357,7 +31639,16 @@ function classifyContext(ctx) {
 reason: `video codec "${video.codec}" has no browser decoder; WASM fallback required`
 };
 }
-
+const audioNeedsFallback = audio && (FALLBACK_AUDIO_CODECS.has(audio.codec) || !NATIVE_AUDIO_CODECS.has(audio.codec));
+if (audioNeedsFallback) {
+if (NATIVE_VIDEO_CODECS.has(video.codec) && webCodecsAvailable()) {
+return {
+class: "HYBRID_CANDIDATE",
+strategy: "hybrid",
+reason: `video "${video.codec}" is hardware-decodable via WebCodecs; audio "${audio.codec}" decoded in software by libav`,
+fallbackChain: ["fallback"]
+};
+}
 return {
 class: "FALLBACK_REQUIRED",
 strategy: "fallback",
@@ -31594,7 +31885,7 @@ async function createNativeSession(context, video) {
 },
 async setAudioTrack(id) {
 const tracks = video.audioTracks;
-if (!tracks) return;
+if (!tracks || tracks.length === 0) return;
 for (let i = 0; i < tracks.length; i++) {
 tracks[i].enabled = tracks[i].id === String(id) || i === id;
 }
@@ -31642,14 +31933,23 @@ function sourceToVideoUrl(source) {
 }
 
 // src/strategies/remux/mse.ts
+init_errors();
 var MseSink = class {
 constructor(options) {
 this.options = options;
 if (typeof MediaSource === "undefined") {
-throw new
+throw new AvbridgeError(
+ERR_MSE_NOT_SUPPORTED,
+"MediaSource Extensions (MSE) are not supported in this environment.",
+"MSE is required for the remux strategy. Use a browser that supports MSE, or try the fallback strategy."
+);
 }
 if (!MediaSource.isTypeSupported(options.mime)) {
-throw new
+throw new AvbridgeError(
+ERR_MSE_CODEC_NOT_SUPPORTED,
+`This browser's MSE does not support "${options.mime}".`,
+"The codec combination can't be played via remux in this browser. The player will try the next strategy automatically."
+);
 }
 this.mediaSource = new MediaSource();
 this.objectUrl = URL.createObjectURL(this.mediaSource);
@@ -31834,30 +32134,49 @@ var MseSink = class {
 async function createRemuxPipeline(ctx, video) {
 const mb = await Promise.resolve().then(() => (init_src(), src_exports));
 const videoTrackInfo = ctx.videoTracks[0];
-const audioTrackInfo = ctx.audioTracks[0];
 if (!videoTrackInfo) throw new Error("remux: source has no video track");
 const mbVideoCodec = avbridgeVideoToMediabunny(videoTrackInfo.codec);
 if (!mbVideoCodec) {
 throw new Error(`remux: video codec "${videoTrackInfo.codec}" is not supported by mediabunny output`);
 }
-const mbAudioCodec = audioTrackInfo ? avbridgeAudioToMediabunny(audioTrackInfo.codec) : null;
 const input = new mb.Input({
 source: await buildMediabunnySourceFromInput(mb, ctx.source),
 formats: mb.ALL_FORMATS
 });
 const allTracks = await input.getTracks();
 const inputVideo = allTracks.find((t) => t.id === videoTrackInfo.id && t.isVideoTrack());
-const inputAudio = audioTrackInfo ? allTracks.find((t) => t.id === audioTrackInfo.id && t.isAudioTrack()) : null;
 if (!inputVideo || !inputVideo.isVideoTrack()) {
 throw new Error("remux: video track not found in input");
 }
-if (audioTrackInfo && (!inputAudio || !inputAudio.isAudioTrack())) {
-throw new Error("remux: audio track not found in input");
-}
 const videoConfig = await inputVideo.getDecoderConfig();
-const audioConfig = inputAudio && inputAudio.isAudioTrack() ? await inputAudio.getDecoderConfig() : null;
 const videoSink = new mb.EncodedPacketSink(inputVideo);
-
+let selectedAudioTrackId = ctx.audioTracks[0]?.id ?? null;
+let inputAudio = null;
+let mbAudioCodec = null;
+let audioSink = null;
+let audioConfig = null;
+async function rebuildAudio() {
+if (selectedAudioTrackId == null) {
+inputAudio = null;
+mbAudioCodec = null;
+audioSink = null;
+audioConfig = null;
+return;
+}
+const trackInfo = ctx.audioTracks.find((t) => t.id === selectedAudioTrackId);
+if (!trackInfo) {
+throw new Error(`remux: no audio track with id ${selectedAudioTrackId}`);
+}
+const newInput = allTracks.find((t) => t.id === trackInfo.id && t.isAudioTrack());
+if (!newInput || !newInput.isAudioTrack()) {
+throw new Error("remux: audio track not found in input");
+}
+inputAudio = newInput;
+mbAudioCodec = avbridgeAudioToMediabunny(trackInfo.codec);
+audioSink = new mb.EncodedPacketSink(newInput);
+audioConfig = await newInput.getDecoderConfig();
+}
+await rebuildAudio();
 let sink = null;
 const stats = { videoPackets: 0, audioPackets: 0, bytesWritten: 0, fragments: 0 };
 let destroyed = false;
@@ -31982,6 +32301,30 @@ async function createRemuxPipeline(ctx, video) {
 pendingAutoPlay = autoPlay;
 if (sink) sink.setPlayOnSeek(autoPlay);
 },
+async setAudioTrack(trackId, time, autoPlay) {
+if (selectedAudioTrackId === trackId) return;
+if (!ctx.audioTracks.some((t) => t.id === trackId)) {
+console.warn("[avbridge] remux: setAudioTrack \u2014 unknown track id", trackId);
+return;
+}
+pumpToken++;
+selectedAudioTrackId = trackId;
+await rebuildAudio().catch((err) => {
+console.warn("[avbridge] remux: rebuildAudio failed:", err.message);
+});
+if (sink) {
+try {
+sink.destroy();
+} catch {
+}
+sink = null;
+}
+pendingAutoPlay = autoPlay;
+pendingStartTime = time;
+pumpLoop(++pumpToken, time).catch((err) => {
+console.error("[avbridge] remux pipeline setAudioTrack pump failed:", err);
+});
+},
 async destroy() {
 destroyed = true;
 pumpToken++;
@@ -32041,7 +32384,19 @@ async function createRemuxSession(context, video) {
 const wasPlaying = !video.paused;
 await pipeline.seek(time, wasPlaying || wantPlay);
 },
-async setAudioTrack(
+async setAudioTrack(id) {
+if (!context.audioTracks.some((t) => t.id === id)) {
+console.warn("[avbridge] remux: setAudioTrack \u2014 unknown track id", id);
+return;
+}
+const wasPlaying = !video.paused;
+const time = video.currentTime || 0;
+if (!started) {
+started = true;
+await pipeline.setAudioTrack(id, time, wantPlay || wasPlaying);
+return;
+}
+await pipeline.setAudioTrack(id, time, wasPlaying || wantPlay);
 },
 async setSubtitleTrack(id) {
 const tracks = video.textTracks;
@@ -32065,6 +32420,11 @@ async function createRemuxSession(context, video) {
 }
 
 // src/strategies/fallback/video-renderer.ts
+init_render();
+function isDebug() {
+return typeof globalThis !== "undefined" && !!globalThis.AVBRIDGE_DEBUG;
+}
+var lastDebugLog = 0;
 var VideoRenderer = class {
 constructor(target, clock, fps = 30) {
 this.target = target;
@@ -32090,6 +32450,9 @@ var VideoRenderer = class {
 document.body.appendChild(this.canvas);
 }
 target.style.visibility = "hidden";
+const overlayParent = parent instanceof HTMLElement ? parent : document.body;
+this.subtitleOverlay = new SubtitleOverlay(overlayParent);
+this.watchTextTracks(target);
 const ctx = this.canvas.getContext("2d");
 if (!ctx) throw new Error("video renderer: failed to acquire 2D context");
 this.ctx = ctx;
@@ -32111,6 +32474,29 @@ var VideoRenderer = class {
 lastPaintWall = 0;
 /** Minimum ms between paints — paces video at roughly source fps. */
 paintIntervalMs;
+/** Cumulative count of frames skipped because all PTS are in the future. */
+ticksWaiting = 0;
+/** Cumulative count of ticks where PTS mode painted a frame. */
+ticksPainted = 0;
+/**
+* Subtitle overlay div attached to the stage wrapper alongside the
+* canvas. Created lazily when subtitle tracks are attached via the
+* target's `<track>` children. Canvas strategies (hybrid, fallback)
+* hide the <video>, so we can't rely on the browser's native cue
+* rendering; we read TextTrack.cues and render into this overlay.
+*/
+subtitleOverlay = null;
+subtitleTrack = null;
+/**
+* Calibration offset (microseconds) between video PTS and audio clock.
+* Video PTS and AudioContext.currentTime can drift ~0.1% relative to
+* each other (different clock domains). Over 45 minutes that's 2.6s.
+* We measure the offset on the first painted frame and update it
+* periodically so the PTS comparison stays calibrated.
+*/
+ptsCalibrationUs = 0;
+ptsCalibrated = false;
+lastCalibrationWall = 0;
 /** Resolves once the first decoded frame has been enqueued. */
 firstFrameReady;
 resolveFirstFrame;
@@ -32144,9 +32530,80 @@ var VideoRenderer = class {
 this.framesDroppedOverflow++;
 }
 }
+/**
+* Watch the target <video>'s textTracks list. When a track is added,
+* grab it and start polling cues on each render tick. Existing tracks
+* (if any) are picked up immediately.
+*/
+watchTextTracks(target) {
+const pick = () => {
+if (this.subtitleTrack) return;
+const tracks = target.textTracks;
+if (isDebug()) {
+console.log(`[avbridge:subs] watchTextTracks pick() \u2014 ${tracks.length} tracks`);
+}
+for (let i = 0; i < tracks.length; i++) {
+const t = tracks[i];
+if (isDebug()) {
+console.log(`[avbridge:subs] track ${i}: kind=${t.kind} mode=${t.mode} cues=${t.cues?.length ?? 0}`);
+}
+if (t.kind === "subtitles" || t.kind === "captions") {
+this.subtitleTrack = t;
+t.mode = "hidden";
+if (isDebug()) {
+console.log(`[avbridge:subs] picked track, mode=hidden`);
+}
+const trackEl = target.querySelector(`track[srclang="${t.language}"]`);
+if (trackEl) {
+trackEl.addEventListener("load", () => {
+if (isDebug()) {
+console.log(`[avbridge:subs] track element loaded, cues=${t.cues?.length ?? 0}`);
+}
+});
+trackEl.addEventListener("error", (ev) => {
+console.warn(`[avbridge:subs] track element error:`, ev);
+});
+}
+break;
+}
+}
+};
+pick();
+if (typeof target.textTracks.addEventListener === "function") {
+target.textTracks.addEventListener("addtrack", (e) => {
+if (isDebug()) {
+console.log("[avbridge:subs] addtrack event fired");
+}
+pick();
+});
+}
+}
+_loggedCues = false;
+/** Find the active cue (if any) for the given media time. */
+updateSubtitles() {
+if (!this.subtitleOverlay || !this.subtitleTrack) return;
+const cues = this.subtitleTrack.cues;
+if (!cues || cues.length === 0) return;
+if (isDebug() && !this._loggedCues) {
+this._loggedCues = true;
+console.log(`[avbridge:subs] cues available: ${cues.length}, first start=${cues[0].startTime}, last end=${cues[cues.length - 1].endTime}`);
+}
+const t = this.clock.now();
+let activeText = "";
+for (let i = 0; i < cues.length; i++) {
+const c = cues[i];
+if (t >= c.startTime && t <= c.endTime) {
+const vttCue = c;
+activeText = vttCue.text ?? "";
+break;
+}
+}
+this.subtitleOverlay.setText(activeText.replace(/<[^>]+>/g, ""));
+}
 tick() {
 if (this.destroyed) return;
 this.rafHandle = requestAnimationFrame(this.tick);
+this.updateSubtitles();
 if (this.queue.length === 0) return;
 const playing = this.clock.isPlaying();
 if (!playing) {
@@ -32159,21 +32616,81 @@ var VideoRenderer = class {
 }
 return;
 }
-const
-
-
-if (
-const
-
-
-
-this.
-
-
-
+const rawAudioNowUs = this.clock.now() * 1e6;
+const headTs = this.queue[0].timestamp ?? 0;
+const hasPts = headTs > 0 || this.queue.length > 1;
+if (hasPts) {
+const wallNow2 = performance.now();
+if (!this.ptsCalibrated || wallNow2 - this.lastCalibrationWall > 1e4) {
+this.ptsCalibrationUs = headTs - rawAudioNowUs;
+this.ptsCalibrated = true;
+this.lastCalibrationWall = wallNow2;
+}
+const audioNowUs = rawAudioNowUs + this.ptsCalibrationUs;
+const frameDurationUs = this.paintIntervalMs * 1e3;
+const deadlineUs = audioNowUs + frameDurationUs;
+let bestIdx = -1;
+for (let i = 0; i < this.queue.length; i++) {
+const ts = this.queue[i].timestamp ?? 0;
+if (ts <= deadlineUs) {
+bestIdx = i;
+} else {
+break;
+}
+}
+if (bestIdx < 0) {
+this.ticksWaiting++;
+if (isDebug()) {
+const now = performance.now();
+if (now - lastDebugLog > 1e3) {
+const headPtsMs = (headTs / 1e3).toFixed(1);
+const audioMs = (audioNowUs / 1e3).toFixed(1);
+const rawDriftMs = ((headTs - rawAudioNowUs) / 1e3).toFixed(1);
+const calibMs = (this.ptsCalibrationUs / 1e3).toFixed(1);
+console.log(
+`[avbridge:renderer] WAIT q=${this.queue.length} headPTS=${headPtsMs}ms calibAudio=${audioMs}ms rawDrift=${rawDriftMs}ms calib=${calibMs}ms painted=${this.framesPainted} dropped=${this.framesDroppedLate}`
+);
+lastDebugLog = now;
+}
+}
 return;
 }
+const dropThresholdUs = audioNowUs - frameDurationUs * 2;
+let dropped = 0;
+while (bestIdx > 0) {
+const ts = this.queue[0].timestamp ?? 0;
+if (ts < dropThresholdUs) {
+this.queue.shift()?.close();
+this.framesDroppedLate++;
+bestIdx--;
+dropped++;
+} else {
+break;
+}
+}
+this.ticksPainted++;
+if (isDebug()) {
+const now = performance.now();
+if (now - lastDebugLog > 1e3) {
+const paintedTs = this.queue[0]?.timestamp ?? 0;
+const audioMs = (audioNowUs / 1e3).toFixed(1);
+const ptsMs = (paintedTs / 1e3).toFixed(1);
+const rawDriftMs = ((paintedTs - rawAudioNowUs) / 1e3).toFixed(1);
+const calibMs = (this.ptsCalibrationUs / 1e3).toFixed(1);
+console.log(
+`[avbridge:renderer] PAINT q=${this.queue.length} calibAudio=${audioMs}ms nextPTS=${ptsMs}ms rawDrift=${rawDriftMs}ms calib=${calibMs}ms dropped=${dropped} total_drops=${this.framesDroppedLate} painted=${this.framesPainted}`
+);
+lastDebugLog = now;
+}
+}
+const frame2 = this.queue.shift();
+this.paint(frame2);
+frame2.close();
+this.lastPaintWall = performance.now();
+return;
 }
+const wallNow = performance.now();
+if (wallNow - this.lastPaintWall < this.paintIntervalMs - 2) return;
 const frame = this.queue.shift();
 this.paint(frame);
 frame.close();
@@ -32195,8 +32712,13 @@ var VideoRenderer = class {
 }
 /** Discard all queued frames. Used by seek to drop stale buffers. */
 flush() {
+const count = this.queue.length;
 while (this.queue.length > 0) this.queue.shift()?.close();
 this.prerolled = false;
+this.ptsCalibrated = false;
+if (isDebug() && count > 0) {
+console.log(`[avbridge:renderer] FLUSH discarded=${count} painted=${this.framesPainted} drops=${this.framesDroppedLate}`);
+}
 }
 stats() {
 return {
@@ -32210,6 +32732,11 @@ var VideoRenderer = class {
 this.destroyed = true;
 if (this.rafHandle != null) cancelAnimationFrame(this.rafHandle);
 this.flush();
+if (this.subtitleOverlay) {
+this.subtitleOverlay.destroy();
+this.subtitleOverlay = null;
+}
+this.subtitleTrack = null;
 this.canvas.remove();
 this.target.style.visibility = "";
 }
@@ -32241,11 +32768,38 @@ var AudioOutput = class {
 pendingQueue = [];
 framesScheduled = 0;
 destroyed = false;
+/** User-set volume (0..1). Applied to the gain node. */
+_volume = 1;
+/** User-set muted flag. When true, gain is forced to 0. */
+_muted = false;
 constructor() {
 this.ctx = new AudioContext();
 this.gain = this.ctx.createGain();
 this.gain.connect(this.ctx.destination);
 }
+/** Set volume (0..1). Applied immediately to the gain node. */
+setVolume(v) {
+this._volume = Math.max(0, Math.min(1, v));
+this.applyGain();
+}
+getVolume() {
+return this._volume;
+}
+/** Set muted. When true, output is silenced regardless of volume. */
+setMuted(m) {
+this._muted = m;
+this.applyGain();
+}
+getMuted() {
+return this._muted;
+}
+applyGain() {
+const target = this._muted ? 0 : this._volume;
+try {
+this.gain.gain.value = target;
+} catch {
+}
+}
 /**
 * Switch into wall-clock fallback mode. Called by the decoder when no
 * audio decoder could be initialized for the source. Once set, this
@@ -32395,6 +32949,7 @@ var AudioOutput = class {
 }
 this.gain = this.ctx.createGain();
 this.gain.connect(this.ctx.destination);
+this.applyGain();
 this.pendingQueue = [];
 this.mediaTimeOfAnchor = newMediaTime;
 this.mediaTimeOfNext = newMediaTime;
@@ -32423,42 +32978,189 @@ var AudioOutput = class {
|
|
|
32423
32978
|
|
|
32424
32979
|
// src/strategies/hybrid/decoder.ts
|
|
32425
32980
|
init_libav_loader();
|
|
32981
|
+
init_debug();
|
|
32426
32982
|
|
|
32427
32983
|
// src/strategies/fallback/variant-routing.ts
|
|
32428
32984
|
var LEGACY_CONTAINERS = /* @__PURE__ */ new Set(["avi", "asf", "flv"]);
|
|
32429
|
-
var
|
|
32430
|
-
|
|
32431
|
-
"vc1",
|
|
32432
|
-
"mpeg4",
|
|
32433
|
-
// MPEG-4 Part 2 / DivX / Xvid
|
|
32434
|
-
"rv40",
|
|
32435
|
-
"mpeg2",
|
|
32436
|
-
"mpeg1",
|
|
32437
|
-
"theora"
|
|
32438
|
-
]);
|
|
32439
|
-
var LEGACY_AUDIO_CODECS = /* @__PURE__ */ new Set(["wmav2", "wmapro", "ac3", "eac3"]);
|
|
32985
|
+
var WEBCODECS_AUDIO = /* @__PURE__ */ new Set(["aac", "mp3", "opus", "vorbis", "flac"]);
|
|
32986
|
+
var WEBCODECS_VIDEO = /* @__PURE__ */ new Set(["h264", "h265", "vp8", "vp9", "av1"]);
|
|
32440
32987
|
function pickLibavVariant(ctx) {
|
|
32441
32988
|
if (LEGACY_CONTAINERS.has(ctx.container)) return "avbridge";
|
|
32442
32989
|
for (const v of ctx.videoTracks) {
|
|
32443
|
-
if (
|
|
32990
|
+
if (!WEBCODECS_VIDEO.has(v.codec)) return "avbridge";
|
|
32444
32991
|
}
|
|
32445
32992
|
for (const a of ctx.audioTracks) {
|
|
32446
|
-
if (
|
|
32993
|
+
if (!WEBCODECS_AUDIO.has(a.codec)) return "avbridge";
|
|
32447
32994
|
}
|
|
32448
32995
|
return "webcodecs";
|
|
32449
32996
|
}
|
|
32450
32997
|
|
|
32998
|
+
// src/util/libav-demux.ts
|
|
32999
|
+
function sanitizePacketTimestamp(pkt, nextUs, fallbackTimeBase) {
|
|
33000
|
+
const lo = pkt.pts ?? 0;
|
|
33001
|
+
const hi = pkt.ptshi ?? 0;
|
|
33002
|
+
const isInvalid = hi === -2147483648 && lo === 0 || !Number.isFinite(lo);
|
|
33003
|
+
if (isInvalid) {
|
|
33004
|
+
const us2 = nextUs();
|
|
33005
|
+
pkt.pts = us2;
|
|
33006
|
+
pkt.ptshi = 0;
|
|
33007
|
+
pkt.time_base_num = 1;
|
|
33008
|
+
pkt.time_base_den = 1e6;
|
|
33009
|
+
return;
|
|
33010
|
+
}
|
|
33011
|
+
const tb = fallbackTimeBase ?? [1, 1e6];
|
|
33012
|
+
const pts64 = hi * 4294967296 + lo;
|
|
33013
|
+
const us = Math.round(pts64 * 1e6 * tb[0] / tb[1]);
|
|
33014
|
+
if (Number.isFinite(us) && Math.abs(us) <= Number.MAX_SAFE_INTEGER) {
|
|
33015
|
+
pkt.pts = us;
|
|
33016
|
+
pkt.ptshi = us < 0 ? -1 : 0;
|
|
33017
|
+
pkt.time_base_num = 1;
|
|
33018
|
+
pkt.time_base_den = 1e6;
|
|
33019
|
+
return;
|
|
33020
|
+
}
|
|
33021
|
+
const fallback = nextUs();
|
|
33022
|
+
pkt.pts = fallback;
|
|
33023
|
+
pkt.ptshi = 0;
|
|
33024
|
+
pkt.time_base_num = 1;
|
|
33025
|
+
pkt.time_base_den = 1e6;
|
|
33026
|
+
}
|
|
33027
|
+
var AV_SAMPLE_FMT_U8 = 0;
|
|
33028
|
+
var AV_SAMPLE_FMT_S16 = 1;
|
|
33029
|
+
var AV_SAMPLE_FMT_S32 = 2;
|
|
33030
|
+
var AV_SAMPLE_FMT_FLT = 3;
|
|
33031
|
+
var AV_SAMPLE_FMT_U8P = 5;
|
|
33032
|
+
var AV_SAMPLE_FMT_S16P = 6;
|
|
33033
|
+
var AV_SAMPLE_FMT_S32P = 7;
|
|
33034
|
+
var AV_SAMPLE_FMT_FLTP = 8;
|
|
33035
|
+
function libavFrameToInterleavedFloat32(frame) {
|
|
33036
|
+
const channels = frame.channels ?? frame.ch_layout_nb_channels ?? 1;
|
|
33037
|
+
const sampleRate = frame.sample_rate ?? 44100;
|
|
33038
|
+
const nbSamples = frame.nb_samples ?? 0;
|
|
33039
|
+
if (nbSamples === 0) return null;
|
|
33040
|
+
const out = new Float32Array(nbSamples * channels);
|
|
33041
|
+
switch (frame.format) {
|
|
33042
|
+
case AV_SAMPLE_FMT_FLTP: {
|
|
33043
|
+
const planes = ensurePlanes(frame.data, channels);
|
|
33044
|
+
for (let ch = 0; ch < channels; ch++) {
|
|
33045
|
+
const plane = asFloat32(planes[ch]);
|
|
33046
|
+
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i];
|
|
33047
|
+
}
|
|
33048
|
+
return { data: out, channels, sampleRate };
|
|
33049
|
+
}
|
|
33050
|
+
case AV_SAMPLE_FMT_FLT: {
|
|
33051
|
+
const flat = asFloat32(frame.data);
|
|
33052
|
+
for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i];
|
|
33053
|
+
return { data: out, channels, sampleRate };
|
|
33054
|
+
}
|
|
33055
|
+
case AV_SAMPLE_FMT_S16P: {
|
|
33056
|
+
const planes = ensurePlanes(frame.data, channels);
|
|
33057
|
+
for (let ch = 0; ch < channels; ch++) {
|
|
33058
|
+
const plane = asInt16(planes[ch]);
|
|
33059
|
+
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 32768;
|
|
33060
|
+
}
|
|
33061
|
+
return { data: out, channels, sampleRate };
|
|
33062
|
+
}
|
|
33063
|
+
case AV_SAMPLE_FMT_S16: {
|
|
33064
|
+
const flat = asInt16(frame.data);
|
|
33065
|
+
for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 32768;
|
|
33066
|
+
return { data: out, channels, sampleRate };
|
|
33067
|
+
}
|
|
33068
|
+
case AV_SAMPLE_FMT_S32P: {
|
|
33069
|
+
const planes = ensurePlanes(frame.data, channels);
|
|
33070
|
+
for (let ch = 0; ch < channels; ch++) {
|
|
33071
|
+
const plane = asInt32(planes[ch]);
|
|
33072
|
+
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 2147483648;
|
|
33073
|
+
}
|
|
33074
|
+
return { data: out, channels, sampleRate };
|
|
33075
|
+
}
|
|
33076
|
+
case AV_SAMPLE_FMT_S32: {
|
|
33077
|
+
const flat = asInt32(frame.data);
|
|
33078
|
+
for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 2147483648;
|
|
33079
|
+
return { data: out, channels, sampleRate };
|
|
33080
|
+
}
|
|
33081
|
+
case AV_SAMPLE_FMT_U8P: {
|
|
33082
|
+
const planes = ensurePlanes(frame.data, channels);
|
|
33083
|
+
for (let ch = 0; ch < channels; ch++) {
|
|
33084
|
+
const plane = asUint8(planes[ch]);
|
|
33085
|
+
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = (plane[i] - 128) / 128;
|
|
33086
|
+
}
|
|
33087
|
+
return { data: out, channels, sampleRate };
|
|
33088
|
+
}
|
|
33089
|
+
case AV_SAMPLE_FMT_U8: {
|
|
33090
|
+
const flat = asUint8(frame.data);
|
|
33091
|
+
for (let i = 0; i < nbSamples * channels; i++) out[i] = (flat[i] - 128) / 128;
|
|
33092
|
+
return { data: out, channels, sampleRate };
|
|
33093
|
+
}
|
|
33094
|
+
default:
|
|
33095
|
+
return null;
|
|
33096
|
+
}
|
|
33097
|
+
}
|
|
33098
|
+
function ensurePlanes(data, channels) {
|
|
33099
|
+
if (Array.isArray(data)) return data;
|
|
33100
|
+
const arr = data;
|
|
33101
|
+
const len = arr.length;
|
|
33102
|
+
const perChannel = Math.floor(len / channels);
|
|
33103
|
+
const planes = [];
|
|
33104
|
+
for (let ch = 0; ch < channels; ch++) {
|
|
33105
|
+
planes.push(arr.subarray ? arr.subarray(ch * perChannel, (ch + 1) * perChannel) : arr);
|
|
33106
|
+
}
|
|
33107
|
+
return planes;
|
|
33108
|
+
}
|
|
33109
|
+
function asFloat32(x) {
|
|
33110
|
+
if (x instanceof Float32Array) return x;
|
|
33111
|
+
const ta = x;
|
|
33112
|
+
return new Float32Array(ta.buffer, ta.byteOffset, ta.byteLength / 4);
|
|
33113
|
+
}
|
|
33114
|
+
function asInt16(x) {
|
|
33115
|
+
if (x instanceof Int16Array) return x;
|
|
33116
|
+
const ta = x;
|
|
33117
|
+
return new Int16Array(ta.buffer, ta.byteOffset, ta.byteLength / 2);
|
|
33118
|
+
}
|
|
33119
|
+
function asInt32(x) {
|
|
33120
|
+
if (x instanceof Int32Array) return x;
|
|
33121
|
+
const ta = x;
|
|
33122
|
+
return new Int32Array(ta.buffer, ta.byteOffset, ta.byteLength / 4);
|
|
33123
|
+
}
|
|
33124
|
+
function asUint8(x) {
|
|
33125
|
+
if (x instanceof Uint8Array) return x;
|
|
33126
|
+
const ta = x;
|
|
33127
|
+
return new Uint8Array(ta.buffer, ta.byteOffset, ta.byteLength);
|
|
33128
|
+
}
|
|
33129
|
+
function sanitizeFrameTimestamp(frame, nextUs, fallbackTimeBase) {
|
|
33130
|
+
const lo = frame.pts ?? 0;
|
|
33131
|
+
const hi = frame.ptshi ?? 0;
|
|
33132
|
+
const isInvalid = hi === -2147483648 && lo === 0 || !Number.isFinite(lo);
|
|
33133
|
+
if (isInvalid) {
|
|
33134
|
+
const us2 = nextUs();
|
|
33135
|
+
frame.pts = us2;
|
|
33136
|
+
frame.ptshi = 0;
|
|
33137
|
+
return;
|
|
33138
|
+
}
|
|
33139
|
+
const tb = fallbackTimeBase ?? [1, 1e6];
|
|
33140
|
+
const pts64 = hi * 4294967296 + lo;
|
|
33141
|
+
const us = Math.round(pts64 * 1e6 * tb[0] / tb[1]);
|
|
33142
|
+
if (Number.isFinite(us) && Math.abs(us) <= Number.MAX_SAFE_INTEGER) {
|
|
33143
|
+
frame.pts = us;
|
|
33144
|
+
frame.ptshi = us < 0 ? -1 : 0;
|
|
33145
|
+
return;
|
|
33146
|
+
}
|
|
33147
|
+
const fallback = nextUs();
|
|
33148
|
+
frame.pts = fallback;
|
|
33149
|
+
frame.ptshi = 0;
|
|
33150
|
+
}
|
|
33151
|
+
|
|
 // src/strategies/hybrid/decoder.ts
 async function startHybridDecoder(opts) {
   const variant = pickLibavVariant(opts.context);
   const libav = await loadLibav(variant);
   const bridge = await loadBridge();
   const { prepareLibavInput: prepareLibavInput2 } = await Promise.resolve().then(() => (init_libav_http_reader(), libav_http_reader_exports));
-  const inputHandle = await prepareLibavInput2(libav, opts.filename, opts.source);
+  const inputHandle = await prepareLibavInput2(libav, opts.filename, opts.source, opts.transport);
   const readPkt = await libav.av_packet_alloc();
   const [fmt_ctx, streams] = await libav.ff_init_demuxer_file(opts.filename);
   const videoStream = streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_VIDEO) ?? null;
-  const
+  const firstAudioTrackId = opts.context.audioTracks[0]?.id;
+  let audioStream = (firstAudioTrackId != null ? streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO && s.index === firstAudioTrackId) : void 0) ?? streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO) ?? null;
   if (!videoStream && !audioStream) {
     throw new Error("hybrid decoder: file has no decodable streams");
   }
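The new fourth argument threads the player's transport configuration down to the libav input reader. Elsewhere in this diff that transport object is built from `requestInit` and `fetchFn` options on the player; a hedged sketch of supplying it from application code follows (the names `createPlayer`, `target`, `source`, `requestInit`, and `fetchFn` appear in this diff, while the URL and header values are placeholders):

```js
// Sketch: authenticated HTTP range reads for a remote source.
const player = await createPlayer({
  target: document.querySelector("video"),
  source: "https://example.com/media/movie.avi",
  requestInit: { headers: { Authorization: "Bearer <token>" } },
  // Alternatively, supply a custom fetch implementation:
  fetchFn: (input, init) => fetch(input, { ...init, cache: "no-store" })
});
```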
@@ -32525,6 +33227,56 @@ async function startHybridDecoder(opts) {
     });
     throw new Error("hybrid decoder: could not initialize any decoders");
   }
+  let bsfCtx = null;
+  let bsfPkt = null;
+  if (videoStream && opts.context.videoTracks[0]?.codec === "mpeg4") {
+    try {
+      bsfCtx = await libav.av_bsf_list_parse_str_js("mpeg4_unpack_bframes");
+      if (bsfCtx != null && bsfCtx >= 0) {
+        const parIn = await libav.AVBSFContext_par_in(bsfCtx);
+        await libav.avcodec_parameters_copy(parIn, videoStream.codecpar);
+        await libav.av_bsf_init(bsfCtx);
+        bsfPkt = await libav.av_packet_alloc();
+        dbg.info("bsf", "mpeg4_unpack_bframes BSF active (hybrid)");
+      } else {
+        console.warn("[avbridge] mpeg4_unpack_bframes BSF not available in hybrid decoder");
+        bsfCtx = null;
+      }
+    } catch (err) {
+      console.warn("[avbridge] hybrid: failed to init BSF:", err.message);
+      bsfCtx = null;
+      bsfPkt = null;
+    }
+  }
+  async function applyBSF(packets) {
+    if (!bsfCtx || !bsfPkt) return packets;
+    const out = [];
+    for (const pkt of packets) {
+      await libav.ff_copyin_packet(bsfPkt, pkt);
+      const sendErr = await libav.av_bsf_send_packet(bsfCtx, bsfPkt);
+      if (sendErr < 0) {
+        out.push(pkt);
+        continue;
+      }
+      while (true) {
+        const recvErr = await libav.av_bsf_receive_packet(bsfCtx, bsfPkt);
+        if (recvErr < 0) break;
+        out.push(await libav.ff_copyout_packet(bsfPkt));
+      }
+    }
+    return out;
+  }
+  async function flushBSF() {
+    if (!bsfCtx || !bsfPkt) return;
+    try {
+      await libav.av_bsf_send_packet(bsfCtx, 0);
+      while (true) {
+        const err = await libav.av_bsf_receive_packet(bsfCtx, bsfPkt);
+        if (err < 0) break;
+      }
+    } catch {
+    }
+  }
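MPEG-4 Part 2 video with packed B-frames cannot be handed to WebCodecs as-is, so demuxed packets are now routed through FFmpeg's `mpeg4_unpack_bframes` bitstream filter before decoding. A rough sketch of the resulting per-batch video path; `readFrames`, `toEncodedVideoChunk`, and `nextSyntheticUs` are stand-ins for plumbing not shown here and are not avbridge APIs:

```js
// Assumed sketch of one pump iteration's video path:
// demux -> bitstream filter -> timestamp sanitize -> WebCodecs decode.
async function pumpVideoOnce() {
  const packets = await readFrames();                  // demuxed batch (libav)
  const videoPackets = packets[videoStream.index] ?? [];
  const processed = await applyBSF(videoPackets);      // unpack packed B-frames
  for (const pkt of processed) {
    sanitizePacketTimestamp(pkt, () => nextSyntheticUs());
    videoDecoder.decode(toEncodedVideoChunk(pkt));     // WebCodecs VideoDecoder
  }
}
```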
   let destroyed = false;
   let pumpToken = 0;
   let pumpRunning = null;
@@ -32552,8 +33304,15 @@ async function startHybridDecoder(opts) {
     if (myToken !== pumpToken || destroyed) return;
     const videoPackets = videoStream ? packets[videoStream.index] : void 0;
     const audioPackets = audioStream ? packets[audioStream.index] : void 0;
+    if (audioDec && audioPackets && audioPackets.length > 0) {
+      await decodeAudioBatch(audioPackets, myToken);
+    }
+    if (myToken !== pumpToken || destroyed) return;
+    await new Promise((r) => setTimeout(r, 0));
+    if (myToken !== pumpToken || destroyed) return;
     if (videoDecoder && videoPackets && videoPackets.length > 0) {
-
+      const processed = await applyBSF(videoPackets);
+      for (const pkt of processed) {
         if (myToken !== pumpToken || destroyed) return;
         sanitizePacketTimestamp(pkt, () => {
           const ts = syntheticVideoUs;
@@ -32573,9 +33332,6 @@ async function startHybridDecoder(opts) {
       }
     }
   }
-    if (audioDec && audioPackets && audioPackets.length > 0) {
-      await decodeAudioBatch(audioPackets, myToken);
-    }
     packetsRead += (videoPackets?.length ?? 0) + (audioPackets?.length ?? 0);
     while (!destroyed && myToken === pumpToken && (videoDecoder && videoDecoder.decodeQueueSize > 10 || opts.audio.bufferAhead() > 2 || opts.renderer.queueDepth() >= opts.renderer.queueHighWater)) {
       await new Promise((r) => setTimeout(r, 50));
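The tail of the loop above is a simple backpressure gate: it sleeps in 50 ms steps while the WebCodecs decode queue, the amount of audio scheduled ahead, or the renderer's frame queue is past its high-water mark. The same idea expressed as a small generic helper (a sketch, not an avbridge API):

```js
// Sketch: poll a saturation predicate until it clears or the work is cancelled.
async function waitWhile(saturated, isCancelled, stepMs = 50) {
  while (!isCancelled() && saturated()) {
    await new Promise((r) => setTimeout(r, stepMs));
  }
}

// Usage mirroring the loop condition in the diff:
// await waitWhile(
//   () => videoDecoder.decodeQueueSize > 10 || audio.bufferAhead() > 2,
//   () => destroyed || myToken !== pumpToken
// );
```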
@@ -32598,20 +33354,43 @@ async function startHybridDecoder(opts) {
   }
   async function decodeAudioBatch(pkts, myToken, flush = false) {
     if (!audioDec || destroyed || myToken !== pumpToken) return;
-
-
-
-
-
-
-
-
-
-
-
-
+    const AUDIO_SUB_BATCH = 4;
+    let allFrames = [];
+    for (let i = 0; i < pkts.length; i += AUDIO_SUB_BATCH) {
+      if (myToken !== pumpToken || destroyed) return;
+      const slice = pkts.slice(i, i + AUDIO_SUB_BATCH);
+      const isLast = i + AUDIO_SUB_BATCH >= pkts.length;
+      try {
+        const frames2 = await libav.ff_decode_multi(
+          audioDec.c,
+          audioDec.pkt,
+          audioDec.frame,
+          slice,
+          isLast && flush ? { fin: true, ignoreErrors: true } : { ignoreErrors: true }
+        );
+        allFrames = allFrames.concat(frames2);
+      } catch (err) {
+        console.error("[avbridge] hybrid audio decode failed:", err);
+        return;
+      }
+      if (!isLast) await new Promise((r) => setTimeout(r, 0));
+    }
+    if (pkts.length === 0 && flush) {
+      try {
+        allFrames = await libav.ff_decode_multi(
+          audioDec.c,
+          audioDec.pkt,
+          audioDec.frame,
+          [],
+          { fin: true, ignoreErrors: true }
+        );
+      } catch (err) {
+        console.error("[avbridge] hybrid audio flush failed:", err);
+        return;
+      }
     }
     if (myToken !== pumpToken || destroyed) return;
+    const frames = allFrames;
     for (const f of frames) {
       if (myToken !== pumpToken || destroyed) return;
       sanitizeFrameTimestamp(
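`decodeAudioBatch` now splits each packet batch into sub-batches of four and yields to the event loop between them, so long audio stretches no longer starve rendering and UI work. The pattern in isolation, with the `ff_decode_multi` call replaced by a generic callback (a sketch, not package code):

```js
// Sketch: process items in small slices, yielding between slices so other
// tasks can run. `decodeSlice` stands in for the ff_decode_multi call above.
async function processInSubBatches(items, size, decodeSlice, isCancelled) {
  let results = [];
  for (let i = 0; i < items.length; i += size) {
    if (isCancelled()) return results;
    results = results.concat(await decodeSlice(items.slice(i, i + size)));
    if (i + size < items.length) {
      await new Promise((r) => setTimeout(r, 0)); // yield to the event loop
    }
  }
  return results;
}
```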
@@ -32648,6 +33427,14 @@ async function startHybridDecoder(opts) {
       await pumpRunning;
     } catch {
     }
+    try {
+      if (bsfCtx) await libav.av_bsf_free(bsfCtx);
+    } catch {
+    }
+    try {
+      if (bsfPkt) await libav.av_packet_free?.(bsfPkt);
+    } catch {
+    }
     try {
       if (videoDecoder && videoDecoder.state !== "closed") videoDecoder.close();
     } catch {
@@ -32669,6 +33456,71 @@ async function startHybridDecoder(opts) {
     } catch {
     }
   },
+  async setAudioTrack(trackId, timeSec) {
+    if (audioStream && audioStream.index === trackId) return;
+    const newStream = streams.find(
+      (s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO && s.index === trackId
+    );
+    if (!newStream) {
+      console.warn("[avbridge] hybrid: setAudioTrack \u2014 no stream with id", trackId);
+      return;
+    }
+    const newToken = ++pumpToken;
+    if (pumpRunning) {
+      try {
+        await pumpRunning;
+      } catch {
+      }
+    }
+    if (destroyed) return;
+    if (audioDec) {
+      try {
+        await libav.ff_free_decoder?.(audioDec.c, audioDec.pkt, audioDec.frame);
+      } catch {
+      }
+      audioDec = null;
+    }
+    try {
+      const [, c, pkt, frame] = await libav.ff_init_decoder(newStream.codec_id, {
+        codecpar: newStream.codecpar
+      });
+      audioDec = { c, pkt, frame };
+      audioTimeBase = newStream.time_base_num && newStream.time_base_den ? [newStream.time_base_num, newStream.time_base_den] : void 0;
+    } catch (err) {
+      console.warn(
+        "[avbridge] hybrid: setAudioTrack init failed \u2014 switching to no-audio:",
+        err.message
+      );
+      audioDec = null;
+      opts.audio.setNoAudio();
+    }
+    audioStream = newStream;
+    try {
+      const tsUs = Math.floor(timeSec * 1e6);
+      const [tsLo, tsHi] = libav.f64toi64 ? libav.f64toi64(tsUs) : [tsUs | 0, Math.floor(tsUs / 4294967296)];
+      await libav.av_seek_frame(
+        fmt_ctx,
+        -1,
+        tsLo,
+        tsHi,
+        libav.AVSEEK_FLAG_BACKWARD ?? 0
+      );
+    } catch (err) {
+      console.warn("[avbridge] hybrid: setAudioTrack seek failed:", err);
+    }
+    try {
+      if (videoDecoder && videoDecoder.state === "configured") {
+        await videoDecoder.flush();
+      }
+    } catch {
+    }
+    await flushBSF();
+    syntheticVideoUs = Math.round(timeSec * 1e6);
+    syntheticAudioUs = Math.round(timeSec * 1e6);
+    pumpRunning = pumpLoop(newToken).catch(
+      (err) => console.error("[avbridge] hybrid pump failed (post-setAudioTrack):", err)
+    );
+  },
   async seek(timeSec) {
     const newToken = ++pumpToken;
     if (pumpRunning) {
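The new `setAudioTrack(trackId, timeSec)` handle tears down the old libav audio decoder, opens one for the requested stream, seeks the demuxer back to the current position, flushes the WebCodecs decoder and the bitstream filter, and restarts the pump. From application code the switch is driven through the player/session wrappers added later in this diff; a hedged usage sketch follows (only `setAudioTrack` and the `tracks` event payload names appear in the diff; the menu wiring and `player.on` are assumptions):

```js
// Assumed sketch: populate a track picker and switch audio without losing position.
player.on?.("tracks", ({ audio }) => {
  for (const t of audio) trackMenu.add(new Option(`Track ${t.id}`, String(t.id)));
});
trackMenu.onchange = async () => {
  try {
    await player.setAudioTrack(Number(trackMenu.value)); // resumes near the current time
  } catch (err) {
    console.warn("audio track switch failed:", err);
  }
};
```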
@@ -32701,6 +33553,7 @@ async function startHybridDecoder(opts) {
       if (audioDec) await libav.avcodec_flush_buffers?.(audioDec.c);
     } catch {
     }
+    await flushBSF();
     syntheticVideoUs = Math.round(timeSec * 1e6);
     syntheticAudioUs = Math.round(timeSec * 1e6);
     pumpRunning = pumpLoop(newToken).catch(
@@ -32714,6 +33567,7 @@ async function startHybridDecoder(opts) {
     videoFramesDecoded,
     videoChunksFed,
     audioFramesDecoded,
+    bsfApplied: bsfCtx ? ["mpeg4_unpack_bframes"] : [],
     videoDecodeQueueSize: videoDecoder?.decodeQueueSize ?? 0,
     // Confirmed transport info — see fallback decoder for the pattern.
     _transport: inputHandle.transport === "http-range" ? "http-range" : "memory",
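The diagnostics snapshot now reports which bitstream filters are active (`bsfApplied`) alongside the confirmed transport. A sketch of inspecting it; the field names `bsfApplied`, `_transport`, and `videoDecodeQueueSize` appear in this diff, while the accessor name `getDiagnostics()` is an assumption based on the "snapshot of current diagnostics" method documented later in the diff:

```js
// Sketch: surface decoder internals for debugging.
const diag = player.getDiagnostics?.();
if (diag) {
  console.log("transport:", diag._transport);          // "http-range" or "memory"
  console.log("bitstream filters:", diag.bsfApplied);  // e.g. ["mpeg4_unpack_bframes"]
  console.log("video decode queue:", diag.videoDecodeQueueSize);
}
```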
@@ -32724,158 +33578,6 @@ async function startHybridDecoder(opts) {
|
|
|
32724
33578
|
}
|
|
32725
33579
|
};
|
|
32726
33580
|
}
|
|
32727
|
-
function sanitizePacketTimestamp(pkt, nextUs, fallbackTimeBase) {
|
|
32728
|
-
const lo = pkt.pts ?? 0;
|
|
32729
|
-
const hi = pkt.ptshi ?? 0;
|
|
32730
|
-
const isInvalid = hi === -2147483648 && lo === 0 || !Number.isFinite(lo);
|
|
32731
|
-
if (isInvalid) {
|
|
32732
|
-
const us2 = nextUs();
|
|
32733
|
-
pkt.pts = us2;
|
|
32734
|
-
pkt.ptshi = 0;
|
|
32735
|
-
pkt.time_base_num = 1;
|
|
32736
|
-
pkt.time_base_den = 1e6;
|
|
32737
|
-
return;
|
|
32738
|
-
}
|
|
32739
|
-
const tb = fallbackTimeBase ?? [1, 1e6];
|
|
32740
|
-
const pts64 = hi * 4294967296 + lo;
|
|
32741
|
-
const us = Math.round(pts64 * 1e6 * tb[0] / tb[1]);
|
|
32742
|
-
if (Number.isFinite(us) && Math.abs(us) <= Number.MAX_SAFE_INTEGER) {
|
|
32743
|
-
pkt.pts = us;
|
|
32744
|
-
pkt.ptshi = us < 0 ? -1 : 0;
|
|
32745
|
-
pkt.time_base_num = 1;
|
|
32746
|
-
pkt.time_base_den = 1e6;
|
|
32747
|
-
return;
|
|
32748
|
-
}
|
|
32749
|
-
const fallback = nextUs();
|
|
32750
|
-
pkt.pts = fallback;
|
|
32751
|
-
pkt.ptshi = 0;
|
|
32752
|
-
pkt.time_base_num = 1;
|
|
32753
|
-
pkt.time_base_den = 1e6;
|
|
32754
|
-
}
|
|
32755
|
-
function sanitizeFrameTimestamp(frame, nextUs, fallbackTimeBase) {
|
|
32756
|
-
const lo = frame.pts ?? 0;
|
|
32757
|
-
const hi = frame.ptshi ?? 0;
|
|
32758
|
-
const isInvalid = hi === -2147483648 && lo === 0 || !Number.isFinite(lo);
|
|
32759
|
-
if (isInvalid) {
|
|
32760
|
-
const us2 = nextUs();
|
|
32761
|
-
frame.pts = us2;
|
|
32762
|
-
frame.ptshi = 0;
|
|
32763
|
-
return;
|
|
32764
|
-
}
|
|
32765
|
-
const tb = fallbackTimeBase ?? [1, 1e6];
|
|
32766
|
-
const pts64 = hi * 4294967296 + lo;
|
|
32767
|
-
const us = Math.round(pts64 * 1e6 * tb[0] / tb[1]);
|
|
32768
|
-
if (Number.isFinite(us) && Math.abs(us) <= Number.MAX_SAFE_INTEGER) {
|
|
32769
|
-
frame.pts = us;
|
|
32770
|
-
frame.ptshi = us < 0 ? -1 : 0;
|
|
32771
|
-
return;
|
|
32772
|
-
}
|
|
32773
|
-
const fallback = nextUs();
|
|
32774
|
-
frame.pts = fallback;
|
|
32775
|
-
frame.ptshi = 0;
|
|
32776
|
-
}
|
|
32777
|
-
var AV_SAMPLE_FMT_U8 = 0;
|
|
32778
|
-
var AV_SAMPLE_FMT_S16 = 1;
|
|
32779
|
-
var AV_SAMPLE_FMT_S32 = 2;
|
|
32780
|
-
var AV_SAMPLE_FMT_FLT = 3;
|
|
32781
|
-
var AV_SAMPLE_FMT_U8P = 5;
|
|
32782
|
-
var AV_SAMPLE_FMT_S16P = 6;
|
|
32783
|
-
var AV_SAMPLE_FMT_S32P = 7;
|
|
32784
|
-
var AV_SAMPLE_FMT_FLTP = 8;
|
|
32785
|
-
function libavFrameToInterleavedFloat32(frame) {
|
|
32786
|
-
const channels = frame.channels ?? frame.ch_layout_nb_channels ?? 1;
|
|
32787
|
-
const sampleRate = frame.sample_rate ?? 44100;
|
|
32788
|
-
const nbSamples = frame.nb_samples ?? 0;
|
|
32789
|
-
if (nbSamples === 0) return null;
|
|
32790
|
-
const out = new Float32Array(nbSamples * channels);
|
|
32791
|
-
switch (frame.format) {
|
|
32792
|
-
case AV_SAMPLE_FMT_FLTP: {
|
|
32793
|
-
const planes = ensurePlanes(frame.data, channels);
|
|
32794
|
-
for (let ch = 0; ch < channels; ch++) {
|
|
32795
|
-
const plane = asFloat32(planes[ch]);
|
|
32796
|
-
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i];
|
|
32797
|
-
}
|
|
32798
|
-
return { data: out, channels, sampleRate };
|
|
32799
|
-
}
|
|
32800
|
-
case AV_SAMPLE_FMT_FLT: {
|
|
32801
|
-
const flat = asFloat32(frame.data);
|
|
32802
|
-
for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i];
|
|
32803
|
-
return { data: out, channels, sampleRate };
|
|
32804
|
-
}
|
|
32805
|
-
case AV_SAMPLE_FMT_S16P: {
|
|
32806
|
-
const planes = ensurePlanes(frame.data, channels);
|
|
32807
|
-
for (let ch = 0; ch < channels; ch++) {
|
|
32808
|
-
const plane = asInt16(planes[ch]);
|
|
32809
|
-
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 32768;
|
|
32810
|
-
}
|
|
32811
|
-
return { data: out, channels, sampleRate };
|
|
32812
|
-
}
|
|
32813
|
-
case AV_SAMPLE_FMT_S16: {
|
|
32814
|
-
const flat = asInt16(frame.data);
|
|
32815
|
-
for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 32768;
|
|
32816
|
-
return { data: out, channels, sampleRate };
|
|
32817
|
-
}
|
|
32818
|
-
case AV_SAMPLE_FMT_S32P: {
|
|
32819
|
-
const planes = ensurePlanes(frame.data, channels);
|
|
32820
|
-
for (let ch = 0; ch < channels; ch++) {
|
|
32821
|
-
const plane = asInt32(planes[ch]);
|
|
32822
|
-
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 2147483648;
|
|
32823
|
-
}
|
|
32824
|
-
return { data: out, channels, sampleRate };
|
|
32825
|
-
}
|
|
32826
|
-
case AV_SAMPLE_FMT_S32: {
|
|
32827
|
-
const flat = asInt32(frame.data);
|
|
32828
|
-
for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 2147483648;
|
|
32829
|
-
return { data: out, channels, sampleRate };
|
|
32830
|
-
}
|
|
32831
|
-
case AV_SAMPLE_FMT_U8P: {
|
|
32832
|
-
const planes = ensurePlanes(frame.data, channels);
|
|
32833
|
-
for (let ch = 0; ch < channels; ch++) {
|
|
32834
|
-
const plane = asUint8(planes[ch]);
|
|
32835
|
-
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = (plane[i] - 128) / 128;
|
|
32836
|
-
}
|
|
32837
|
-
return { data: out, channels, sampleRate };
|
|
32838
|
-
}
|
|
32839
|
-
case AV_SAMPLE_FMT_U8: {
|
|
32840
|
-
const flat = asUint8(frame.data);
|
|
32841
|
-
for (let i = 0; i < nbSamples * channels; i++) out[i] = (flat[i] - 128) / 128;
|
|
32842
|
-
return { data: out, channels, sampleRate };
|
|
32843
|
-
}
|
|
32844
|
-
default:
|
|
32845
|
-
return null;
|
|
32846
|
-
}
|
|
32847
|
-
}
|
|
32848
|
-
function ensurePlanes(data, channels) {
|
|
32849
|
-
if (Array.isArray(data)) return data;
|
|
32850
|
-
const arr = data;
|
|
32851
|
-
const len = arr.length;
|
|
32852
|
-
const perChannel = Math.floor(len / channels);
|
|
32853
|
-
const planes = [];
|
|
32854
|
-
for (let ch = 0; ch < channels; ch++) {
|
|
32855
|
-
planes.push(arr.subarray ? arr.subarray(ch * perChannel, (ch + 1) * perChannel) : arr);
|
|
32856
|
-
}
|
|
32857
|
-
return planes;
|
|
32858
|
-
}
|
|
32859
|
-
function asFloat32(x) {
|
|
32860
|
-
if (x instanceof Float32Array) return x;
|
|
32861
|
-
const ta = x;
|
|
32862
|
-
return new Float32Array(ta.buffer, ta.byteOffset, ta.byteLength / 4);
|
|
32863
|
-
}
|
|
32864
|
-
function asInt16(x) {
|
|
32865
|
-
if (x instanceof Int16Array) return x;
|
|
32866
|
-
const ta = x;
|
|
32867
|
-
return new Int16Array(ta.buffer, ta.byteOffset, ta.byteLength / 2);
|
|
32868
|
-
}
|
|
32869
|
-
function asInt32(x) {
|
|
32870
|
-
if (x instanceof Int32Array) return x;
|
|
32871
|
-
const ta = x;
|
|
32872
|
-
return new Int32Array(ta.buffer, ta.byteOffset, ta.byteLength / 4);
|
|
32873
|
-
}
|
|
32874
|
-
function asUint8(x) {
|
|
32875
|
-
if (x instanceof Uint8Array) return x;
|
|
32876
|
-
const ta = x;
|
|
32877
|
-
return new Uint8Array(ta.buffer, ta.byteOffset, ta.byteLength);
|
|
32878
|
-
}
|
|
32879
33581
|
async function loadBridge() {
|
|
32880
33582
|
try {
|
|
32881
33583
|
const wrapper = await Promise.resolve().then(() => (init_libav_import(), libav_import_exports));
|
|
@@ -32890,7 +33592,7 @@ async function loadBridge() {
|
|
|
32890
33592
|
// src/strategies/hybrid/index.ts
|
|
32891
33593
|
var READY_AUDIO_BUFFER_SECONDS = 0.3;
|
|
32892
33594
|
var READY_TIMEOUT_SECONDS = 10;
|
|
32893
|
-
async function createHybridSession(ctx, target) {
|
|
33595
|
+
async function createHybridSession(ctx, target, transport) {
|
|
32894
33596
|
const { normalizeSource: normalizeSource2 } = await Promise.resolve().then(() => (init_source(), source_exports));
|
|
32895
33597
|
const source = await normalizeSource2(ctx.source);
|
|
32896
33598
|
const fps = ctx.videoTracks[0]?.fps ?? 30;
|
|
@@ -32903,7 +33605,8 @@ async function createHybridSession(ctx, target) {
|
|
|
32903
33605
|
filename: ctx.name ?? "input.bin",
|
|
32904
33606
|
context: ctx,
|
|
32905
33607
|
renderer,
|
|
32906
|
-
audio
|
|
33608
|
+
audio,
|
|
33609
|
+
transport
|
|
32907
33610
|
});
|
|
32908
33611
|
} catch (err) {
|
|
32909
33612
|
audio.destroy();
|
|
@@ -32921,6 +33624,22 @@ async function createHybridSession(ctx, target) {
|
|
|
32921
33624
|
configurable: true,
|
|
32922
33625
|
get: () => !audio.isPlaying()
|
|
32923
33626
|
});
|
|
33627
|
+
Object.defineProperty(target, "volume", {
|
|
33628
|
+
configurable: true,
|
|
33629
|
+
get: () => audio.getVolume(),
|
|
33630
|
+
set: (v) => {
|
|
33631
|
+
audio.setVolume(v);
|
|
33632
|
+
target.dispatchEvent(new Event("volumechange"));
|
|
33633
|
+
}
|
|
33634
|
+
});
|
|
33635
|
+
Object.defineProperty(target, "muted", {
|
|
33636
|
+
configurable: true,
|
|
33637
|
+
get: () => audio.getMuted(),
|
|
33638
|
+
set: (m) => {
|
|
33639
|
+
audio.setMuted(m);
|
|
33640
|
+
target.dispatchEvent(new Event("volumechange"));
|
|
33641
|
+
}
|
|
33642
|
+
});
|
|
32924
33643
|
if (ctx.duration && Number.isFinite(ctx.duration)) {
|
|
32925
33644
|
Object.defineProperty(target, "duration", {
|
|
32926
33645
|
configurable: true,
|
|
@@ -32960,15 +33679,35 @@ async function createHybridSession(ctx, target) {
|
|
|
32960
33679
|
if (!audio.isPlaying()) {
|
|
32961
33680
|
await waitForBuffer();
|
|
32962
33681
|
await audio.start();
|
|
33682
|
+
target.dispatchEvent(new Event("play"));
|
|
33683
|
+
target.dispatchEvent(new Event("playing"));
|
|
32963
33684
|
}
|
|
32964
33685
|
},
|
|
32965
33686
|
pause() {
|
|
32966
33687
|
void audio.pause();
|
|
33688
|
+
target.dispatchEvent(new Event("pause"));
|
|
32967
33689
|
},
|
|
32968
33690
|
async seek(time) {
|
|
32969
33691
|
await doSeek(time);
|
|
32970
33692
|
},
|
|
32971
|
-
async setAudioTrack(
|
|
33693
|
+
async setAudioTrack(id) {
|
|
33694
|
+
if (!ctx.audioTracks.some((t) => t.id === id)) {
|
|
33695
|
+
console.warn("[avbridge] hybrid: setAudioTrack \u2014 unknown track id", id);
|
|
33696
|
+
return;
|
|
33697
|
+
}
|
|
33698
|
+
const wasPlaying = audio.isPlaying();
|
|
33699
|
+
const currentTime = audio.now();
|
|
33700
|
+
await audio.pause().catch(() => {
|
|
33701
|
+
});
|
|
33702
|
+
await handles.setAudioTrack(id, currentTime).catch(
|
|
33703
|
+
(err) => console.warn("[avbridge] hybrid: handles.setAudioTrack failed:", err)
|
|
33704
|
+
);
|
|
33705
|
+
await audio.reset(currentTime);
|
|
33706
|
+
renderer.flush();
|
|
33707
|
+
if (wasPlaying) {
|
|
33708
|
+
await waitForBuffer();
|
|
33709
|
+
await audio.start();
|
|
33710
|
+
}
|
|
32972
33711
|
},
|
|
32973
33712
|
async setSubtitleTrack(_id) {
|
|
32974
33713
|
},
|
|
@@ -32986,6 +33725,8 @@ async function createHybridSession(ctx, target) {
|
|
|
32986
33725
|
delete target.currentTime;
|
|
32987
33726
|
delete target.duration;
|
|
32988
33727
|
delete target.paused;
|
|
33728
|
+
delete target.volume;
|
|
33729
|
+
delete target.muted;
|
|
32989
33730
|
} catch {
|
|
32990
33731
|
}
|
|
32991
33732
|
},
|
|
@@ -32997,16 +33738,18 @@ async function createHybridSession(ctx, target) {
|
|
|
32997
33738
|
|
|
32998
33739
|
// src/strategies/fallback/decoder.ts
|
|
32999
33740
|
init_libav_loader();
|
|
33741
|
+
init_debug();
|
|
33000
33742
|
async function startDecoder(opts) {
|
|
33001
33743
|
const variant = pickLibavVariant(opts.context);
|
|
33002
33744
|
const libav = await loadLibav(variant);
|
|
33003
33745
|
const bridge = await loadBridge2();
|
|
33004
33746
|
const { prepareLibavInput: prepareLibavInput2 } = await Promise.resolve().then(() => (init_libav_http_reader(), libav_http_reader_exports));
|
|
33005
|
-
const inputHandle = await prepareLibavInput2(libav, opts.filename, opts.source);
|
|
33747
|
+
const inputHandle = await prepareLibavInput2(libav, opts.filename, opts.source, opts.transport);
|
|
33006
33748
|
const readPkt = await libav.av_packet_alloc();
|
|
33007
33749
|
const [fmt_ctx, streams] = await libav.ff_init_demuxer_file(opts.filename);
|
|
33008
33750
|
const videoStream = streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_VIDEO) ?? null;
|
|
33009
|
-
const
|
|
33751
|
+
const firstAudioTrackId = opts.context.audioTracks[0]?.id;
|
|
33752
|
+
let audioStream = (firstAudioTrackId != null ? streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO && s.index === firstAudioTrackId) : void 0) ?? streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO) ?? null;
|
|
33010
33753
|
if (!videoStream && !audioStream) {
|
|
33011
33754
|
throw new Error("fallback decoder: file has no decodable streams");
|
|
33012
33755
|
}
|
|
@@ -33058,6 +33801,56 @@ async function startDecoder(opts) {
|
|
|
33058
33801
|
`fallback decoder: could not initialize any libav decoders (${codecs}).${hint}`
|
|
33059
33802
|
);
|
|
33060
33803
|
}
|
|
33804
|
+
let bsfCtx = null;
|
|
33805
|
+
let bsfPkt = null;
|
|
33806
|
+
if (videoStream && opts.context.videoTracks[0]?.codec === "mpeg4") {
|
|
33807
|
+
try {
|
|
33808
|
+
bsfCtx = await libav.av_bsf_list_parse_str_js("mpeg4_unpack_bframes");
|
|
33809
|
+
if (bsfCtx != null && bsfCtx >= 0) {
|
|
33810
|
+
const parIn = await libav.AVBSFContext_par_in(bsfCtx);
|
|
33811
|
+
await libav.avcodec_parameters_copy(parIn, videoStream.codecpar);
|
|
33812
|
+
await libav.av_bsf_init(bsfCtx);
|
|
33813
|
+
bsfPkt = await libav.av_packet_alloc();
|
|
33814
|
+
dbg.info("bsf", "mpeg4_unpack_bframes BSF active");
|
|
33815
|
+
} else {
|
|
33816
|
+
console.warn("[avbridge] mpeg4_unpack_bframes BSF not available \u2014 decoding without it");
|
|
33817
|
+
bsfCtx = null;
|
|
33818
|
+
}
|
|
33819
|
+
} catch (err) {
|
|
33820
|
+
console.warn("[avbridge] failed to init mpeg4_unpack_bframes BSF:", err.message);
|
|
33821
|
+
bsfCtx = null;
|
|
33822
|
+
bsfPkt = null;
|
|
33823
|
+
}
|
|
33824
|
+
}
|
|
33825
|
+
async function applyBSF(packets) {
|
|
33826
|
+
if (!bsfCtx || !bsfPkt) return packets;
|
|
33827
|
+
const out = [];
|
|
33828
|
+
for (const pkt of packets) {
|
|
33829
|
+
await libav.ff_copyin_packet(bsfPkt, pkt);
|
|
33830
|
+
const sendErr = await libav.av_bsf_send_packet(bsfCtx, bsfPkt);
|
|
33831
|
+
if (sendErr < 0) {
|
|
33832
|
+
out.push(pkt);
|
|
33833
|
+
continue;
|
|
33834
|
+
}
|
|
33835
|
+
while (true) {
|
|
33836
|
+
const recvErr = await libav.av_bsf_receive_packet(bsfCtx, bsfPkt);
|
|
33837
|
+
if (recvErr < 0) break;
|
|
33838
|
+
out.push(await libav.ff_copyout_packet(bsfPkt));
|
|
33839
|
+
}
|
|
33840
|
+
}
|
|
33841
|
+
return out;
|
|
33842
|
+
}
|
|
33843
|
+
async function flushBSF() {
|
|
33844
|
+
if (!bsfCtx || !bsfPkt) return;
|
|
33845
|
+
try {
|
|
33846
|
+
await libav.av_bsf_send_packet(bsfCtx, 0);
|
|
33847
|
+
while (true) {
|
|
33848
|
+
const err = await libav.av_bsf_receive_packet(bsfCtx, bsfPkt);
|
|
33849
|
+
if (err < 0) break;
|
|
33850
|
+
}
|
|
33851
|
+
} catch {
|
|
33852
|
+
}
|
|
33853
|
+
}
|
|
33061
33854
|
let destroyed = false;
|
|
33062
33855
|
let pumpToken = 0;
|
|
33063
33856
|
let pumpRunning = null;
|
|
@@ -33093,7 +33886,8 @@ async function startDecoder(opts) {
|
|
|
33093
33886
|
}
|
|
33094
33887
|
if (myToken !== pumpToken || destroyed) return;
|
|
33095
33888
|
if (videoDec && videoPackets && videoPackets.length > 0) {
|
|
33096
|
-
await
|
|
33889
|
+
const processed = await applyBSF(videoPackets);
|
|
33890
|
+
await decodeVideoBatch(processed, myToken);
|
|
33097
33891
|
}
|
|
33098
33892
|
packetsRead += (videoPackets?.length ?? 0) + (audioPackets?.length ?? 0);
|
|
33099
33893
|
if (videoFramesDecoded > 0) {
|
|
@@ -33171,7 +33965,7 @@ async function startDecoder(opts) {
|
|
|
33171
33965
|
if (myToken !== pumpToken || destroyed) return;
|
|
33172
33966
|
for (const f of frames) {
|
|
33173
33967
|
if (myToken !== pumpToken || destroyed) return;
|
|
33174
|
-
|
|
33968
|
+
sanitizeFrameTimestamp(
|
|
33175
33969
|
f,
|
|
33176
33970
|
() => {
|
|
33177
33971
|
const ts = syntheticVideoUs;
|
|
@@ -33181,7 +33975,7 @@ async function startDecoder(opts) {
|
|
|
33181
33975
|
videoTimeBase
|
|
33182
33976
|
);
|
|
33183
33977
|
try {
|
|
33184
|
-
const vf = bridge.laFrameToVideoFrame(f,
|
|
33978
|
+
const vf = bridge.laFrameToVideoFrame(f, { timeBase: [1, 1e6] });
|
|
33185
33979
|
opts.renderer.enqueue(vf);
|
|
33186
33980
|
videoFramesDecoded++;
|
|
33187
33981
|
} catch (err) {
|
|
@@ -33209,7 +34003,7 @@ async function startDecoder(opts) {
|
|
|
33209
34003
|
if (myToken !== pumpToken || destroyed) return;
|
|
33210
34004
|
for (const f of frames) {
|
|
33211
34005
|
if (myToken !== pumpToken || destroyed) return;
|
|
33212
|
-
|
|
34006
|
+
sanitizeFrameTimestamp(
|
|
33213
34007
|
f,
|
|
33214
34008
|
() => {
|
|
33215
34009
|
const ts = syntheticAudioUs;
|
|
@@ -33220,7 +34014,7 @@ async function startDecoder(opts) {
|
|
|
33220
34014
|
},
|
|
33221
34015
|
audioTimeBase
|
|
33222
34016
|
);
|
|
33223
|
-
const samples =
|
|
34017
|
+
const samples = libavFrameToInterleavedFloat32(f);
|
|
33224
34018
|
if (samples) {
|
|
33225
34019
|
opts.audio.schedule(samples.data, samples.channels, samples.sampleRate);
|
|
33226
34020
|
audioFramesDecoded++;
|
|
@@ -33239,6 +34033,14 @@ async function startDecoder(opts) {
|
|
|
33239
34033
|
await pumpRunning;
|
|
33240
34034
|
} catch {
|
|
33241
34035
|
}
|
|
34036
|
+
try {
|
|
34037
|
+
if (bsfCtx) await libav.av_bsf_free(bsfCtx);
|
|
34038
|
+
} catch {
|
|
34039
|
+
}
|
|
34040
|
+
try {
|
|
34041
|
+
if (bsfPkt) await libav.av_packet_free?.(bsfPkt);
|
|
34042
|
+
} catch {
|
|
34043
|
+
}
|
|
33242
34044
|
try {
|
|
33243
34045
|
if (videoDec) await libav.ff_free_decoder?.(videoDec.c, videoDec.pkt, videoDec.frame);
|
|
33244
34046
|
} catch {
|
|
@@ -33260,6 +34062,69 @@ async function startDecoder(opts) {
|
|
|
33260
34062
|
} catch {
|
|
33261
34063
|
}
|
|
33262
34064
|
},
|
|
34065
|
+
async setAudioTrack(trackId, timeSec) {
|
|
34066
|
+
if (audioStream && audioStream.index === trackId) return;
|
|
34067
|
+
const newStream = streams.find(
|
|
34068
|
+
(s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO && s.index === trackId
|
|
34069
|
+
);
|
|
34070
|
+
if (!newStream) {
|
|
34071
|
+
console.warn("[avbridge] fallback: setAudioTrack \u2014 no stream with id", trackId);
|
|
34072
|
+
return;
|
|
34073
|
+
}
|
|
34074
|
+
const newToken = ++pumpToken;
|
|
34075
|
+
if (pumpRunning) {
|
|
34076
|
+
try {
|
|
34077
|
+
await pumpRunning;
|
|
34078
|
+
} catch {
|
|
34079
|
+
}
|
|
34080
|
+
}
|
|
34081
|
+
if (destroyed) return;
|
|
34082
|
+
if (audioDec) {
|
|
34083
|
+
try {
|
|
34084
|
+
await libav.ff_free_decoder?.(audioDec.c, audioDec.pkt, audioDec.frame);
|
|
34085
|
+
} catch {
|
|
34086
|
+
}
|
|
34087
|
+
audioDec = null;
|
|
34088
|
+
}
|
|
34089
|
+
try {
|
|
34090
|
+
const [, c, pkt, frame] = await libav.ff_init_decoder(newStream.codec_id, {
|
|
34091
|
+
codecpar: newStream.codecpar
|
|
34092
|
+
});
|
|
34093
|
+
audioDec = { c, pkt, frame };
|
|
34094
|
+
audioTimeBase = newStream.time_base_num && newStream.time_base_den ? [newStream.time_base_num, newStream.time_base_den] : void 0;
|
|
34095
|
+
} catch (err) {
|
|
34096
|
+
console.warn(
|
|
34097
|
+
"[avbridge] fallback: setAudioTrack init failed \u2014 falling back to no-audio mode:",
|
|
34098
|
+
err.message
|
|
34099
|
+
);
|
|
34100
|
+
audioDec = null;
|
|
34101
|
+
opts.audio.setNoAudio();
|
|
34102
|
+
}
|
|
34103
|
+
audioStream = newStream;
|
|
34104
|
+
try {
|
|
34105
|
+
const tsUs = Math.floor(timeSec * 1e6);
|
|
34106
|
+
const [tsLo, tsHi] = libav.f64toi64 ? libav.f64toi64(tsUs) : [tsUs | 0, Math.floor(tsUs / 4294967296)];
|
|
34107
|
+
await libav.av_seek_frame(
|
|
34108
|
+
fmt_ctx,
|
|
34109
|
+
-1,
|
|
34110
|
+
tsLo,
|
|
34111
|
+
tsHi,
|
|
34112
|
+
libav.AVSEEK_FLAG_BACKWARD ?? 0
|
|
34113
|
+
);
|
|
34114
|
+
} catch (err) {
|
|
34115
|
+
console.warn("[avbridge] fallback: setAudioTrack seek failed:", err);
|
|
34116
|
+
}
|
|
34117
|
+
try {
|
|
34118
|
+
if (videoDec) await libav.avcodec_flush_buffers?.(videoDec.c);
|
|
34119
|
+
} catch {
|
|
34120
|
+
}
|
|
34121
|
+
await flushBSF();
|
|
34122
|
+
syntheticVideoUs = Math.round(timeSec * 1e6);
|
|
34123
|
+
syntheticAudioUs = Math.round(timeSec * 1e6);
|
|
34124
|
+
pumpRunning = pumpLoop(newToken).catch(
|
|
34125
|
+
(err) => console.error("[avbridge] fallback pump failed (post-setAudioTrack):", err)
|
|
34126
|
+
);
|
|
34127
|
+
},
|
|
33263
34128
|
async seek(timeSec) {
|
|
33264
34129
|
const newToken = ++pumpToken;
|
|
33265
34130
|
if (pumpRunning) {
|
|
@@ -33290,6 +34155,7 @@ async function startDecoder(opts) {
|
|
|
33290
34155
|
if (audioDec) await libav.avcodec_flush_buffers?.(audioDec.c);
|
|
33291
34156
|
} catch {
|
|
33292
34157
|
}
|
|
34158
|
+
await flushBSF();
|
|
33293
34159
|
syntheticVideoUs = Math.round(timeSec * 1e6);
|
|
33294
34160
|
syntheticAudioUs = Math.round(timeSec * 1e6);
|
|
33295
34161
|
pumpRunning = pumpLoop(newToken).catch(
|
|
@@ -33302,6 +34168,7 @@ async function startDecoder(opts) {
|
|
|
33302
34168
|
packetsRead,
|
|
33303
34169
|
videoFramesDecoded,
|
|
33304
34170
|
audioFramesDecoded,
|
|
34171
|
+
bsfApplied: bsfCtx ? ["mpeg4_unpack_bframes"] : [],
|
|
33305
34172
|
// Confirmed transport info: once prepareLibavInput returns
|
|
33306
34173
|
// successfully, we *know* whether the source is http-range (probe
|
|
33307
34174
|
// succeeded and returned 206) or in-memory blob. Diagnostics hoists
|
|
@@ -33314,135 +34181,6 @@ async function startDecoder(opts) {
|
|
|
33314
34181
|
}
|
|
33315
34182
|
};
|
|
33316
34183
|
}
|
|
33317
|
-
function sanitizeFrameTimestamp2(frame, nextUs, fallbackTimeBase) {
|
|
33318
|
-
const lo = frame.pts ?? 0;
|
|
33319
|
-
const hi = frame.ptshi ?? 0;
|
|
33320
|
-
const isInvalid = hi === -2147483648 && lo === 0 || !Number.isFinite(lo);
|
|
33321
|
-
if (isInvalid) {
|
|
33322
|
-
const us2 = nextUs();
|
|
33323
|
-
frame.pts = us2;
|
|
33324
|
-
frame.ptshi = 0;
|
|
33325
|
-
return { timeBase: [1, 1e6] };
|
|
33326
|
-
}
|
|
33327
|
-
const tb = fallbackTimeBase ?? [1, 1e6];
|
|
33328
|
-
const pts64 = hi * 4294967296 + lo;
|
|
33329
|
-
const us = Math.round(pts64 * 1e6 * tb[0] / tb[1]);
|
|
33330
|
-
if (Number.isFinite(us) && Math.abs(us) <= Number.MAX_SAFE_INTEGER) {
|
|
33331
|
-
frame.pts = us;
|
|
33332
|
-
frame.ptshi = us < 0 ? -1 : 0;
|
|
33333
|
-
return { timeBase: [1, 1e6] };
|
|
33334
|
-
}
|
|
33335
|
-
const fallback = nextUs();
|
|
33336
|
-
frame.pts = fallback;
|
|
33337
|
-
frame.ptshi = 0;
|
|
33338
|
-
return { timeBase: [1, 1e6] };
|
|
33339
|
-
}
|
|
33340
|
-
var AV_SAMPLE_FMT_U82 = 0;
|
|
33341
|
-
var AV_SAMPLE_FMT_S162 = 1;
|
|
33342
|
-
var AV_SAMPLE_FMT_S322 = 2;
|
|
33343
|
-
var AV_SAMPLE_FMT_FLT2 = 3;
|
|
33344
|
-
var AV_SAMPLE_FMT_U8P2 = 5;
|
|
33345
|
-
var AV_SAMPLE_FMT_S16P2 = 6;
|
|
33346
|
-
var AV_SAMPLE_FMT_S32P2 = 7;
|
|
33347
|
-
var AV_SAMPLE_FMT_FLTP2 = 8;
|
|
33348
|
-
function libavFrameToInterleavedFloat322(frame) {
|
|
33349
|
-
const channels = frame.channels ?? frame.ch_layout_nb_channels ?? 1;
|
|
33350
|
-
const sampleRate = frame.sample_rate ?? 44100;
|
|
33351
|
-
const nbSamples = frame.nb_samples ?? 0;
|
|
33352
|
-
if (nbSamples === 0) return null;
|
|
33353
|
-
const out = new Float32Array(nbSamples * channels);
|
|
33354
|
-
switch (frame.format) {
|
|
33355
|
-
case AV_SAMPLE_FMT_FLTP2: {
|
|
33356
|
-
const planes = ensurePlanes2(frame.data, channels);
|
|
33357
|
-
for (let ch = 0; ch < channels; ch++) {
|
|
33358
|
-
const plane = asFloat322(planes[ch]);
|
|
33359
|
-
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i];
|
|
33360
|
-
}
|
|
33361
|
-
return { data: out, channels, sampleRate };
|
|
33362
|
-
}
|
|
33363
|
-
case AV_SAMPLE_FMT_FLT2: {
|
|
33364
|
-
const flat = asFloat322(frame.data);
|
|
33365
|
-
for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i];
|
|
33366
|
-
return { data: out, channels, sampleRate };
|
|
33367
|
-
}
|
|
33368
|
-
case AV_SAMPLE_FMT_S16P2: {
|
|
33369
|
-
const planes = ensurePlanes2(frame.data, channels);
|
|
33370
|
-
for (let ch = 0; ch < channels; ch++) {
|
|
33371
|
-
const plane = asInt162(planes[ch]);
|
|
33372
|
-
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 32768;
|
|
33373
|
-
}
|
|
33374
|
-
return { data: out, channels, sampleRate };
|
|
33375
|
-
}
|
|
33376
|
-
case AV_SAMPLE_FMT_S162: {
|
|
33377
|
-
const flat = asInt162(frame.data);
|
|
33378
|
-
for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 32768;
|
|
33379
|
-
return { data: out, channels, sampleRate };
|
|
33380
|
-
}
|
|
33381
|
-
case AV_SAMPLE_FMT_S32P2: {
|
|
33382
|
-
const planes = ensurePlanes2(frame.data, channels);
|
|
33383
|
-
for (let ch = 0; ch < channels; ch++) {
|
|
33384
|
-
const plane = asInt322(planes[ch]);
|
|
33385
|
-
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 2147483648;
|
|
33386
|
-
}
|
|
33387
|
-
return { data: out, channels, sampleRate };
|
|
33388
|
-
}
|
|
33389
|
-
case AV_SAMPLE_FMT_S322: {
|
|
33390
|
-
const flat = asInt322(frame.data);
|
|
33391
|
-
for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 2147483648;
|
|
33392
|
-
return { data: out, channels, sampleRate };
|
|
33393
|
-
}
|
|
33394
|
-
case AV_SAMPLE_FMT_U8P2: {
|
|
33395
|
-
const planes = ensurePlanes2(frame.data, channels);
|
|
33396
|
-
for (let ch = 0; ch < channels; ch++) {
|
|
33397
|
-
const plane = asUint82(planes[ch]);
|
|
33398
|
-
for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = (plane[i] - 128) / 128;
|
|
33399
|
-
}
|
|
33400
|
-
return { data: out, channels, sampleRate };
|
|
33401
|
-
}
|
|
33402
|
-
case AV_SAMPLE_FMT_U82: {
|
|
33403
|
-
const flat = asUint82(frame.data);
|
|
33404
|
-
for (let i = 0; i < nbSamples * channels; i++) out[i] = (flat[i] - 128) / 128;
|
|
33405
|
-
return { data: out, channels, sampleRate };
|
|
33406
|
-
}
|
|
33407
|
-
default:
|
|
33408
|
-
if (!globalThis.__avbridgeLoggedSampleFmt) {
|
|
33409
|
-
globalThis.__avbridgeLoggedSampleFmt = frame.format;
|
|
33410
|
-
console.warn(`[avbridge] unsupported audio sample format from libav: ${frame.format}`);
|
|
33411
|
-
}
|
|
33412
|
-
return null;
|
|
33413
|
-
}
|
|
33414
|
-
}
|
|
33415
|
-
function ensurePlanes2(data, channels) {
|
|
33416
|
-
if (Array.isArray(data)) return data;
|
|
33417
|
-
const arr = data;
|
|
33418
|
-
const len = arr.length;
|
|
33419
|
-
const perChannel = Math.floor(len / channels);
|
|
33420
|
-
const planes = [];
|
|
33421
|
-
for (let ch = 0; ch < channels; ch++) {
|
|
33422
|
-
planes.push(arr.subarray ? arr.subarray(ch * perChannel, (ch + 1) * perChannel) : arr);
|
|
33423
|
-
}
|
|
33424
|
-
return planes;
|
|
33425
|
-
}
|
|
33426
|
-
function asFloat322(x) {
|
|
33427
|
-
if (x instanceof Float32Array) return x;
|
|
33428
|
-
const ta = x;
|
|
33429
|
-
return new Float32Array(ta.buffer, ta.byteOffset, ta.byteLength / 4);
|
|
33430
|
-
}
|
|
33431
|
-
function asInt162(x) {
|
|
33432
|
-
if (x instanceof Int16Array) return x;
|
|
33433
|
-
const ta = x;
|
|
33434
|
-
return new Int16Array(ta.buffer, ta.byteOffset, ta.byteLength / 2);
|
|
33435
|
-
}
|
|
33436
|
-
function asInt322(x) {
|
|
33437
|
-
if (x instanceof Int32Array) return x;
|
|
33438
|
-
const ta = x;
|
|
33439
|
-
return new Int32Array(ta.buffer, ta.byteOffset, ta.byteLength / 4);
|
|
33440
|
-
}
|
|
33441
|
-
function asUint82(x) {
|
|
33442
|
-
if (x instanceof Uint8Array) return x;
|
|
33443
|
-
const ta = x;
|
|
33444
|
-
return new Uint8Array(ta.buffer, ta.byteOffset, ta.byteLength);
|
|
33445
|
-
}
|
|
33446
34184
|
async function loadBridge2() {
|
|
33447
34185
|
try {
|
|
33448
34186
|
const wrapper = await Promise.resolve().then(() => (init_libav_import(), libav_import_exports));
|
|
@@ -33458,7 +34196,7 @@ async function loadBridge2() {
|
|
|
33458
34196
|
init_debug();
|
|
33459
34197
|
var READY_AUDIO_BUFFER_SECONDS2 = 0.04;
|
|
33460
34198
|
var READY_TIMEOUT_SECONDS2 = 3;
|
|
33461
|
-
async function createFallbackSession(ctx, target) {
|
|
34199
|
+
async function createFallbackSession(ctx, target, transport) {
|
|
33462
34200
|
const { normalizeSource: normalizeSource2 } = await Promise.resolve().then(() => (init_source(), source_exports));
|
|
33463
34201
|
const source = await normalizeSource2(ctx.source);
|
|
33464
34202
|
const fps = ctx.videoTracks[0]?.fps ?? 30;
|
|
@@ -33471,7 +34209,8 @@ async function createFallbackSession(ctx, target) {
|
|
|
33471
34209
|
filename: ctx.name ?? "input.bin",
|
|
33472
34210
|
context: ctx,
|
|
33473
34211
|
renderer,
|
|
33474
|
-
audio
|
|
34212
|
+
audio,
|
|
34213
|
+
transport
|
|
33475
34214
|
});
|
|
33476
34215
|
} catch (err) {
|
|
33477
34216
|
audio.destroy();
|
|
@@ -33489,6 +34228,22 @@ async function createFallbackSession(ctx, target) {
|
|
|
33489
34228
|
configurable: true,
|
|
33490
34229
|
get: () => !audio.isPlaying()
|
|
33491
34230
|
});
|
|
34231
|
+
Object.defineProperty(target, "volume", {
|
|
34232
|
+
configurable: true,
|
|
34233
|
+
get: () => audio.getVolume(),
|
|
34234
|
+
set: (v) => {
|
|
34235
|
+
audio.setVolume(v);
|
|
34236
|
+
target.dispatchEvent(new Event("volumechange"));
|
|
34237
|
+
}
|
|
34238
|
+
});
|
|
34239
|
+
Object.defineProperty(target, "muted", {
|
|
34240
|
+
configurable: true,
|
|
34241
|
+
get: () => audio.getMuted(),
|
|
34242
|
+
set: (m) => {
|
|
34243
|
+
audio.setMuted(m);
|
|
34244
|
+
target.dispatchEvent(new Event("volumechange"));
|
|
34245
|
+
}
|
|
34246
|
+
});
|
|
33492
34247
|
if (ctx.duration && Number.isFinite(ctx.duration)) {
|
|
33493
34248
|
Object.defineProperty(target, "duration", {
|
|
33494
34249
|
configurable: true,
|
|
@@ -33552,15 +34307,35 @@ async function createFallbackSession(ctx, target) {
|
|
|
33552
34307
|
if (!audio.isPlaying()) {
|
|
33553
34308
|
await waitForBuffer();
|
|
33554
34309
|
await audio.start();
|
|
34310
|
+
target.dispatchEvent(new Event("play"));
|
|
34311
|
+
target.dispatchEvent(new Event("playing"));
|
|
33555
34312
|
}
|
|
33556
34313
|
},
|
|
33557
34314
|
pause() {
|
|
33558
34315
|
void audio.pause();
|
|
34316
|
+
target.dispatchEvent(new Event("pause"));
|
|
33559
34317
|
},
|
|
33560
34318
|
async seek(time) {
|
|
33561
34319
|
await doSeek(time);
|
|
33562
34320
|
},
|
|
33563
|
-
async setAudioTrack(
|
|
34321
|
+
async setAudioTrack(id) {
|
|
34322
|
+
if (!ctx.audioTracks.some((t) => t.id === id)) {
|
|
34323
|
+
console.warn("[avbridge] fallback: setAudioTrack \u2014 unknown track id", id);
|
|
34324
|
+
return;
|
|
34325
|
+
}
|
|
34326
|
+
const wasPlaying = audio.isPlaying();
|
|
34327
|
+
const currentTime = audio.now();
|
|
34328
|
+
await audio.pause().catch(() => {
|
|
34329
|
+
});
|
|
34330
|
+
await handles.setAudioTrack(id, currentTime).catch(
|
|
34331
|
+
(err) => console.warn("[avbridge] fallback: handles.setAudioTrack failed:", err)
|
|
34332
|
+
);
|
|
34333
|
+
await audio.reset(currentTime);
|
|
34334
|
+
renderer.flush();
|
|
34335
|
+
if (wasPlaying) {
|
|
34336
|
+
await waitForBuffer();
|
|
34337
|
+
await audio.start();
|
|
34338
|
+
}
|
|
33564
34339
|
},
|
|
33565
34340
|
async setSubtitleTrack(_id) {
|
|
33566
34341
|
},
|
|
@@ -33575,6 +34350,8 @@ async function createFallbackSession(ctx, target) {
|
|
|
33575
34350
|
delete target.currentTime;
|
|
33576
34351
|
delete target.duration;
|
|
33577
34352
|
delete target.paused;
|
|
34353
|
+
delete target.volume;
|
|
34354
|
+
delete target.muted;
|
|
33578
34355
|
} catch {
|
|
33579
34356
|
}
|
|
33580
34357
|
},
|
|
@@ -33598,12 +34375,12 @@ var remuxPlugin = {
|
|
|
33598
34375
|
var hybridPlugin = {
|
|
33599
34376
|
name: "hybrid",
|
|
33600
34377
|
canHandle: () => typeof VideoDecoder !== "undefined",
|
|
33601
|
-
execute: (ctx, video) => createHybridSession(ctx, video)
|
|
34378
|
+
execute: (ctx, video, transport) => createHybridSession(ctx, video, transport)
|
|
33602
34379
|
};
|
|
33603
34380
|
var fallbackPlugin = {
|
|
33604
34381
|
name: "fallback",
|
|
33605
34382
|
canHandle: () => true,
|
|
33606
|
-
execute: (ctx, video) => createFallbackSession(ctx, video)
|
|
34383
|
+
execute: (ctx, video, transport) => createFallbackSession(ctx, video, transport)
|
|
33607
34384
|
};
|
|
33608
34385
|
function registerBuiltins(registry) {
|
|
33609
34386
|
registry.register(nativePlugin);
|
|
@@ -33612,120 +34389,10 @@ function registerBuiltins(registry) {
|
|
|
33612
34389
|
registry.register(fallbackPlugin);
|
|
33613
34390
|
}
|
|
33614
34391
|
|
|
33615
|
-
// src/subtitles/srt.ts
|
|
33616
|
-
function srtToVtt(srt) {
|
|
33617
|
-
if (srt.charCodeAt(0) === 65279) srt = srt.slice(1);
|
|
33618
|
-
const normalized = srt.replace(/\r\n/g, "\n").replace(/\r/g, "\n").trim();
|
|
33619
|
-
const blocks = normalized.split(/\n{2,}/);
|
|
33620
|
-
const out = ["WEBVTT", ""];
|
|
33621
|
-
for (const block of blocks) {
|
|
33622
|
-
const lines = block.split("\n");
|
|
33623
|
-
if (lines.length > 0 && /^\d+$/.test(lines[0].trim())) {
|
|
33624
|
-
lines.shift();
|
|
33625
|
-
}
|
|
33626
|
-
if (lines.length === 0) continue;
|
|
33627
|
-
const timing = lines.shift();
|
|
33628
|
-
const vttTiming = convertTiming(timing);
|
|
33629
|
-
if (!vttTiming) continue;
|
|
33630
|
-
out.push(vttTiming);
|
|
33631
|
-
for (const l of lines) out.push(l);
|
|
33632
|
-
out.push("");
|
|
33633
|
-
}
|
|
33634
|
-
return out.join("\n");
|
|
33635
|
-
}
|
|
33636
|
-
function convertTiming(line) {
|
|
33637
|
-
const m = /^(\d{1,2}):(\d{2}):(\d{2})[,.](\d{1,3})\s*-->\s*(\d{1,2}):(\d{2}):(\d{2})[,.](\d{1,3})(.*)$/.exec(
|
|
33638
|
-
line.trim()
|
|
33639
|
-
);
|
|
33640
|
-
if (!m) return null;
|
|
33641
|
-
const fmt2 = (h, mm, s, ms) => `${h.padStart(2, "0")}:${mm}:${s}.${ms.padEnd(3, "0").slice(0, 3)}`;
|
|
33642
|
-
return `${fmt2(m[1], m[2], m[3], m[4])} --> ${fmt2(m[5], m[6], m[7], m[8])}${m[9] ?? ""}`;
|
|
33643
|
-
}
|
|
33644
|
-
|
|
33645
|
-
// src/subtitles/vtt.ts
|
|
33646
|
-
function isVtt(text) {
|
|
33647
|
-
const trimmed = text.replace(/^\ufeff/, "").trimStart();
|
|
33648
|
-
return trimmed.startsWith("WEBVTT");
|
|
33649
|
-
}
|
|
33650
|
-
|
|
33651
|
-
// src/subtitles/index.ts
|
|
33652
|
-
async function discoverSidecars(file, directory) {
|
|
33653
|
-
const baseName = file.name.replace(/\.[^.]+$/, "");
|
|
33654
|
-
const found = [];
|
|
33655
|
-
for await (const [name, handle] of directory) {
|
|
33656
|
-
if (handle.kind !== "file") continue;
|
|
33657
|
-
if (!name.startsWith(baseName)) continue;
|
|
33658
|
-
const lower = name.toLowerCase();
|
|
33659
|
-
let format = null;
|
|
33660
|
-
if (lower.endsWith(".srt")) format = "srt";
|
|
33661
|
-
else if (lower.endsWith(".vtt")) format = "vtt";
|
|
33662
|
-
if (!format) continue;
|
|
33663
|
-
const sidecarFile = await handle.getFile();
|
|
33664
|
-
const url2 = URL.createObjectURL(sidecarFile);
|
|
33665
|
-
const langMatch = name.slice(baseName.length).match(/[._-]([a-z]{2,3})(?:[._-]|\.)/i);
|
|
33666
|
-
found.push({
|
|
33667
|
-
url: url2,
|
|
33668
|
-
format,
|
|
33669
|
-
language: langMatch?.[1]
|
|
33670
|
-
});
|
|
33671
|
-
}
|
|
33672
|
-
return found;
|
|
33673
|
-
}
|
|
33674
|
-
var SubtitleResourceBag = class {
|
|
33675
|
-
urls = /* @__PURE__ */ new Set();
|
|
33676
|
-
/** Track an externally-created blob URL (e.g. from `discoverSidecars`). */
|
|
33677
|
-
track(url2) {
|
|
33678
|
-
this.urls.add(url2);
|
|
33679
|
-
}
|
|
33680
|
-
/** Convenience: create a blob URL and track it in one call. */
|
|
33681
|
-
createObjectURL(blob) {
|
|
33682
|
-
const url2 = URL.createObjectURL(blob);
|
|
33683
|
-
this.urls.add(url2);
|
|
33684
|
-
return url2;
|
|
33685
|
-
}
|
|
33686
|
-
/** Revoke every tracked URL. Idempotent — safe to call multiple times. */
|
|
33687
|
-
revokeAll() {
|
|
33688
|
-
for (const u of this.urls) URL.revokeObjectURL(u);
|
|
33689
|
-
this.urls.clear();
|
|
33690
|
-
}
|
|
33691
|
-
};
|
|
33692
|
-
async function attachSubtitleTracks(video, tracks, bag, onError) {
|
|
33693
|
-
for (const t of Array.from(video.querySelectorAll("track[data-avbridge]"))) {
|
|
33694
|
-
t.remove();
|
|
33695
|
-
}
|
|
33696
|
-
for (const t of tracks) {
|
|
33697
|
-
if (!t.sidecarUrl) continue;
|
|
33698
|
-
try {
|
|
33699
|
-
let url2 = t.sidecarUrl;
|
|
33700
|
-
if (t.format === "srt") {
|
|
33701
|
-
const res = await fetch(t.sidecarUrl);
|
|
33702
|
-
const text = await res.text();
|
|
33703
|
-
const vtt = srtToVtt(text);
|
|
33704
|
-
const blob = new Blob([vtt], { type: "text/vtt" });
|
|
33705
|
-
url2 = bag ? bag.createObjectURL(blob) : URL.createObjectURL(blob);
|
|
33706
|
-
} else if (t.format === "vtt") {
|
|
33707
|
-
const res = await fetch(t.sidecarUrl);
|
|
33708
|
-
const text = await res.text();
|
|
33709
|
-
if (!isVtt(text)) {
|
|
33710
|
-
console.warn("[avbridge] subtitle missing WEBVTT header:", t.sidecarUrl);
|
|
33711
|
-
}
|
|
33712
|
-
}
|
|
33713
|
-
const trackEl = document.createElement("track");
|
|
33714
|
-
trackEl.kind = "subtitles";
|
|
33715
|
-
trackEl.src = url2;
|
|
33716
|
-
trackEl.srclang = t.language ?? "und";
|
|
33717
|
-
trackEl.label = t.language ?? `Subtitle ${t.id}`;
|
|
33718
|
-
trackEl.dataset.avbridge = "true";
|
|
33719
|
-
video.appendChild(trackEl);
|
|
33720
|
-
} catch (err) {
|
|
33721
|
-
const e = err instanceof Error ? err : new Error(String(err));
|
|
33722
|
-
onError?.(e, t);
|
|
33723
|
-
}
|
|
33724
|
-
}
|
|
33725
|
-
}
|
|
33726
|
-
|
|
33727
34392
|
// src/player.ts
|
|
34393
|
+
init_subtitles2();
|
|
33728
34394
|
init_debug();
|
|
34395
|
+
init_errors();
|
|
33729
34396
|
var UnifiedPlayer = class _UnifiedPlayer {
|
|
33730
34397
|
/**
|
|
33731
34398
|
* @internal Use {@link createPlayer} or {@link UnifiedPlayer.create} instead.
|
|
@@ -33733,6 +34400,10 @@ var UnifiedPlayer = class _UnifiedPlayer {
|
|
|
33733
34400
|
constructor(options, registry) {
|
|
33734
34401
|
this.options = options;
|
|
33735
34402
|
this.registry = registry;
|
|
34403
|
+
const { requestInit, fetchFn } = options;
|
|
34404
|
+
if (requestInit || fetchFn) {
|
|
34405
|
+
this.transport = { requestInit, fetchFn };
|
|
34406
|
+
}
|
|
33736
34407
|
}
|
|
33737
34408
|
options;
|
|
33738
34409
|
registry;
|
|
@@ -33752,11 +34423,23 @@ var UnifiedPlayer = class _UnifiedPlayer {
|
|
|
33752
34423
|
// listener outlives the player and accumulates on elements that swap
|
|
33753
34424
|
// source (e.g. <avbridge-video>).
|
|
33754
34425
|
endedListener = null;
|
|
34426
|
+
// Background tab handling. userIntent is what the user last asked for
|
|
34427
|
+
// (play vs pause) — used to decide whether to auto-resume on visibility
|
|
34428
|
+
// return. autoPausedForVisibility tracks whether we paused because the
|
|
34429
|
+
// tab was hidden, so we don't resume playback the user deliberately
|
|
34430
|
+
// paused (e.g. via media keys while hidden).
|
|
34431
|
+
userIntent = "pause";
|
|
34432
|
+
autoPausedForVisibility = false;
|
|
34433
|
+
visibilityListener = null;
|
|
33755
34434
|
// Serializes escalation / setStrategy calls
|
|
33756
34435
|
switchingPromise = Promise.resolve();
|
|
33757
34436
|
// Owns blob URLs created during sidecar discovery + SRT->VTT conversion.
|
|
33758
34437
|
// Revoked at destroy() so repeated source swaps don't leak.
|
|
33759
34438
|
subtitleResources = new SubtitleResourceBag();
|
|
34439
|
+
// Transport config extracted from CreatePlayerOptions. Threaded to probe,
|
|
34440
|
+
// subtitle fetches, and strategy session creators. Not stored on MediaContext
|
|
34441
|
+
// because it's runtime config, not media analysis.
|
|
34442
|
+
transport;
|
|
33760
34443
|
static async create(options) {
|
|
33761
34444
|
const registry = new PluginRegistry();
|
|
33762
34445
|
registerBuiltins(registry);
|
|
@@ -33780,7 +34463,7 @@ var UnifiedPlayer = class _UnifiedPlayer {
|
|
|
33780
34463
|
const bootstrapStart = performance.now();
|
|
33781
34464
|
try {
|
|
33782
34465
|
dbg.info("bootstrap", "start");
|
|
33783
|
-
const ctx = await dbg.timed("probe", "probe", 3e3, () => probe(this.options.source));
|
|
34466
|
+
const ctx = await dbg.timed("probe", "probe", 3e3, () => probe(this.options.source, this.transport));
|
|
33784
34467
|
dbg.info(
|
|
33785
34468
|
"probe",
|
|
33786
34469
|
`container=${ctx.container} video=${ctx.videoTracks[0]?.codec ?? "-"} audio=${ctx.audioTracks[0]?.codec ?? "-"} probedBy=${ctx.probedBy}`
|
|
@@ -33821,16 +34504,15 @@ var UnifiedPlayer = class _UnifiedPlayer {
|
|
|
33821
34504
|
reason: decision.reason
|
|
33822
34505
|
});
|
|
33823
34506
|
await this.startSession(decision.strategy, decision.reason);
|
|
33824
|
-
|
|
33825
|
-
|
|
33826
|
-
|
|
33827
|
-
|
|
33828
|
-
|
|
33829
|
-
(
|
|
33830
|
-
|
|
33831
|
-
|
|
33832
|
-
|
|
33833
|
-
}
|
|
34507
|
+
await attachSubtitleTracks(
|
|
34508
|
+
this.options.target,
|
|
34509
|
+
ctx.subtitleTracks,
|
|
34510
|
+
this.subtitleResources,
|
|
34511
|
+
(err, track) => {
|
|
34512
|
+
console.warn(`[avbridge] subtitle ${track.id} failed: ${err.message}`);
|
|
34513
|
+
},
|
|
34514
|
+
this.transport
|
|
34515
|
+
);
|
|
33834
34516
|
this.emitter.emitSticky("tracks", {
|
|
33835
34517
|
video: ctx.videoTracks,
|
|
33836
34518
|
audio: ctx.audioTracks,
|
|
@@ -33839,6 +34521,10 @@ var UnifiedPlayer = class _UnifiedPlayer {
|
|
|
33839
34521
|
this.startTimeupdateLoop();
|
|
33840
34522
|
this.endedListener = () => this.emitter.emit("ended", void 0);
|
|
33841
34523
|
this.options.target.addEventListener("ended", this.endedListener);
|
|
34524
|
+
if (this.options.backgroundBehavior !== "continue" && typeof document !== "undefined") {
|
|
34525
|
+
this.visibilityListener = () => this.onVisibilityChange();
|
|
34526
|
+
document.addEventListener("visibilitychange", this.visibilityListener);
|
|
34527
|
+
}
|
|
33842
34528
|
this.emitter.emitSticky("ready", void 0);
|
|
33843
34529
|
const bootstrapElapsed = performance.now() - bootstrapStart;
|
|
33844
34530
|
dbg.info("bootstrap", `ready in ${bootstrapElapsed.toFixed(0)}ms`);
|
|
@@ -33865,7 +34551,7 @@ var UnifiedPlayer = class _UnifiedPlayer {
|
|
|
33865
34551
|
throw new Error(`no plugin available for strategy "${strategy}"`);
|
|
33866
34552
|
}
|
|
33867
34553
|
try {
|
|
33868
|
-
this.session = await plugin.execute(this.mediaContext, this.options.target);
|
|
34554
|
+
this.session = await plugin.execute(this.mediaContext, this.options.target, this.transport);
|
|
33869
34555
|
} catch (err) {
|
|
33870
34556
|
const chain2 = this.classification?.fallbackChain;
|
|
33871
34557
|
if (chain2 && chain2.length > 0) {
|
|
@@ -33938,7 +34624,7 @@ var UnifiedPlayer = class _UnifiedPlayer {
|
|
|
33938
34624
|
continue;
|
|
33939
34625
|
}
|
|
33940
34626
|
try {
|
|
33941
|
-
this.session = await plugin.execute(this.mediaContext, this.options.target);
|
|
34627
|
+
this.session = await plugin.execute(this.mediaContext, this.options.target, this.transport);
|
|
33942
34628
|
} catch (err) {
|
|
33943
34629
|
const msg = err instanceof Error ? err.message : String(err);
|
|
33944
34630
|
errors.push(`${nextStrategy}: ${msg}`);
|
|
@@ -33961,8 +34647,10 @@ var UnifiedPlayer = class _UnifiedPlayer {
|
|
|
33961
34647
|
}
|
|
33962
34648
|
return;
|
|
33963
34649
|
}
|
|
33964
|
-
this.emitter.emit("error", new
|
|
33965
|
-
|
|
34650
|
+
this.emitter.emit("error", new AvbridgeError(
|
|
34651
|
+
ERR_ALL_STRATEGIES_EXHAUSTED,
|
|
34652
|
+
`All playback strategies failed: ${errors.join("; ")}`,
|
|
34653
|
+
"This file may require a codec or container that isn't available in this browser. Try the fallback strategy or check browser codec support."
|
|
33966
34654
|
));
|
|
33967
34655
|
}
|
|
33968
34656
|
// ── Stall supervision ─────────────────────────────────────────────────
|
|
@@ -34014,7 +34702,7 @@ var UnifiedPlayer = class _UnifiedPlayer {
|
|
|
34014
34702
|
// ── Public: manual strategy switch ────────────────────────────────────
|
|
34015
34703
|
/** Manually switch to a different playback strategy. Preserves current position and play/pause state. Concurrent calls are serialized. */
|
|
34016
34704
|
async setStrategy(strategy, reason) {
|
|
34017
|
-
if (!this.mediaContext) throw new
|
|
34705
|
+
if (!this.mediaContext) throw new AvbridgeError(ERR_PLAYER_NOT_READY, "Player not ready \u2014 wait for the 'ready' event before calling playback methods.", "Await the 'ready' event or check player.readyState before calling play/pause/seek.");
|
|
34018
34706
|
if (this.session?.strategy === strategy) return;
|
|
34019
34707
|
this.switchingPromise = this.switchingPromise.then(
|
|
34020
34708
|
() => this.doSetStrategy(strategy, reason)
|
|
@@ -34043,7 +34731,7 @@ var UnifiedPlayer = class _UnifiedPlayer {
|
|
|
34043
34731
|
}
|
|
34044
34732
|
const plugin = this.registry.findFor(this.mediaContext, strategy);
|
|
34045
34733
|
if (!plugin) throw new Error(`no plugin available for strategy "${strategy}"`);
|
|
34046
|
-
this.session = await plugin.execute(this.mediaContext, this.options.target);
|
|
34734
|
+
this.session = await plugin.execute(this.mediaContext, this.options.target, this.transport);
|
|
34047
34735
|
this.emitter.emitSticky("strategy", {
|
|
34048
34736
|
strategy,
|
|
34049
34737
|
reason: switchReason
|
|
@@ -34077,26 +34765,56 @@ var UnifiedPlayer = class _UnifiedPlayer {
   }
   /** Begin or resume playback. Throws if the player is not ready. */
   async play() {
-    if (!this.session) throw new
+    if (!this.session) throw new AvbridgeError(ERR_PLAYER_NOT_READY, "Player not ready \u2014 wait for the 'ready' event before calling playback methods.", "Await the 'ready' event or check player.readyState before calling play/pause/seek.");
+    this.userIntent = "play";
+    this.autoPausedForVisibility = false;
     await this.session.play();
   }
   /** Pause playback. No-op if the player is not ready or already paused. */
   pause() {
+    this.userIntent = "pause";
+    this.autoPausedForVisibility = false;
     this.session?.pause();
   }
+  /**
+   * Handle browser tab visibility changes. On hide: pause if the user
+   * had been playing. On show: resume if we were the one who paused.
+   * Skips when `backgroundBehavior: "continue"` is set (listener isn't
+   * installed in that case).
+   */
+  onVisibilityChange() {
+    if (!this.session) return;
+    const action = decideVisibilityAction({
+      hidden: document.hidden,
+      userIntent: this.userIntent,
+      sessionIsPlaying: !this.options.target.paused,
+      autoPausedForVisibility: this.autoPausedForVisibility
+    });
+    if (action === "pause") {
+      this.autoPausedForVisibility = true;
+      dbg.info("visibility", "tab hidden \u2014 auto-paused");
+      this.session.pause();
+    } else if (action === "resume") {
+      this.autoPausedForVisibility = false;
+      dbg.info("visibility", "tab visible \u2014 auto-resuming");
+      void this.session.play().catch((err) => {
+        console.warn("[avbridge] auto-resume after tab return failed:", err);
+      });
+    }
+  }
   /** Seek to the given time in seconds. Throws if the player is not ready. */
   async seek(time) {
-    if (!this.session) throw new
+    if (!this.session) throw new AvbridgeError(ERR_PLAYER_NOT_READY, "Player not ready \u2014 wait for the 'ready' event before calling playback methods.", "Await the 'ready' event or check player.readyState before calling play/pause/seek.");
     await this.session.seek(time);
   }
   /** Switch the active audio track by track ID. Throws if the player is not ready. */
   async setAudioTrack(id) {
-    if (!this.session) throw new
+    if (!this.session) throw new AvbridgeError(ERR_PLAYER_NOT_READY, "Player not ready \u2014 wait for the 'ready' event before calling playback methods.", "Await the 'ready' event or check player.readyState before calling play/pause/seek.");
     await this.session.setAudioTrack(id);
   }
   /** Switch the active subtitle track by track ID, or pass `null` to disable subtitles. */
   async setSubtitleTrack(id) {
-    if (!this.session) throw new
+    if (!this.session) throw new AvbridgeError(ERR_PLAYER_NOT_READY, "Player not ready \u2014 wait for the 'ready' event before calling playback methods.", "Await the 'ready' event or check player.readyState before calling play/pause/seek.");
     await this.session.setSubtitleTrack(id);
   }
   /** Return a snapshot of current diagnostics: container, codecs, strategy, runtime stats, and strategy history. */
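
Every guard in this hunk (`play`, `seek`, `setAudioTrack`, `setSubtitleTrack`, plus `setStrategy` above) now rejects with `AvbridgeError(ERR_PLAYER_NOT_READY, ...)` whose hint points at the `'ready'` event. A defensive-call sketch; `player.on()` and the `code` property name are assumptions.

```js
// Sketch: player.on() and err.code are assumptions; the ERR_PLAYER_NOT_READY
// rejection before 'ready' is what this hunk introduces.
async function playWhenPossible(player) {
  try {
    await player.play();
  } catch (err) {
    if (err?.code === "ERR_PLAYER_NOT_READY") {
      // Too early: retry once the player reports readiness.
      player.on("ready", () => { player.play().catch(console.warn); });
    } else {
      throw err;
    }
  }
}
```
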
@@ -34128,6 +34846,10 @@ var UnifiedPlayer = class _UnifiedPlayer {
       this.options.target.removeEventListener("ended", this.endedListener);
       this.endedListener = null;
     }
+    if (this.visibilityListener) {
+      document.removeEventListener("visibilitychange", this.visibilityListener);
+      this.visibilityListener = null;
+    }
     if (this.session) {
       await this.session.destroy();
       this.session = null;
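
Teardown now also detaches the document-level `visibilitychange` listener that backs the new background auto-pause, so a single awaited call releases both the session and the global listener. A minimal sketch, assuming the method being extended here is the player's public `destroy()`.

```js
// Teardown sketch, assuming the extended method above is the public destroy().
async function disposePlayer(player) {
  await player.destroy();
  // The session is released and the "visibilitychange" listener added for
  // background auto-pause is removed; no further callbacks fire afterwards.
}
```
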
@@ -34139,6 +34861,14 @@ var UnifiedPlayer = class _UnifiedPlayer {
 async function createPlayer(options) {
   return UnifiedPlayer.create(options);
 }
+function decideVisibilityAction(state) {
+  if (state.hidden) {
+    if (state.userIntent === "play" && state.sessionIsPlaying) return "pause";
+    return "noop";
+  }
+  if (state.autoPausedForVisibility) return "resume";
+  return "noop";
+}
 function buildInitialDecision(initial, ctx) {
   const natural = classifyContext(ctx);
   const cls = strategyToClass(initial, natural);
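
`decideVisibilityAction()` is a pure helper, internal to the bundle, that maps the current state to `"pause"`, `"resume"`, or `"noop"`: only a user-initiated, currently-playing session is auto-paused when the tab hides, and only a pause the player itself made is auto-resumed when the tab returns. (Per the docblock earlier in this diff, the listener is not installed at all when `backgroundBehavior: "continue"` is configured.) The calls below just spell out that decision table.

```js
// Illustration of the decision table implemented by decideVisibilityAction above
// (the function is internal to the bundle and not exported).
decideVisibilityAction({ hidden: true,  userIntent: "play",  sessionIsPlaying: true,  autoPausedForVisibility: false }); // "pause"
decideVisibilityAction({ hidden: true,  userIntent: "pause", sessionIsPlaying: false, autoPausedForVisibility: false }); // "noop"
decideVisibilityAction({ hidden: false, userIntent: "play",  sessionIsPlaying: false, autoPausedForVisibility: true  }); // "resume"
decideVisibilityAction({ hidden: false, userIntent: "play",  sessionIsPlaying: true,  autoPausedForVisibility: false }); // "noop"
```
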
@@ -34249,6 +34979,13 @@ var AvbridgeVideoElement = class extends HTMLElementCtor {
   _strategyClass = null;
   _audioTracks = [];
   _subtitleTracks = [];
+  /**
+   * External subtitle list forwarded to `createPlayer()` on the next
+   * bootstrap. Setting this after bootstrap queues it for the next
+   * source change; consumers that need to swap subtitles mid-playback
+   * should set `source` to reload.
+   */
+  _subtitles = null;
   /**
    * Initial strategy preference. `"auto"` means "let the classifier decide";
    * any other value is passed to `createPlayer({ initialStrategy })` and
@@ -34404,7 +35141,8 @@ var AvbridgeVideoElement = class extends HTMLElementCtor {
       // Honor the consumer's preferred initial strategy. "auto" means
       // "let the classifier decide" — the createPlayer call simply doesn't
      // pass initialStrategy in that case.
-      ...this._preferredStrategy !== "auto" ? { initialStrategy: this._preferredStrategy } : {}
+      ...this._preferredStrategy !== "auto" ? { initialStrategy: this._preferredStrategy } : {},
+      ...this._subtitles ? { subtitles: this._subtitles } : {}
     });
   } catch (err) {
     if (id !== this._bootstrapId || this._destroyed) return;
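
The bootstrap now forwards the queued `_subtitles` list to `createPlayer()` alongside the strategy preference, and the hunk below adds the matching element API: a `subtitles` property that takes effect on the next source load, plus `addSubtitle()` for attaching a track mid-playback. A usage sketch; the element tag name is an assumption, while the descriptor shape `{ url, format, language }` and `el.src` are taken from the diff.

```js
// Sketch: the "avbridge-video" tag name is an assumption; subtitles,
// addSubtitle(), and the descriptor shape come from the surrounding hunks.
const el = document.querySelector("avbridge-video");

// Queued subtitles: set before assigning the source so the next bootstrap
// forwards them to createPlayer().
el.subtitles = [{ url: "/en.srt", format: "srt", language: "en" }];
el.src = "/movie.mp4";

// Later, while playing: attach another track without rebuilding the player.
await el.addSubtitle({ url: "/de.srt", format: "srt", language: "de" });
```
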
@@ -34695,6 +35433,47 @@ var AvbridgeVideoElement = class extends HTMLElementCtor {
   get subtitleTracks() {
     return this._subtitleTracks;
   }
+  /**
+   * External subtitle files to attach when the source loads. Takes effect
+   * on the next bootstrap — set before assigning `source`, or reload via
+   * `load()` after changing. For dynamic post-bootstrap addition, use
+   * `addSubtitle()` instead.
+   *
+   * @example
+   * el.subtitles = [{ url: "/en.srt", format: "srt", language: "en" }];
+   * el.src = "/movie.mp4";
+   */
+  get subtitles() {
+    return this._subtitles;
+  }
+  set subtitles(value) {
+    this._subtitles = value;
+  }
+  /**
+   * Attach a subtitle track to the current playback without rebuilding
+   * the player. Works while the element is playing — converts SRT to
+   * VTT if needed, adds a `<track>` to the inner `<video>`. Canvas
+   * strategies pick up the new track via their textTracks watcher.
+   */
+  async addSubtitle(subtitle) {
+    const { attachSubtitleTracks: attachSubtitleTracks2 } = await Promise.resolve().then(() => (init_subtitles2(), subtitles_exports));
+    const format = subtitle.format ?? (subtitle.url.endsWith(".srt") ? "srt" : "vtt");
+    const track = {
+      id: this._subtitleTracks.length,
+      format,
+      language: subtitle.language,
+      sidecarUrl: subtitle.url
+    };
+    this._subtitleTracks.push(track);
+    await attachSubtitleTracks2(
+      this._videoEl,
+      this._subtitleTracks,
+      void 0,
+      (err, t) => {
+        console.warn(`[avbridge] subtitle ${t.id} failed: ${err.message}`);
+      }
+    );
+  }
   // ── Public methods ─────────────────────────────────────────────────────
   /** Force a (re-)bootstrap if a source is currently set. */
   async load() {