@editframe/assets 0.47.0 → 0.47.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE-FULL.md +148 -0
- package/LICENSE.md +58 -0
- package/dist/Probe.cjs +58 -1
- package/dist/Probe.cjs.map +1 -1
- package/dist/Probe.d.cts +49 -0
- package/dist/Probe.d.ts +49 -0
- package/dist/Probe.js +58 -1
- package/dist/Probe.js.map +1 -1
- package/dist/generateWebmSegmentIndex.cjs +323 -0
- package/dist/generateWebmSegmentIndex.cjs.map +1 -0
- package/dist/generateWebmSegmentIndex.js +319 -0
- package/dist/generateWebmSegmentIndex.js.map +1 -0
- package/dist/tasks/generateTrack.cjs +18 -2
- package/dist/tasks/generateTrack.cjs.map +1 -1
- package/dist/tasks/generateTrack.js +16 -2
- package/dist/tasks/generateTrack.js.map +1 -1
- package/dist/tasks/generateTrackFragmentIndex.cjs +20 -3
- package/dist/tasks/generateTrackFragmentIndex.cjs.map +1 -1
- package/dist/tasks/generateTrackFragmentIndex.js +20 -3
- package/dist/tasks/generateTrackFragmentIndex.js.map +1 -1
- package/package.json +1 -1
|
@@ -0,0 +1,319 @@
|
|
|
1
|
+
import { PacketProbe } from "./Probe.js";
|
|
2
|
+
import debug from "debug";
|
|
3
|
+
import { readFile } from "node:fs/promises";
|
|
4
|
+
|
|
5
|
+
//#region src/generateWebmSegmentIndex.ts
// Namespaced debug logger; enable with DEBUG=ef:generateWebmSegmentIndex.
const log = debug("ef:generateWebmSegmentIndex");
|
|
7
|
+
// Minimum span (ms) a segment must cover before it may be finalized.
const MIN_SEGMENT_DURATION_MS = 2000;
// EBML / Matroska element IDs (big-endian, variable width), written in hex
// to match the published element-ID tables.
const ID_EBML = 0x1a45dfa3;
const ID_SEGMENT = 0x18538067;
const ID_INFO = 0x1549a966;
const ID_CLUSTER = 0x1f43b675;
const ID_TIMESTAMP = 0xe7; // inside Cluster
const ID_SIMPLE_BLOCK = 0xa3; // inside Cluster
const ID_BLOCK_GROUP = 0xa0; // inside Cluster
const ID_REFERENCE_BLOCK = 0xfb; // inside BlockGroup — presence means delta frame
const ID_TIMESTAMP_SCALE = 0x2ad7b1; // inside Info (ns per timestamp unit)
|
|
17
|
+
/**
|
|
18
|
+
* Decode an EBML variable-length integer used for **element sizes**.
|
|
19
|
+
* The leading 1-bit encodes the byte-width; that bit is then stripped from
|
|
20
|
+
* the value. Returns `null` for the "unknown / streaming" sentinel value
|
|
21
|
+
* (all data-bits = 1).
|
|
22
|
+
*/
|
|
23
|
+
function readVintSize(buf, offset) {
|
|
24
|
+
const b = buf[offset];
|
|
25
|
+
let width = 1;
|
|
26
|
+
let mask = 128;
|
|
27
|
+
while (width <= 8 && !(b & mask)) {
|
|
28
|
+
mask >>= 1;
|
|
29
|
+
width++;
|
|
30
|
+
}
|
|
31
|
+
let value = b & mask - 1;
|
|
32
|
+
for (let i = 1; i < width; i++) value = value << 8 | (buf[offset + i] ?? 0);
|
|
33
|
+
const unknownSentinel = (1 << 7 * width) - 1;
|
|
34
|
+
return {
|
|
35
|
+
value: value === unknownSentinel ? null : value,
|
|
36
|
+
width
|
|
37
|
+
};
|
|
38
|
+
}
|
|
39
|
+
/**
 * Decode an EBML **element ID** starting at `offset`.
 * The count of leading zero bits in the first byte gives the byte width
 * (IDs are at most 4 bytes). Unlike size VINTs, the leading marker bit is
 * *kept* in the result: element IDs are compared as opaque bit patterns.
 *
 * @param {Buffer} buf - source bytes
 * @param {number} offset - byte offset of the ID's first byte
 * @returns {{ id: number, width: number }}
 */
function readElementId(buf, offset) {
	const first = buf[offset];
	let marker = 128;
	let width = 1;
	for (; width <= 4 && !(first & marker); width++) marker >>= 1;
	let id = first;
	for (let i = 1; i < width; i++) id = id << 8 | (buf[offset + i] ?? 0);
	return {
		id,
		width
	};
}
|
|
58
|
+
/**
 * Read a big-endian unsigned integer of `size` bytes starting at `offset`.
 * Accumulates with multiplication (not 32-bit shifts/masks), so the result
 * is exact up to Number.MAX_SAFE_INTEGER (2^53 - 1). Bytes past the end of
 * the buffer read as zero.
 *
 * @param {Buffer} buf - source bytes
 * @param {number} offset - byte offset of the most significant byte
 * @param {number} size - number of bytes to read
 * @returns {number}
 */
function readUintBE(buf, offset, size) {
	let val = 0;
	for (let i = 0; i < size; i++) {
		// The previous code applied `>>> 0` every iteration, silently
		// truncating any value wider than 32 bits (EBML uints such as
		// TimestampScale may be up to 8 bytes); plain accumulation is exact.
		val = val * 256 + (buf[offset + i] ?? 0);
	}
	return val;
}
|
|
64
|
+
/**
 * Parse the top-level EBML and Segment structure of a WebM file.
 * Only reads as deep as needed: EBML header → Segment → Info/Tracks/Clusters.
 *
 * Returns the offset/width of the Segment-size VINT (so a caller can patch it
 * to "unknown"), the init-region size (everything before the first Cluster),
 * each Cluster's offset/size/timestamp/keyframe flag, and the TimestampScale
 * read from the Info element.
 *
 * @param {Buffer} buf - the entire WebM file contents
 * @throws {Error} if the file does not begin with EBML + Segment, or if no
 *   Cluster element is found
 */
function parseWebmStructure(buf) {
	let pos = 0;
	// ── EBML header element must come first ──
	const { id: ebmlId, width: ebmlIdWidth } = readElementId(buf, pos);
	if (ebmlId !== ID_EBML) throw new Error(`Expected EBML element (0x1A45DFA3), got 0x${ebmlId.toString(16)}`);
	const { value: ebmlSize, width: ebmlSizeWidth } = readVintSize(buf, pos + ebmlIdWidth);
	if (ebmlSize === null) throw new Error("EBML element has unknown size");
	pos += ebmlIdWidth + ebmlSizeWidth + ebmlSize; // skip past the EBML header to the Segment
	// ── Segment element wraps the rest of the file ──
	const { id: segId, width: segIdWidth } = readElementId(buf, pos);
	if (segId !== ID_SEGMENT) throw new Error(`Expected Segment element (0x18538067), got 0x${segId.toString(16)}`);
	const segmentSizeVintOffset = pos + segIdWidth;
	const { width: segSizeWidth } = readVintSize(buf, segmentSizeVintOffset);
	// ── Walk the Segment's direct children ──
	let spos = segmentSizeVintOffset + segSizeWidth;
	let initSize = 0; // byte offset of the first Cluster; 0 = none seen yet
	let timestampScaleNs = 1e6; // default: 1 ms per timestamp unit
	const clusters = [];
	while (spos < buf.length) {
		if (buf.length - spos < 2) break; // not enough bytes left for an element header
		const { id: childId, width: childIdWidth } = readElementId(buf, spos);
		const { value: childSize, width: childSizeWidth } = readVintSize(buf, spos + childIdWidth);
		const headerSize = childIdWidth + childSizeWidth;
		if (childSize === null) {
			// Unknown-size element — its end cannot be located, so stop scanning.
			log(`Unknown-size element 0x${childId.toString(16)} at ${spos}, stopping`);
			break;
		}
		const elementEnd = spos + headerSize + childSize;
		if (childId === ID_INFO) {
			// Info: extract TimestampScale (nanoseconds per timestamp unit).
			let ipos = spos + headerSize;
			while (ipos < elementEnd && ipos < buf.length) {
				const { id: infoChildId, width: infoIdW } = readElementId(buf, ipos);
				const { value: infoChildSize, width: infoSizeW } = readVintSize(buf, ipos + infoIdW);
				if (infoChildSize === null) break;
				if (infoChildId === ID_TIMESTAMP_SCALE) timestampScaleNs = readUintBE(buf, ipos + infoIdW + infoSizeW, infoChildSize);
				ipos += infoIdW + infoSizeW + infoChildSize;
			}
		} else if (childId === ID_CLUSTER) {
			// The init region ends where the first Cluster begins.
			if (initSize === 0) initSize = spos;
			// Scan the Cluster for its base timestamp and any keyframe blocks.
			let clusterTimestampUnits = 0;
			let hasKeyframe = false;
			let cpos = spos + headerSize;
			while (cpos < elementEnd && cpos < buf.length) {
				if (buf.length - cpos < 2) break;
				const { id: cid, width: cidW } = readElementId(buf, cpos);
				const { value: csize, width: csizeW } = readVintSize(buf, cpos + cidW);
				if (csize === null) break;
				const cHeaderSize = cidW + csizeW;
				if (cid === ID_TIMESTAMP) clusterTimestampUnits = readUintBE(buf, cpos + cHeaderSize, csize);
				else if (cid === ID_SIMPLE_BLOCK) {
					// SimpleBlock layout: [track VINT][int16BE relative timecode]
					// [flags byte][frame data]; flag bit 0x80 marks a keyframe.
					const sbOffset = cpos + cHeaderSize;
					const { width: trackNumWidth } = readVintSize(buf, sbOffset);
					if ((buf[sbOffset + trackNumWidth + 2] ?? 0) & 128) hasKeyframe = true;
				} else if (cid === ID_BLOCK_GROUP) {
					// BlockGroup: the *absence* of a ReferenceBlock child means keyframe.
					let bgpos = cpos + cHeaderSize;
					const bgEnd = cpos + cHeaderSize + csize;
					let hasReferenceBlock = false;
					while (bgpos < bgEnd && bgpos < buf.length) {
						const { id: bgid, width: bgidW } = readElementId(buf, bgpos);
						const { value: bgsize, width: bgsizeW } = readVintSize(buf, bgpos + bgidW);
						if (bgsize === null) break;
						if (bgid === ID_REFERENCE_BLOCK) {
							hasReferenceBlock = true;
							break;
						}
						bgpos += bgidW + bgsizeW + bgsize;
					}
					if (!hasReferenceBlock) hasKeyframe = true;
				}
				cpos += cHeaderSize + csize;
			}
			// Convert cluster timestamp units → milliseconds via TimestampScale.
			const timestampMs = Math.round(clusterTimestampUnits * timestampScaleNs / 1e6);
			clusters.push({
				offset: spos,
				size: headerSize + childSize,
				timestampMs,
				hasKeyframe
			});
		}
		spos = elementEnd;
	}
	if (initSize === 0) throw new Error("No Cluster elements found in WebM file");
	return {
		segmentSizeVintOffset,
		segmentSizeVintWidth: segSizeWidth,
		initSize,
		clusters,
		timestampScaleNs
	};
}
|
|
155
|
+
/**
 * Use ffprobe packet data to mark clusters that contain keyframe packets.
 * More reliable than parsing SimpleBlock flags in-process for large files.
 *
 * Mutates each cluster's `hasKeyframe` in place: true when any keyframe
 * packet position falls within the cluster's byte range.
 *
 * @param {Array<{offset: number, size: number, hasKeyframe: boolean}>} clusters
 * @param {string} absolutePath - file to probe
 * @returns {Promise<void>}
 */
async function annotateKeyframes(clusters, absolutePath) {
	const probe = await PacketProbe.probePath(absolutePath);
	// Collect the byte position of every keyframe-flagged packet.
	const keyframePositions = [];
	for (const packet of probe.packets) {
		if (packet.flags?.includes("K") && packet.pos !== void 0) keyframePositions.push(packet.pos);
	}
	for (const cluster of clusters) {
		const end = cluster.offset + cluster.size;
		cluster.hasKeyframe = keyframePositions.some((p) => p >= cluster.offset && p < end);
	}
}
|
|
166
|
+
/**
 * Group clusters into segments of at least MIN_SEGMENT_DURATION_MS that
 * always begin on a keyframe cluster. Clusters before the first keyframe
 * are dropped. Times are expressed in `timescale` ticks per second.
 *
 * @param {Array<{offset, size, timestampMs, hasKeyframe}>} clusters
 * @param {number} timescale - ticks per second for cts/dts/duration
 * @param {number} totalDurationMs - end time used for the final segment
 * @returns {Array<{cts, dts, offset, size, duration}>}
 */
function buildSegments(clusters, timescale, totalDurationMs) {
	const out = [];
	let pending = [];
	let windowStartMs = 0;

	// Emit the pending run of clusters as one segment ending at `endMs`.
	const emit = (endMs) => {
		if (pending.length === 0) return;
		const head = pending[0];
		const tail = pending[pending.length - 1];
		const startTicks = Math.round(windowStartMs * timescale / 1e3);
		out.push({
			cts: startTicks,
			dts: startTicks,
			offset: head.offset,
			size: tail.offset + tail.size - head.offset,
			duration: Math.round((endMs - windowStartMs) * timescale / 1e3)
		});
		pending = [];
	};

	for (const cluster of clusters) {
		const elapsedMs = pending.length > 0 ? cluster.timestampMs - windowStartMs : 0;
		// Finalize the running segment once it is long enough AND the next
		// cluster can start a new one (keyframe-aligned).
		if (pending.length > 0 && elapsedMs >= MIN_SEGMENT_DURATION_MS && cluster.hasKeyframe) {
			emit(cluster.timestampMs);
			windowStartMs = cluster.timestampMs;
		}
		if (pending.length === 0) {
			if (!cluster.hasKeyframe) continue; // wait for the first keyframe
			windowStartMs = cluster.timestampMs;
		}
		pending.push(cluster);
	}
	// Flush whatever remains as the final segment.
	emit(totalDurationMs);
	return out;
}
|
|
204
|
+
/**
 * Build a TrackFragmentIndex for a VP9-alpha WebM source without any
 * transcoding. The returned index contains byte offsets that point directly
 * into the cached copy of the source file (which has its Segment-size VINT
 * patched to "unknown" by generateTrack.ts).
 *
 * @param {string} absolutePath - WebM file to index
 * @param {number} [startTimeOffsetMs] - explicit presentation offset; when
 *   omitted, the first cluster's timestamp is used if it is > 1 ms
 * @returns {Promise<Record<number, TrackFragmentIndex>>} keyed by track id (always 1)
 */
async function generateWebmSegmentIndex(absolutePath, startTimeOffsetMs) {
	log(`Generating WebM segment index for ${absolutePath}`);
	const { initSize, clusters, timestampScaleNs } = parseWebmStructure(await readFile(absolutePath));
	const timescale = 1e3;
	log(`Parsed ${clusters.length} clusters, init=${initSize} bytes, timestampScale=${timestampScaleNs}ns`);
	await annotateKeyframes(clusters, absolutePath);
	log(`Keyframe clusters: ${clusters.filter((c) => c.hasKeyframe).map((c) => `${c.timestampMs}ms@${c.offset}`).join(", ")}`);
	// Probe once and reuse the result for duration, stream metadata, and the
	// sample count — previously ffprobe was spawned twice for the same data.
	const probe = await PacketProbe.probePath(absolutePath);
	const videoStream = probe.data?.streams?.find((s) => s.codec_type === "video");
	// Rough fallback duration: last cluster timestamp plus a size-based guess.
	const lastCluster = clusters[clusters.length - 1];
	let finalDurationMs = lastCluster.timestampMs + lastCluster.size / 4096;
	if (videoStream?.duration) {
		const parsedDuration = parseFloat(videoStream.duration);
		// Guard against NaN leaking into the index when ffprobe reports a
		// non-numeric duration (previously Math.round(NaN) propagated).
		if (Number.isFinite(parsedDuration)) finalDurationMs = Math.round(parsedDuration * 1e3);
	}
	log(`Total duration: ${finalDurationMs}ms`);
	const segments = buildSegments(clusters, timescale, finalDurationMs);
	log(`Built ${segments.length} segments: ${segments.map((s) => `[${s.cts / timescale * 1e3 | 0}ms offset=${s.offset} size=${s.size}]`).join(", ")}`);
	// Track metadata from the same probe; defaults mirror the original output.
	const width = videoStream?.width ?? 1920;
	const height = videoStream?.height ?? 1080;
	const codec = videoStream?.codec_tag_string ?? videoStream?.codec_name ?? "vp09";
	const sampleCount = probe.packets?.filter((p) => p.codec_type !== "audio").length ?? 0;
	// Presentation offset: explicit caller value wins; otherwise the first
	// cluster's timestamp when it is meaningfully non-zero (> 1 ms).
	let trackStartTimeOffsetMs;
	if (startTimeOffsetMs !== void 0) trackStartTimeOffsetMs = startTimeOffsetMs;
	else if (clusters[0] && clusters[0].timestampMs > 1) trackStartTimeOffsetMs = clusters[0].timestampMs;
	const trackIndex = {
		track: 1,
		type: "video",
		width,
		height,
		timescale,
		codec,
		duration: Math.round(finalDurationMs * timescale / 1e3),
		sample_count: sampleCount,
		startTimeOffsetMs: trackStartTimeOffsetMs,
		initSegment: {
			offset: 0,
			size: initSize
		},
		segments
	};
	return { 1: trackIndex };
}
|
|
253
|
+
// SeekHead element ID, and the one-byte EBML Void element ID used to blank it.
const ID_SEEKHEAD = 0x114d9b74;
const EBML_VOID_ID = 0xec;
|
|
255
|
+
/**
 * Patch a WebM buffer in-place to make it safe for partial (segment-sliced) serving:
 *
 * 1. Segment-element size VINT → EBML unknown-size (0x01FFFFFFFFFFFFFF), so a
 *    player receiving a slice does not expect the full original byte length.
 *
 * 2. SeekHead element → EBML Void of the same byte length. The SeekHead holds
 *    offsets relative to the original file; on a sliced buffer those offsets
 *    point at garbage or past the end, so voiding it forces linear parsing.
 *
 * Best-effort: any parse failure leaves the buffer unmodified from that point.
 *
 * @param {Buffer} buf - buffer to patch in place
 * @returns {void}
 */
function patchWebmForSegmentedServing(buf) {
	try {
		// Locate the EBML header and the Segment element that follows it.
		const ebml = readElementId(buf, 0);
		if (ebml.id !== ID_EBML) return;
		const ebmlSize = readVintSize(buf, ebml.width);
		if (ebmlSize.value === null) return;
		const segPos = ebml.width + ebmlSize.width + ebmlSize.value;
		const seg = readElementId(buf, segPos);
		if (seg.id !== ID_SEGMENT) return;

		// Patch 1: rewrite an 8-byte Segment-size VINT to the unknown-size marker.
		const segSizeOffset = segPos + seg.width;
		const segSize = readVintSize(buf, segSizeOffset);
		if (segSize.width === 8) {
			buf[segSizeOffset] = 0x01;
			buf.fill(0xff, segSizeOffset + 1, segSizeOffset + 8);
		}

		// Patch 2: scan up to 4 KiB of Segment children for a SeekHead and
		// overwrite it with a Void element of identical total byte length.
		let pos = segSizeOffset + segSize.width;
		const scanLimit = Math.min(pos + 4096, buf.length);
		while (pos < scanLimit) {
			const child = readElementId(buf, pos);
			const childSize = readVintSize(buf, pos + child.width);
			if (childSize.value === null) break;
			const totalElementSize = child.width + childSize.width + childSize.value;
			if (child.id === ID_SEEKHEAD) {
				// Void layout: 1-byte ID (0xEC) + size VINT + zeroed content,
				// so the VINT must encode (totalElementSize - 1).
				const voidContentSize = totalElementSize - 1;
				let vintBytes;
				if (voidContentSize <= 126) vintBytes = [0x80 | voidContentSize];
				else if (voidContentSize <= 16382) vintBytes = [0x40 | voidContentSize >> 8, voidContentSize & 0xff];
				else vintBytes = [0x20 | voidContentSize >> 16, voidContentSize >> 8 & 0xff, voidContentSize & 0xff];
				buf[pos] = EBML_VOID_ID;
				vintBytes.forEach((byte, i) => {
					buf[pos + 1 + i] = byte;
				});
				buf.fill(0, pos + 1 + vintBytes.length, pos + totalElementSize);
				break; // at most one SeekHead is patched
			}
			pos += totalElementSize;
		}
	} catch {}
}
|
|
316
|
+
|
|
317
|
+
//#endregion
// Public surface: the index generator plus the in-place patch helper
// (the file listing shows both consumed by the generateTrack* tasks).
export { generateWebmSegmentIndex, patchWebmForSegmentedServing };
//# sourceMappingURL=generateWebmSegmentIndex.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"generateWebmSegmentIndex.js","names":["clusters: ClusterInfo[]","segments: TrackSegment[]","segmentClusters: ClusterInfo[]","videoStream","width: number","height: number","codec: string","sampleCount: number","trackStartTimeOffsetMs: number | undefined","trackIndex: TrackFragmentIndex","vintBytes: number[]"],"sources":["../src/generateWebmSegmentIndex.ts"],"sourcesContent":["/**\n * WebM / Matroska segment index generator.\n *\n * Instead of transcoding VP9-alpha WebM → fragmented MP4 (which strips the\n * alpha BlockAdditions), this module parses the source WebM's EBML structure\n * to produce byte-range segment maps directly into the original file.\n *\n * The pipeline:\n * 1. EBML parse: locate init region (EBML + Segment header + Info + Tracks)\n * and enumerate every Cluster with its timestamp.\n * 2. Keyframe detection: cross-reference cluster byte ranges with ffprobe\n * packet positions+flags to mark which clusters are keyframe-aligned.\n * 3. Segment grouping: accumulate clusters into ≥2-second segments that\n * start on keyframe clusters (mirrors generateFragmentIndex.ts logic).\n * 4. Output: same TrackFragmentIndex shape used by the MP4 path so the\n * rest of the pipeline (middleware, SegmentIndex.ts, EFVideo) is unchanged.\n *\n * Segment size patching\n * ---------------------\n * ffmpeg writes a known Segment-element size in the WebM header. When we\n * serve a subset of clusters, the size field must be replaced with the EBML\n * \"unknown\" value (0x01FFFFFFFFFFFFFF) so mediabunny does not expect more\n * bytes than we provide. 
This patch is applied once to the cached copy of\n * the source file (see generateTrack.ts).\n */\n\nimport { readFile } from \"node:fs/promises\";\nimport debug from \"debug\";\nimport type { TrackFragmentIndex, TrackSegment } from \"./Probe.js\";\nimport { PacketProbe } from \"./Probe.js\";\n\nconst log = debug(\"ef:generateWebmSegmentIndex\");\n\nconst MIN_SEGMENT_DURATION_MS = 2000;\n\n// ---------------------------------------------------------------------------\n// EBML element IDs (big-endian, variable width)\n// ---------------------------------------------------------------------------\nconst ID_EBML = 0x1a45dfa3;\nconst ID_SEGMENT = 0x18538067;\nconst ID_INFO = 0x1549a966;\nconst ID_CLUSTER = 0x1f43b675;\nconst ID_TIMESTAMP = 0xe7; // inside Cluster\nconst ID_SIMPLE_BLOCK = 0xa3; // inside Cluster\nconst ID_BLOCK_GROUP = 0xa0; // inside Cluster (contains Block + optional ReferenceBlock)\nconst ID_REFERENCE_BLOCK = 0xfb; // inside BlockGroup — presence means delta frame\nconst ID_TIMESTAMP_SCALE = 0x2ad7b1; // inside Info (nanoseconds per timestamp unit)\n\n// ---------------------------------------------------------------------------\n// EBML primitive decoders\n// ---------------------------------------------------------------------------\n\n/**\n * Decode an EBML variable-length integer used for **element sizes**.\n * The leading 1-bit encodes the byte-width; that bit is then stripped from\n * the value. Returns `null` for the \"unknown / streaming\" sentinel value\n * (all data-bits = 1).\n */\nfunction readVintSize(buf: Buffer, offset: number): { value: number | null; width: number } {\n const b = buf[offset]!;\n let width = 1;\n let mask = 0x80;\n while (width <= 8 && !(b & mask)) {\n mask >>= 1;\n width++;\n }\n let value = b & (mask - 1); // strip leading marker bit\n for (let i = 1; i < width; i++) {\n value = (value << 8) | (buf[offset + i] ?? 
0);\n }\n // All data-bits = 1 means \"unknown size\" in EBML\n const unknownSentinel = (1 << (7 * width)) - 1;\n return { value: value === unknownSentinel ? null : value, width };\n}\n\n/**\n * Decode an EBML **element ID**. Unlike sizes, the leading marker bit is\n * *kept* in the returned value (IDs are opaque bit patterns).\n */\nfunction readElementId(buf: Buffer, offset: number): { id: number; width: number } {\n const b = buf[offset]!;\n let width = 1;\n let mask = 0x80;\n while (width <= 4 && !(b & mask)) {\n mask >>= 1;\n width++;\n }\n let id = b;\n for (let i = 1; i < width; i++) {\n id = (id << 8) | (buf[offset + i] ?? 0);\n }\n return { id, width };\n}\n\n/** Read a big-endian unsigned integer of `size` bytes. */\nfunction readUintBE(buf: Buffer, offset: number, size: number): number {\n let val = 0;\n for (let i = 0; i < size; i++) {\n val = (val * 256 + (buf[offset + i] ?? 0)) >>> 0;\n }\n return val;\n}\n\n// ---------------------------------------------------------------------------\n// WebM structure parser\n// ---------------------------------------------------------------------------\n\ninterface ClusterInfo {\n /** Byte offset of the Cluster element's ID field in the file. */\n offset: number;\n /** Total byte length of the Cluster element (header + content). */\n size: number;\n /** Cluster base timestamp converted to milliseconds. */\n timestampMs: number;\n /** True if any SimpleBlock within the cluster has the keyframe flag set. */\n hasKeyframe: boolean;\n}\n\ninterface WebmStructure {\n /**\n * Byte offset of the 8-byte Segment-size VINT that must be patched to the\n * EBML unknown-size sentinel when serving partial content.\n */\n segmentSizeVintOffset: number;\n /** Byte length of the Segment-size VINT (should be 8 for ffmpeg output). */\n segmentSizeVintWidth: number;\n /** Byte offset of the first Cluster element == end of the \"init\" region. */\n initSize: number;\n /** All Cluster elements found in the file, in order. 
*/\n clusters: ClusterInfo[];\n /** TimestampScale in nanoseconds per unit (default 1 000 000 = 1 ms/unit). */\n timestampScaleNs: number;\n}\n\n/**\n * Parse the top-level EBML and Segment structure of a WebM file.\n * Only reads as deep as needed: EBML header → Segment → Info/Tracks/Clusters.\n */\nfunction parseWebmStructure(buf: Buffer): WebmStructure {\n let pos = 0;\n\n // ── EBML element ─────────────────────────────────────────────────────────\n const { id: ebmlId, width: ebmlIdWidth } = readElementId(buf, pos);\n if (ebmlId !== ID_EBML) {\n throw new Error(`Expected EBML element (0x1A45DFA3), got 0x${ebmlId.toString(16)}`);\n }\n const { value: ebmlSize, width: ebmlSizeWidth } = readVintSize(buf, pos + ebmlIdWidth);\n if (ebmlSize === null) throw new Error(\"EBML element has unknown size\");\n pos += ebmlIdWidth + ebmlSizeWidth + ebmlSize; // skip to Segment\n\n // ── Segment element ───────────────────────────────────────────────────────\n const { id: segId, width: segIdWidth } = readElementId(buf, pos);\n if (segId !== ID_SEGMENT) {\n throw new Error(`Expected Segment element (0x18538067), got 0x${segId.toString(16)}`);\n }\n const segmentSizeVintOffset = pos + segIdWidth;\n const { width: segSizeWidth } = readVintSize(buf, segmentSizeVintOffset);\n const segContentStart = segmentSizeVintOffset + segSizeWidth;\n\n // ── Segment children ──────────────────────────────────────────────────────\n let spos = segContentStart;\n let initSize = 0;\n let timestampScaleNs = 1_000_000; // default: 1 ms per unit\n const clusters: ClusterInfo[] = [];\n\n while (spos < buf.length) {\n if (buf.length - spos < 2) break; // not enough bytes for a header\n\n const { id: childId, width: childIdWidth } = readElementId(buf, spos);\n const { value: childSize, width: childSizeWidth } = readVintSize(buf, spos + childIdWidth);\n const headerSize = childIdWidth + childSizeWidth;\n\n if (childSize === null) {\n // Unknown-size element (rare in well-formed WebM) — stop scanning.\n 
log(`Unknown-size element 0x${childId.toString(16)} at ${spos}, stopping`);\n break;\n }\n\n const elementEnd = spos + headerSize + childSize;\n\n if (childId === ID_INFO) {\n // Extract TimestampScale from Info\n let ipos = spos + headerSize;\n while (ipos < elementEnd && ipos < buf.length) {\n const { id: infoChildId, width: infoIdW } = readElementId(buf, ipos);\n const { value: infoChildSize, width: infoSizeW } = readVintSize(buf, ipos + infoIdW);\n if (infoChildSize === null) break;\n if (infoChildId === ID_TIMESTAMP_SCALE) {\n timestampScaleNs = readUintBE(buf, ipos + infoIdW + infoSizeW, infoChildSize);\n }\n ipos += infoIdW + infoSizeW + infoChildSize;\n }\n } else if (childId === ID_CLUSTER) {\n if (initSize === 0) {\n initSize = spos; // first cluster offset = init region ends here\n }\n\n // Extract Cluster Timestamp and scan for keyframe SimpleBlocks\n let clusterTimestampUnits = 0;\n let hasKeyframe = false;\n let cpos = spos + headerSize;\n\n while (cpos < elementEnd && cpos < buf.length) {\n if (buf.length - cpos < 2) break;\n const { id: cid, width: cidW } = readElementId(buf, cpos);\n const { value: csize, width: csizeW } = readVintSize(buf, cpos + cidW);\n if (csize === null) break;\n const cHeaderSize = cidW + csizeW;\n\n if (cid === ID_TIMESTAMP) {\n clusterTimestampUnits = readUintBE(buf, cpos + cHeaderSize, csize);\n } else if (cid === ID_SIMPLE_BLOCK) {\n // SimpleBlock layout: [track VINT][relative timecode int16BE][flags byte][frame data]\n const sbOffset = cpos + cHeaderSize;\n const { width: trackNumWidth } = readVintSize(buf, sbOffset);\n const flagsByte = buf[sbOffset + trackNumWidth + 2] ?? 
0;\n if (flagsByte & 0x80) {\n hasKeyframe = true;\n }\n } else if (cid === ID_BLOCK_GROUP) {\n // BlockGroup: check for absence of ReferenceBlock → keyframe\n let bgpos = cpos + cHeaderSize;\n const bgEnd = cpos + cHeaderSize + csize;\n let hasReferenceBlock = false;\n while (bgpos < bgEnd && bgpos < buf.length) {\n const { id: bgid, width: bgidW } = readElementId(buf, bgpos);\n const { value: bgsize, width: bgsizeW } = readVintSize(buf, bgpos + bgidW);\n if (bgsize === null) break;\n if (bgid === ID_REFERENCE_BLOCK) {\n hasReferenceBlock = true;\n break;\n }\n bgpos += bgidW + bgsizeW + bgsize;\n }\n if (!hasReferenceBlock) hasKeyframe = true;\n }\n\n cpos += cHeaderSize + csize;\n }\n\n const timestampMs = Math.round((clusterTimestampUnits * timestampScaleNs) / 1_000_000);\n clusters.push({ offset: spos, size: headerSize + childSize, timestampMs, hasKeyframe });\n }\n\n spos = elementEnd;\n }\n\n if (initSize === 0) {\n throw new Error(\"No Cluster elements found in WebM file\");\n }\n\n return {\n segmentSizeVintOffset,\n segmentSizeVintWidth: segSizeWidth,\n initSize,\n clusters,\n timestampScaleNs,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Keyframe detection via ffprobe packet positions\n// ---------------------------------------------------------------------------\n\n/**\n * Use ffprobe packet data to mark clusters that contain keyframe packets.\n * More reliable than parsing SimpleBlock flags in-process for large files.\n */\nasync function annotateKeyframes(clusters: ClusterInfo[], absolutePath: string): Promise<void> {\n const probe = await PacketProbe.probePath(absolutePath);\n const keyframePositions = (probe.packets as Array<{ pos?: number; flags?: string }>)\n .filter((p) => p.flags?.includes(\"K\") && p.pos !== undefined)\n .map((p) => p.pos as number);\n\n for (const cluster of clusters) {\n const clusterEnd = cluster.offset + cluster.size;\n cluster.hasKeyframe = keyframePositions.some(\n (pos) => 
pos >= cluster.offset && pos < clusterEnd,\n );\n }\n}\n\n// ---------------------------------------------------------------------------\n// Segment grouping (mirrors generateFragmentIndex.ts logic)\n// ---------------------------------------------------------------------------\n\nfunction buildSegments(\n clusters: ClusterInfo[],\n timescale: number,\n totalDurationMs: number,\n): TrackSegment[] {\n const segments: TrackSegment[] = [];\n\n let segmentClusters: ClusterInfo[] = [];\n let segmentStartMs = 0;\n\n const flushSegment = (nextStartMs: number) => {\n if (segmentClusters.length === 0) return;\n const first = segmentClusters[0]!;\n const last = segmentClusters[segmentClusters.length - 1]!;\n const offset = first.offset;\n const size = last.offset + last.size - first.offset;\n const cts = Math.round((segmentStartMs * timescale) / 1000);\n const dts = cts;\n const duration = Math.round(((nextStartMs - segmentStartMs) * timescale) / 1000);\n segments.push({ cts, dts, offset, size, duration });\n segmentClusters = [];\n };\n\n for (let i = 0; i < clusters.length; i++) {\n const cluster = clusters[i]!;\n const durationSoFarMs = segmentClusters.length > 0 ? 
cluster.timestampMs - segmentStartMs : 0;\n\n const shouldFinalize =\n segmentClusters.length > 0 &&\n durationSoFarMs >= MIN_SEGMENT_DURATION_MS &&\n cluster.hasKeyframe;\n\n if (shouldFinalize) {\n flushSegment(cluster.timestampMs);\n segmentStartMs = cluster.timestampMs;\n }\n\n if (segmentClusters.length === 0) {\n if (!cluster.hasKeyframe) continue; // wait for first keyframe\n segmentStartMs = cluster.timestampMs;\n }\n\n segmentClusters.push(cluster);\n }\n\n // Flush final segment\n flushSegment(totalDurationMs);\n\n return segments;\n}\n\n// ---------------------------------------------------------------------------\n// Public entry point\n// ---------------------------------------------------------------------------\n\n/**\n * Build a TrackFragmentIndex for a VP9-alpha WebM source without any\n * transcoding. The returned index contains byte offsets that point directly\n * into the cached copy of the source file (which has its Segment-size VINT\n * patched to \"unknown\" by generateTrack.ts).\n */\nexport async function generateWebmSegmentIndex(\n absolutePath: string,\n startTimeOffsetMs?: number,\n): Promise<Record<number, TrackFragmentIndex>> {\n log(`Generating WebM segment index for ${absolutePath}`);\n\n const buf = await readFile(absolutePath);\n const structure = parseWebmStructure(buf);\n\n const { initSize, clusters, timestampScaleNs } = structure;\n const timescale = 1000; // work in milliseconds\n\n log(\n `Parsed ${clusters.length} clusters, init=${initSize} bytes, ` +\n `timestampScale=${timestampScaleNs}ns`,\n );\n\n // Annotate keyframes via ffprobe for accuracy\n await annotateKeyframes(clusters, absolutePath);\n\n const keyframeClusters = clusters.filter((c) => c.hasKeyframe);\n log(\n `Keyframe clusters: ${keyframeClusters.map((c) => `${c.timestampMs}ms@${c.offset}`).join(\", \")}`,\n );\n\n // Total duration from last cluster\n const lastCluster = clusters[clusters.length - 1]!;\n const totalDurationMs = lastCluster.timestampMs + 
lastCluster.size / 4096; // rough estimate\n // Better: use ffprobe stream duration\n let finalDurationMs = totalDurationMs;\n try {\n const probe = await PacketProbe.probePath(absolutePath);\n const videoStream = (probe as any).data?.streams?.find((s: any) => s.codec_type === \"video\");\n if (videoStream?.duration) {\n finalDurationMs = Math.round(parseFloat(videoStream.duration) * 1000);\n }\n } catch {\n // fall back to rough estimate\n }\n\n log(`Total duration: ${finalDurationMs}ms`);\n\n const segments = buildSegments(clusters, timescale, finalDurationMs);\n\n log(\n `Built ${segments.length} segments: ${segments.map((s) => `[${((s.cts / timescale) * 1000) | 0}ms offset=${s.offset} size=${s.size}]`).join(\", \")}`,\n );\n\n // Track metadata from ffprobe\n const probe2 = await PacketProbe.probePath(absolutePath);\n const videoStream = (probe2 as any).data?.streams?.find((s: any) => s.codec_type === \"video\");\n const width: number = videoStream?.width ?? 1920;\n const height: number = videoStream?.height ?? 1080;\n const codec: string = videoStream?.codec_tag_string ?? videoStream?.codec_name ?? \"vp09\";\n const sampleCount: number =\n (probe2 as any).packets?.filter((p: any) => p.codec_type !== \"audio\").length ?? 
0;\n\n let trackStartTimeOffsetMs: number | undefined;\n if (startTimeOffsetMs !== undefined) {\n trackStartTimeOffsetMs = startTimeOffsetMs;\n } else if (clusters[0] && clusters[0].timestampMs > 1) {\n trackStartTimeOffsetMs = clusters[0].timestampMs;\n }\n\n const trackIndex: TrackFragmentIndex = {\n track: 1,\n type: \"video\",\n width,\n height,\n timescale,\n codec,\n duration: Math.round((finalDurationMs * timescale) / 1000),\n sample_count: sampleCount,\n startTimeOffsetMs: trackStartTimeOffsetMs,\n initSegment: { offset: 0, size: initSize },\n segments,\n };\n\n return { 1: trackIndex };\n}\n\n// ---------------------------------------------------------------------------\n// Helpers exported for use in generateTrack.ts\n// ---------------------------------------------------------------------------\n\nconst ID_SEEKHEAD = 0x114d9b74;\nconst EBML_VOID_ID = 0xec;\n\n/**\n * Patch a WebM buffer in-place to make it safe for partial (segment-sliced) serving:\n *\n * 1. Segment-element size VINT → EBML unknown-size (0x01FFFFFFFFFFFFFF).\n * Without this, mediabunny expects the full 44 MB when receiving a 5 MB chunk.\n *\n * 2. SeekHead element → EBML Void of the same byte length.\n * The SeekHead contains Cues/cluster offsets relative to the original file.\n * When mediabunny uses those offsets on a sliced buffer it reads garbage or\n * falls off the end of the buffer, causing seek errors and de-sync. 
Replacing\n * it with a Void element forces linear parsing: EBML → Info → Tracks → Clusters.\n *\n * Both patches are applied to a Buffer copy; the source file on disk is not modified.\n */\nexport function patchWebmForSegmentedServing(buf: Buffer): void {\n try {\n // ── Locate EBML and Segment headers ───────────────────────────────────\n const { id: ebmlId, width: ebmlIdWidth } = readElementId(buf, 0);\n if (ebmlId !== ID_EBML) return;\n const { value: ebmlSize, width: ebmlSizeWidth } = readVintSize(buf, ebmlIdWidth);\n if (ebmlSize === null) return;\n const segPos = ebmlIdWidth + ebmlSizeWidth + ebmlSize;\n const { id: segId, width: segIdWidth } = readElementId(buf, segPos);\n if (segId !== ID_SEGMENT) return;\n\n // ── Patch 1: Segment size → unknown ───────────────────────────────────\n const segSizeOffset = segPos + segIdWidth;\n const { width: segSizeWidth } = readVintSize(buf, segSizeOffset);\n if (segSizeWidth === 8) {\n const UNKNOWN_SIZE = Buffer.from([0x01, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]);\n UNKNOWN_SIZE.copy(buf, segSizeOffset);\n }\n\n // ── Patch 2: SeekHead → Void ──────────────────────────────────────────\n const segContentStart = segPos + segIdWidth + segSizeWidth;\n let pos = segContentStart;\n\n // Scan segment children until we find SeekHead (it is always the first child\n // in ffmpeg-generated WebM files, but scan defensively up to 4 KB).\n const scanLimit = Math.min(pos + 4096, buf.length);\n while (pos < scanLimit) {\n const { id: childId, width: childIdWidth } = readElementId(buf, pos);\n const { value: childSize, width: childSizeWidth } = readVintSize(buf, pos + childIdWidth);\n if (childSize === null) break;\n const totalElementSize = childIdWidth + childSizeWidth + childSize;\n\n if (childId === ID_SEEKHEAD) {\n // Overwrite the entire SeekHead element with a Void element of equal size.\n // Void layout: 1-byte ID (0xEC) + VINT size + zero content.\n // We need VINT(totalElementSize - 1) to account for the 1-byte Void ID.\n 
const voidContentSize = totalElementSize - 1; // after the 0xEC ID byte\n\n // Encode the size as a VINT. voidContentSize is small enough for 2-byte VINT\n // (max 16383) or even 1-byte (max 127) for typical SeekHead sizes.\n let vintBytes: number[];\n if (voidContentSize <= 126) {\n // 1-byte VINT: 0x80 | value\n vintBytes = [0x80 | voidContentSize];\n } else if (voidContentSize <= 16382) {\n // 2-byte VINT: 0x40xx\n vintBytes = [0x40 | (voidContentSize >> 8), voidContentSize & 0xff];\n } else {\n // 3-byte VINT (should never be needed for SeekHead)\n vintBytes = [\n 0x20 | (voidContentSize >> 16),\n (voidContentSize >> 8) & 0xff,\n voidContentSize & 0xff,\n ];\n }\n\n buf[pos] = EBML_VOID_ID;\n for (let i = 0; i < vintBytes.length; i++) {\n buf[pos + 1 + i] = vintBytes[i]!;\n }\n // Zero out the rest of the element\n buf.fill(0, pos + 1 + vintBytes.length, pos + totalElementSize);\n break; // only one SeekHead per file\n }\n\n pos += totalElementSize;\n }\n } catch {\n // Best-effort — if parsing fails the file is served unpatched\n 
}\n}\n"],"mappings":";;;;;AA+BA,MAAM,MAAM,MAAM,8BAA8B;AAEhD,MAAM,0BAA0B;AAKhC,MAAM,UAAU;AAChB,MAAM,aAAa;AACnB,MAAM,UAAU;AAChB,MAAM,aAAa;AACnB,MAAM,eAAe;AACrB,MAAM,kBAAkB;AACxB,MAAM,iBAAiB;AACvB,MAAM,qBAAqB;AAC3B,MAAM,qBAAqB;;;;;;;AAY3B,SAAS,aAAa,KAAa,QAAyD;CAC1F,MAAM,IAAI,IAAI;CACd,IAAI,QAAQ;CACZ,IAAI,OAAO;AACX,QAAO,SAAS,KAAK,EAAE,IAAI,OAAO;AAChC,WAAS;AACT;;CAEF,IAAI,QAAQ,IAAK,OAAO;AACxB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,IACzB,SAAS,SAAS,KAAM,IAAI,SAAS,MAAM;CAG7C,MAAM,mBAAmB,KAAM,IAAI,SAAU;AAC7C,QAAO;EAAE,OAAO,UAAU,kBAAkB,OAAO;EAAO;EAAO;;;;;;AAOnE,SAAS,cAAc,KAAa,QAA+C;CACjF,MAAM,IAAI,IAAI;CACd,IAAI,QAAQ;CACZ,IAAI,OAAO;AACX,QAAO,SAAS,KAAK,EAAE,IAAI,OAAO;AAChC,WAAS;AACT;;CAEF,IAAI,KAAK;AACT,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,IACzB,MAAM,MAAM,KAAM,IAAI,SAAS,MAAM;AAEvC,QAAO;EAAE;EAAI;EAAO;;;AAItB,SAAS,WAAW,KAAa,QAAgB,MAAsB;CACrE,IAAI,MAAM;AACV,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,IACxB,OAAO,MAAM,OAAO,IAAI,SAAS,MAAM,OAAQ;AAEjD,QAAO;;;;;;AAsCT,SAAS,mBAAmB,KAA4B;CACtD,IAAI,MAAM;CAGV,MAAM,EAAE,IAAI,QAAQ,OAAO,gBAAgB,cAAc,KAAK,IAAI;AAClE,KAAI,WAAW,QACb,OAAM,IAAI,MAAM,6CAA6C,OAAO,SAAS,GAAG,GAAG;CAErF,MAAM,EAAE,OAAO,UAAU,OAAO,kBAAkB,aAAa,KAAK,MAAM,YAAY;AACtF,KAAI,aAAa,KAAM,OAAM,IAAI,MAAM,gCAAgC;AACvE,QAAO,cAAc,gBAAgB;CAGrC,MAAM,EAAE,IAAI,OAAO,OAAO,eAAe,cAAc,KAAK,IAAI;AAChE,KAAI,UAAU,WACZ,OAAM,IAAI,MAAM,gDAAgD,MAAM,SAAS,GAAG,GAAG;CAEvF,MAAM,wBAAwB,MAAM;CACpC,MAAM,EAAE,OAAO,iBAAiB,aAAa,KAAK,sBAAsB;CAIxE,IAAI,OAHoB,wBAAwB;CAIhD,IAAI,WAAW;CACf,IAAI,mBAAmB;CACvB,MAAMA,WAA0B,EAAE;AAElC,QAAO,OAAO,IAAI,QAAQ;AACxB,MAAI,IAAI,SAAS,OAAO,EAAG;EAE3B,MAAM,EAAE,IAAI,SAAS,OAAO,iBAAiB,cAAc,KAAK,KAAK;EACrE,MAAM,EAAE,OAAO,WAAW,OAAO,mBAAmB,aAAa,KAAK,OAAO,aAAa;EAC1F,MAAM,aAAa,eAAe;AAElC,MAAI,cAAc,MAAM;AAEtB,OAAI,0BAA0B,QAAQ,SAAS,GAAG,CAAC,MAAM,KAAK,YAAY;AAC1E;;EAGF,MAAM,aAAa,OAAO,aAAa;AAEvC,MAAI,YAAY,SAAS;GAEvB,IAAI,OAAO,OAAO;AAClB,UAAO,OAAO,cAAc,OAAO,IAAI,QAAQ;IAC7C,MAAM,EAAE,IAAI,aAAa,OAAO,YAAY,cAAc,KAAK,KAAK;IACpE,MAAM,EAAE,OAAO,eAAe,OAAO,cAAc,aAAa,KAAK,OAAO,QAAQ;AACpF,QAAI,kBAAkB,KAAM;AAC5B,QAAI,gBAAgB,mBAClB,oBA
AmB,WAAW,KAAK,OAAO,UAAU,WAAW,cAAc;AAE/E,YAAQ,UAAU,YAAY;;aAEvB,YAAY,YAAY;AACjC,OAAI,aAAa,EACf,YAAW;GAIb,IAAI,wBAAwB;GAC5B,IAAI,cAAc;GAClB,IAAI,OAAO,OAAO;AAElB,UAAO,OAAO,cAAc,OAAO,IAAI,QAAQ;AAC7C,QAAI,IAAI,SAAS,OAAO,EAAG;IAC3B,MAAM,EAAE,IAAI,KAAK,OAAO,SAAS,cAAc,KAAK,KAAK;IACzD,MAAM,EAAE,OAAO,OAAO,OAAO,WAAW,aAAa,KAAK,OAAO,KAAK;AACtE,QAAI,UAAU,KAAM;IACpB,MAAM,cAAc,OAAO;AAE3B,QAAI,QAAQ,aACV,yBAAwB,WAAW,KAAK,OAAO,aAAa,MAAM;aACzD,QAAQ,iBAAiB;KAElC,MAAM,WAAW,OAAO;KACxB,MAAM,EAAE,OAAO,kBAAkB,aAAa,KAAK,SAAS;AAE5D,UADkB,IAAI,WAAW,gBAAgB,MAAM,KACvC,IACd,eAAc;eAEP,QAAQ,gBAAgB;KAEjC,IAAI,QAAQ,OAAO;KACnB,MAAM,QAAQ,OAAO,cAAc;KACnC,IAAI,oBAAoB;AACxB,YAAO,QAAQ,SAAS,QAAQ,IAAI,QAAQ;MAC1C,MAAM,EAAE,IAAI,MAAM,OAAO,UAAU,cAAc,KAAK,MAAM;MAC5D,MAAM,EAAE,OAAO,QAAQ,OAAO,YAAY,aAAa,KAAK,QAAQ,MAAM;AAC1E,UAAI,WAAW,KAAM;AACrB,UAAI,SAAS,oBAAoB;AAC/B,2BAAoB;AACpB;;AAEF,eAAS,QAAQ,UAAU;;AAE7B,SAAI,CAAC,kBAAmB,eAAc;;AAGxC,YAAQ,cAAc;;GAGxB,MAAM,cAAc,KAAK,MAAO,wBAAwB,mBAAoB,IAAU;AACtF,YAAS,KAAK;IAAE,QAAQ;IAAM,MAAM,aAAa;IAAW;IAAa;IAAa,CAAC;;AAGzF,SAAO;;AAGT,KAAI,aAAa,EACf,OAAM,IAAI,MAAM,yCAAyC;AAG3D,QAAO;EACL;EACA,sBAAsB;EACtB;EACA;EACA;EACD;;;;;;AAWH,eAAe,kBAAkB,UAAyB,cAAqC;CAE7F,MAAM,qBADQ,MAAM,YAAY,UAAU,aAAa,EACtB,QAC9B,QAAQ,MAAM,EAAE,OAAO,SAAS,IAAI,IAAI,EAAE,QAAQ,OAAU,CAC5D,KAAK,MAAM,EAAE,IAAc;AAE9B,MAAK,MAAM,WAAW,UAAU;EAC9B,MAAM,aAAa,QAAQ,SAAS,QAAQ;AAC5C,UAAQ,cAAc,kBAAkB,MACrC,QAAQ,OAAO,QAAQ,UAAU,MAAM,WACzC;;;AAQL,SAAS,cACP,UACA,WACA,iBACgB;CAChB,MAAMC,WAA2B,EAAE;CAEnC,IAAIC,kBAAiC,EAAE;CACvC,IAAI,iBAAiB;CAErB,MAAM,gBAAgB,gBAAwB;AAC5C,MAAI,gBAAgB,WAAW,EAAG;EAClC,MAAM,QAAQ,gBAAgB;EAC9B,MAAM,OAAO,gBAAgB,gBAAgB,SAAS;EACtD,MAAM,SAAS,MAAM;EACrB,MAAM,OAAO,KAAK,SAAS,KAAK,OAAO,MAAM;EAC7C,MAAM,MAAM,KAAK,MAAO,iBAAiB,YAAa,IAAK;EAC3D,MAAM,MAAM;EACZ,MAAM,WAAW,KAAK,OAAQ,cAAc,kBAAkB,YAAa,IAAK;AAChF,WAAS,KAAK;GAAE;GAAK;GAAK;GAAQ;GAAM;GAAU,CAAC;AACnD,oBAAkB,EAAE;;AAGtB,MAAK,IAAI,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;EACxC,MAAM,UAAU,SAAS;EACzB,MAAM,kBAAkB,gBAAgB,SAAS,IAAI,QAAQ,cAAc,iBAAiB;AAO5F,MAJE,gBAAgB,SAAS,KACzB,mBAAmB,2B
ACnB,QAAQ,aAEU;AAClB,gBAAa,QAAQ,YAAY;AACjC,oBAAiB,QAAQ;;AAG3B,MAAI,gBAAgB,WAAW,GAAG;AAChC,OAAI,CAAC,QAAQ,YAAa;AAC1B,oBAAiB,QAAQ;;AAG3B,kBAAgB,KAAK,QAAQ;;AAI/B,cAAa,gBAAgB;AAE7B,QAAO;;;;;;;;AAaT,eAAsB,yBACpB,cACA,mBAC6C;AAC7C,KAAI,qCAAqC,eAAe;CAKxD,MAAM,EAAE,UAAU,UAAU,qBAFV,mBADN,MAAM,SAAS,aAAa,CACC;CAGzC,MAAM,YAAY;AAElB,KACE,UAAU,SAAS,OAAO,kBAAkB,SAAS,yBACjC,iBAAiB,IACtC;AAGD,OAAM,kBAAkB,UAAU,aAAa;AAG/C,KACE,sBAFuB,SAAS,QAAQ,MAAM,EAAE,YAAY,CAErB,KAAK,MAAM,GAAG,EAAE,YAAY,KAAK,EAAE,SAAS,CAAC,KAAK,KAAK,GAC/F;CAGD,MAAM,cAAc,SAAS,SAAS,SAAS;CAG/C,IAAI,kBAFoB,YAAY,cAAc,YAAY,OAAO;AAGrE,KAAI;EAEF,MAAMC,iBADQ,MAAM,YAAY,UAAU,aAAa,EACpB,MAAM,SAAS,MAAM,MAAW,EAAE,eAAe,QAAQ;AAC5F,MAAIA,eAAa,SACf,mBAAkB,KAAK,MAAM,WAAWA,cAAY,SAAS,GAAG,IAAK;SAEjE;AAIR,KAAI,mBAAmB,gBAAgB,IAAI;CAE3C,MAAM,WAAW,cAAc,UAAU,WAAW,gBAAgB;AAEpE,KACE,SAAS,SAAS,OAAO,aAAa,SAAS,KAAK,MAAM,IAAM,EAAE,MAAM,YAAa,MAAQ,EAAE,YAAY,EAAE,OAAO,QAAQ,EAAE,KAAK,GAAG,CAAC,KAAK,KAAK,GAClJ;CAGD,MAAM,SAAS,MAAM,YAAY,UAAU,aAAa;CACxD,MAAM,cAAe,OAAe,MAAM,SAAS,MAAM,MAAW,EAAE,eAAe,QAAQ;CAC7F,MAAMC,QAAgB,aAAa,SAAS;CAC5C,MAAMC,SAAiB,aAAa,UAAU;CAC9C,MAAMC,QAAgB,aAAa,oBAAoB,aAAa,cAAc;CAClF,MAAMC,cACH,OAAe,SAAS,QAAQ,MAAW,EAAE,eAAe,QAAQ,CAAC,UAAU;CAElF,IAAIC;AACJ,KAAI,sBAAsB,OACxB,0BAAyB;UAChB,SAAS,MAAM,SAAS,GAAG,cAAc,EAClD,0BAAyB,SAAS,GAAG;CAGvC,MAAMC,aAAiC;EACrC,OAAO;EACP,MAAM;EACN;EACA;EACA;EACA;EACA,UAAU,KAAK,MAAO,kBAAkB,YAAa,IAAK;EAC1D,cAAc;EACd,mBAAmB;EACnB,aAAa;GAAE,QAAQ;GAAG,MAAM;GAAU;EAC1C;EACD;AAED,QAAO,EAAE,GAAG,YAAY;;AAO1B,MAAM,cAAc;AACpB,MAAM,eAAe;;;;;;;;;;;;;;;AAgBrB,SAAgB,6BAA6B,KAAmB;AAC9D,KAAI;EAEF,MAAM,EAAE,IAAI,QAAQ,OAAO,gBAAgB,cAAc,KAAK,EAAE;AAChE,MAAI,WAAW,QAAS;EACxB,MAAM,EAAE,OAAO,UAAU,OAAO,kBAAkB,aAAa,KAAK,YAAY;AAChF,MAAI,aAAa,KAAM;EACvB,MAAM,SAAS,cAAc,gBAAgB;EAC7C,MAAM,EAAE,IAAI,OAAO,OAAO,eAAe,cAAc,KAAK,OAAO;AACnE,MAAI,UAAU,WAAY;EAG1B,MAAM,gBAAgB,SAAS;EAC/B,MAAM,EAAE,OAAO,iBAAiB,aAAa,KAAK,cAAc;AAChE,MAAI,iBAAiB,EAEnB,CADqB,OAAO,KAAK;GAAC;GAAM;GAAM;GAAM;GAAM;GAAM;GAAM;GAAM;GAAK,CAAC,CACrE,KAAK,KAAK,cAAc;EAKvC,IAAI,MADoB,S
AAS,aAAa;EAK9C,MAAM,YAAY,KAAK,IAAI,MAAM,MAAM,IAAI,OAAO;AAClD,SAAO,MAAM,WAAW;GACtB,MAAM,EAAE,IAAI,SAAS,OAAO,iBAAiB,cAAc,KAAK,IAAI;GACpE,MAAM,EAAE,OAAO,WAAW,OAAO,mBAAmB,aAAa,KAAK,MAAM,aAAa;AACzF,OAAI,cAAc,KAAM;GACxB,MAAM,mBAAmB,eAAe,iBAAiB;AAEzD,OAAI,YAAY,aAAa;IAI3B,MAAM,kBAAkB,mBAAmB;IAI3C,IAAIC;AACJ,QAAI,mBAAmB,IAErB,aAAY,CAAC,MAAO,gBAAgB;aAC3B,mBAAmB,MAE5B,aAAY,CAAC,KAAQ,mBAAmB,GAAI,kBAAkB,IAAK;QAGnE,aAAY;KACV,KAAQ,mBAAmB;KAC1B,mBAAmB,IAAK;KACzB,kBAAkB;KACnB;AAGH,QAAI,OAAO;AACX,SAAK,IAAI,IAAI,GAAG,IAAI,UAAU,QAAQ,IACpC,KAAI,MAAM,IAAI,KAAK,UAAU;AAG/B,QAAI,KAAK,GAAG,MAAM,IAAI,UAAU,QAAQ,MAAM,iBAAiB;AAC/D;;AAGF,UAAO;;SAEH"}
|
|
@@ -1,19 +1,35 @@
|
|
|
1
1
|
const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
|
|
2
|
+
const require_Probe = require('../Probe.cjs');
|
|
2
3
|
const require_idempotentTask = require('../idempotentTask.cjs');
|
|
4
|
+
const require_generateWebmSegmentIndex = require('../generateWebmSegmentIndex.cjs');
|
|
3
5
|
const require_generateSingleTrack = require('../generateSingleTrack.cjs');
|
|
4
6
|
let debug = require("debug");
|
|
5
7
|
debug = require_rolldown_runtime.__toESM(debug);
|
|
8
|
+
let node_stream = require("node:stream");
|
|
9
|
+
node_stream = require_rolldown_runtime.__toESM(node_stream);
|
|
10
|
+
let node_fs_promises = require("node:fs/promises");
|
|
11
|
+
node_fs_promises = require_rolldown_runtime.__toESM(node_fs_promises);
|
|
6
12
|
let node_path = require("node:path");
|
|
7
13
|
node_path = require_rolldown_runtime.__toESM(node_path);
|
|
8
14
|
|
|
9
15
|
//#region src/tasks/generateTrack.ts
|
|
10
16
|
const generateTrackFromPath = async (absolutePath, trackId) => {
|
|
11
|
-
(0, debug.default)("ef:generateTrackFragment")
|
|
17
|
+
const log = (0, debug.default)("ef:generateTrackFragment");
|
|
18
|
+
log(`Generating track ${trackId} for ${absolutePath}`);
|
|
19
|
+
if ((await require_Probe.Probe.probePath(absolutePath)).hasAlphaVideo) {
|
|
20
|
+
log(`VP9 alpha WebM: copying source with Segment size + SeekHead patched`);
|
|
21
|
+
const buf = await (0, node_fs_promises.readFile)(absolutePath);
|
|
22
|
+
require_generateWebmSegmentIndex.patchWebmForSegmentedServing(buf);
|
|
23
|
+
return node_stream.Readable.from(buf);
|
|
24
|
+
}
|
|
12
25
|
return (await require_generateSingleTrack.generateSingleTrackFromPath(absolutePath, trackId)).stream;
|
|
13
26
|
};
|
|
14
27
|
const generateTrackTask = require_idempotentTask.idempotentTask({
|
|
15
28
|
label: "track",
|
|
16
|
-
filename: (absolutePath,
|
|
29
|
+
filename: (absolutePath, _trackId) => {
|
|
30
|
+
const ext = absolutePath.endsWith(".webm") ? "webm" : "mp4";
|
|
31
|
+
return `${(0, node_path.basename)(absolutePath)}.track-1.${ext}`;
|
|
32
|
+
},
|
|
17
33
|
runner: generateTrackFromPath
|
|
18
34
|
});
|
|
19
35
|
const generateTrack = async (cacheRoot, absolutePath, url) => {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"generateTrack.cjs","names":["generateSingleTrackFromPath","idempotentTask"],"sources":["../../src/tasks/generateTrack.ts"],"sourcesContent":["import { idempotentTask } from \"../idempotentTask.js\";\nimport debug from \"debug\";\nimport { basename } from \"node:path\";\nimport { generateSingleTrackFromPath } from \"../generateSingleTrack.js\";\n\nexport const generateTrackFromPath = async (absolutePath: string, trackId: number) => {\n const log = debug(\"ef:generateTrackFragment\");\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n //
|
|
1
|
+
{"version":3,"file":"generateTrack.cjs","names":["Probe","Readable","generateSingleTrackFromPath","idempotentTask"],"sources":["../../src/tasks/generateTrack.ts"],"sourcesContent":["import { idempotentTask } from \"../idempotentTask.js\";\nimport debug from \"debug\";\nimport { basename } from \"node:path\";\nimport { Readable } from \"node:stream\";\nimport { readFile } from \"node:fs/promises\";\nimport { Probe } from \"../Probe.js\";\nimport { generateSingleTrackFromPath } from \"../generateSingleTrack.js\";\nimport { patchWebmForSegmentedServing } from \"../generateWebmSegmentIndex.js\";\n\nexport const generateTrackFromPath = async (absolutePath: string, trackId: number) => {\n const log = debug(\"ef:generateTrackFragment\");\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // For VP9-alpha WebM sources: no transcoding needed.\n // Copy the source to cache with two in-place patches so mediabunny can\n // parse partial (segment-sliced) chunks without expecting the full file:\n // 1. Segment-element size → EBML unknown-size (0x01FFFFFFFFFFFFFF)\n // 2. SeekHead → Void element of equal size (prevents stale seeks to\n // positions that don't exist in the sliced chunk, which was causing\n // de-sync in Scene A).\n // Alpha BlockAdditions travel untouched in every cluster.\n if (probe.hasAlphaVideo) {\n log(`VP9 alpha WebM: copying source with Segment size + SeekHead patched`);\n const buf = await readFile(absolutePath);\n patchWebmForSegmentedServing(buf);\n return Readable.from(buf);\n }\n\n const result = await generateSingleTrackFromPath(absolutePath, trackId);\n return result.stream;\n};\n\nexport const generateTrackTask = idempotentTask({\n label: \"track\",\n filename: (absolutePath: string, _trackId: number) => {\n // VP9-alpha WebM sources are cached as .webm; everything else is .mp4.\n const ext = absolutePath.endsWith(\".webm\") ? 
\"webm\" : \"mp4\";\n return `${basename(absolutePath)}.track-1.${ext}`;\n },\n runner: generateTrackFromPath,\n});\n\nexport const generateTrack = async (cacheRoot: string, absolutePath: string, url: string) => {\n try {\n const trackId = new URL(`http://localhost${url}`).searchParams.get(\"trackId\");\n if (trackId === null) {\n throw new Error(\n \"No trackId provided. It must be specified in the query string: ?trackId=1 (for video) or ?trackId=2 (for audio)\",\n );\n }\n return await generateTrackTask(cacheRoot, absolutePath, Number(trackId));\n } catch (error) {\n console.error(error);\n console.trace(\"Error generating track\", error);\n throw error;\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;AASA,MAAa,wBAAwB,OAAO,cAAsB,YAAoB;CACpF,MAAM,yBAAY,2BAA2B;AAC7C,KAAI,oBAAoB,QAAQ,OAAO,eAAe;AAYtD,MAVc,MAAMA,oBAAM,UAAU,aAAa,EAUvC,eAAe;AACvB,MAAI,sEAAsE;EAC1E,MAAM,MAAM,qCAAe,aAAa;AACxC,gEAA6B,IAAI;AACjC,SAAOC,qBAAS,KAAK,IAAI;;AAI3B,SADe,MAAMC,wDAA4B,cAAc,QAAQ,EACzD;;AAGhB,MAAa,oBAAoBC,sCAAe;CAC9C,OAAO;CACP,WAAW,cAAsB,aAAqB;EAEpD,MAAM,MAAM,aAAa,SAAS,QAAQ,GAAG,SAAS;AACtD,SAAO,2BAAY,aAAa,CAAC,WAAW;;CAE9C,QAAQ;CACT,CAAC;AAEF,MAAa,gBAAgB,OAAO,WAAmB,cAAsB,QAAgB;AAC3F,KAAI;EACF,MAAM,UAAU,IAAI,IAAI,mBAAmB,MAAM,CAAC,aAAa,IAAI,UAAU;AAC7E,MAAI,YAAY,KACd,OAAM,IAAI,MACR,kHACD;AAEH,SAAO,MAAM,kBAAkB,WAAW,cAAc,OAAO,QAAQ,CAAC;UACjE,OAAO;AACd,UAAQ,MAAM,MAAM;AACpB,UAAQ,MAAM,0BAA0B,MAAM;AAC9C,QAAM"}
|
|
@@ -1,16 +1,30 @@
|
|
|
1
|
+
import { Probe } from "../Probe.js";
|
|
1
2
|
import { idempotentTask } from "../idempotentTask.js";
|
|
3
|
+
import { patchWebmForSegmentedServing } from "../generateWebmSegmentIndex.js";
|
|
2
4
|
import { generateSingleTrackFromPath } from "../generateSingleTrack.js";
|
|
3
5
|
import debug from "debug";
|
|
6
|
+
import { Readable } from "node:stream";
|
|
7
|
+
import { readFile } from "node:fs/promises";
|
|
4
8
|
import { basename } from "node:path";
|
|
5
9
|
|
|
6
10
|
//#region src/tasks/generateTrack.ts
|
|
7
11
|
const generateTrackFromPath = async (absolutePath, trackId) => {
|
|
8
|
-
debug("ef:generateTrackFragment")
|
|
12
|
+
const log = debug("ef:generateTrackFragment");
|
|
13
|
+
log(`Generating track ${trackId} for ${absolutePath}`);
|
|
14
|
+
if ((await Probe.probePath(absolutePath)).hasAlphaVideo) {
|
|
15
|
+
log(`VP9 alpha WebM: copying source with Segment size + SeekHead patched`);
|
|
16
|
+
const buf = await readFile(absolutePath);
|
|
17
|
+
patchWebmForSegmentedServing(buf);
|
|
18
|
+
return Readable.from(buf);
|
|
19
|
+
}
|
|
9
20
|
return (await generateSingleTrackFromPath(absolutePath, trackId)).stream;
|
|
10
21
|
};
|
|
11
22
|
const generateTrackTask = idempotentTask({
|
|
12
23
|
label: "track",
|
|
13
|
-
filename: (absolutePath,
|
|
24
|
+
filename: (absolutePath, _trackId) => {
|
|
25
|
+
const ext = absolutePath.endsWith(".webm") ? "webm" : "mp4";
|
|
26
|
+
return `${basename(absolutePath)}.track-1.${ext}`;
|
|
27
|
+
},
|
|
14
28
|
runner: generateTrackFromPath
|
|
15
29
|
});
|
|
16
30
|
const generateTrack = async (cacheRoot, absolutePath, url) => {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"generateTrack.js","names":[],"sources":["../../src/tasks/generateTrack.ts"],"sourcesContent":["import { idempotentTask } from \"../idempotentTask.js\";\nimport debug from \"debug\";\nimport { basename } from \"node:path\";\nimport { generateSingleTrackFromPath } from \"../generateSingleTrack.js\";\n\nexport const generateTrackFromPath = async (absolutePath: string, trackId: number) => {\n const log = debug(\"ef:generateTrackFragment\");\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n //
|
|
1
|
+
{"version":3,"file":"generateTrack.js","names":[],"sources":["../../src/tasks/generateTrack.ts"],"sourcesContent":["import { idempotentTask } from \"../idempotentTask.js\";\nimport debug from \"debug\";\nimport { basename } from \"node:path\";\nimport { Readable } from \"node:stream\";\nimport { readFile } from \"node:fs/promises\";\nimport { Probe } from \"../Probe.js\";\nimport { generateSingleTrackFromPath } from \"../generateSingleTrack.js\";\nimport { patchWebmForSegmentedServing } from \"../generateWebmSegmentIndex.js\";\n\nexport const generateTrackFromPath = async (absolutePath: string, trackId: number) => {\n const log = debug(\"ef:generateTrackFragment\");\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // For VP9-alpha WebM sources: no transcoding needed.\n // Copy the source to cache with two in-place patches so mediabunny can\n // parse partial (segment-sliced) chunks without expecting the full file:\n // 1. Segment-element size → EBML unknown-size (0x01FFFFFFFFFFFFFF)\n // 2. SeekHead → Void element of equal size (prevents stale seeks to\n // positions that don't exist in the sliced chunk, which was causing\n // de-sync in Scene A).\n // Alpha BlockAdditions travel untouched in every cluster.\n if (probe.hasAlphaVideo) {\n log(`VP9 alpha WebM: copying source with Segment size + SeekHead patched`);\n const buf = await readFile(absolutePath);\n patchWebmForSegmentedServing(buf);\n return Readable.from(buf);\n }\n\n const result = await generateSingleTrackFromPath(absolutePath, trackId);\n return result.stream;\n};\n\nexport const generateTrackTask = idempotentTask({\n label: \"track\",\n filename: (absolutePath: string, _trackId: number) => {\n // VP9-alpha WebM sources are cached as .webm; everything else is .mp4.\n const ext = absolutePath.endsWith(\".webm\") ? 
\"webm\" : \"mp4\";\n return `${basename(absolutePath)}.track-1.${ext}`;\n },\n runner: generateTrackFromPath,\n});\n\nexport const generateTrack = async (cacheRoot: string, absolutePath: string, url: string) => {\n try {\n const trackId = new URL(`http://localhost${url}`).searchParams.get(\"trackId\");\n if (trackId === null) {\n throw new Error(\n \"No trackId provided. It must be specified in the query string: ?trackId=1 (for video) or ?trackId=2 (for audio)\",\n );\n }\n return await generateTrackTask(cacheRoot, absolutePath, Number(trackId));\n } catch (error) {\n console.error(error);\n console.trace(\"Error generating track\", error);\n throw error;\n }\n};\n"],"mappings":";;;;;;;;;;AASA,MAAa,wBAAwB,OAAO,cAAsB,YAAoB;CACpF,MAAM,MAAM,MAAM,2BAA2B;AAC7C,KAAI,oBAAoB,QAAQ,OAAO,eAAe;AAYtD,MAVc,MAAM,MAAM,UAAU,aAAa,EAUvC,eAAe;AACvB,MAAI,sEAAsE;EAC1E,MAAM,MAAM,MAAM,SAAS,aAAa;AACxC,+BAA6B,IAAI;AACjC,SAAO,SAAS,KAAK,IAAI;;AAI3B,SADe,MAAM,4BAA4B,cAAc,QAAQ,EACzD;;AAGhB,MAAa,oBAAoB,eAAe;CAC9C,OAAO;CACP,WAAW,cAAsB,aAAqB;EAEpD,MAAM,MAAM,aAAa,SAAS,QAAQ,GAAG,SAAS;AACtD,SAAO,GAAG,SAAS,aAAa,CAAC,WAAW;;CAE9C,QAAQ;CACT,CAAC;AAEF,MAAa,gBAAgB,OAAO,WAAmB,cAAsB,QAAgB;AAC3F,KAAI;EACF,MAAM,UAAU,IAAI,IAAI,mBAAmB,MAAM,CAAC,aAAa,IAAI,UAAU;AAC7E,MAAI,YAAY,KACd,OAAM,IAAI,MACR,kHACD;AAEH,SAAO,MAAM,kBAAkB,WAAW,cAAc,OAAO,QAAQ,CAAC;UACjE,OAAO;AACd,UAAQ,MAAM,MAAM;AACpB,UAAQ,MAAM,0BAA0B,MAAM;AAC9C,QAAM"}
|
|
@@ -2,6 +2,7 @@ const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
|
|
|
2
2
|
const require_Probe = require('../Probe.cjs');
|
|
3
3
|
const require_generateFragmentIndex = require('../generateFragmentIndex.cjs');
|
|
4
4
|
const require_idempotentTask = require('../idempotentTask.cjs');
|
|
5
|
+
const require_generateWebmSegmentIndex = require('../generateWebmSegmentIndex.cjs');
|
|
5
6
|
let debug = require("debug");
|
|
6
7
|
debug = require_rolldown_runtime.__toESM(debug);
|
|
7
8
|
let node_path = require("node:path");
|
|
@@ -14,6 +15,24 @@ const generateTrackFragmentIndexFromPath = async (absolutePath) => {
|
|
|
14
15
|
const startTimeOffsetMs = probe.startTimeOffsetMs;
|
|
15
16
|
if (startTimeOffsetMs !== void 0) log(`Extracted start_time offset: ${startTimeOffsetMs}ms`);
|
|
16
17
|
else log("No format/stream timing offset found - will detect from composition time");
|
|
18
|
+
if (probe.hasAlphaVideo) {
|
|
19
|
+
log(`VP9 alpha WebM: using EBML cluster index (no transcoding)`);
|
|
20
|
+
const webmIndex = await require_generateWebmSegmentIndex.generateWebmSegmentIndex(absolutePath, startTimeOffsetMs);
|
|
21
|
+
const scrubResult$1 = await (async () => {
|
|
22
|
+
try {
|
|
23
|
+
log("Generating scrub track fragment index for alpha WebM");
|
|
24
|
+
const result = await require_generateFragmentIndex.generateFragmentIndex(probe.createScrubTrackReadstream(), startTimeOffsetMs, { 0: -1 });
|
|
25
|
+
log("Scrub track fragment index generated successfully");
|
|
26
|
+
return result;
|
|
27
|
+
} catch (error) {
|
|
28
|
+
log(`Failed to generate scrub track fragment index: ${error}`);
|
|
29
|
+
return null;
|
|
30
|
+
}
|
|
31
|
+
})();
|
|
32
|
+
const trackFragmentIndexes$1 = { ...webmIndex };
|
|
33
|
+
if (scrubResult$1) Object.assign(trackFragmentIndexes$1, scrubResult$1);
|
|
34
|
+
return trackFragmentIndexes$1;
|
|
35
|
+
}
|
|
17
36
|
log(`Generating track fragment index for ${absolutePath} using single-track approach`);
|
|
18
37
|
const trackTasks = probe.streams.map((stream, streamIndex) => {
|
|
19
38
|
if (stream.codec_type !== "audio" && stream.codec_type !== "video") return null;
|
|
@@ -24,9 +43,7 @@ const generateTrackFragmentIndexFromPath = async (absolutePath) => {
|
|
|
24
43
|
const scrubTask = probe.videoStreams.length > 0 ? (async () => {
|
|
25
44
|
try {
|
|
26
45
|
log("Generating scrub track fragment index");
|
|
27
|
-
const
|
|
28
|
-
const scrubTrackId = -1;
|
|
29
|
-
const result = await require_generateFragmentIndex.generateFragmentIndex(scrubStream, startTimeOffsetMs, { 0: scrubTrackId });
|
|
46
|
+
const result = await require_generateFragmentIndex.generateFragmentIndex(probe.createScrubTrackReadstream(), startTimeOffsetMs, { 0: -1 });
|
|
30
47
|
log("Scrub track fragment index generated successfully");
|
|
31
48
|
return result;
|
|
32
49
|
} catch (error) {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"generateTrackFragmentIndex.cjs","names":["Probe","generateFragmentIndex","
|
|
1
|
+
{"version":3,"file":"generateTrackFragmentIndex.cjs","names":["Probe","generateWebmSegmentIndex","scrubResult","generateFragmentIndex","trackFragmentIndexes: Record<number, TrackFragmentIndex>","trackFragmentIndexes","scrubTask: Promise<Record<number, TrackFragmentIndex> | null>","idempotentTask"],"sources":["../../src/tasks/generateTrackFragmentIndex.ts"],"sourcesContent":["import { idempotentTask } from \"../idempotentTask.js\";\nimport debug from \"debug\";\nimport { basename } from \"node:path\";\nimport { Probe } from \"../Probe.js\";\nimport { generateFragmentIndex } from \"../generateFragmentIndex.js\";\nimport { generateWebmSegmentIndex } from \"../generateWebmSegmentIndex.js\";\nimport type { TrackFragmentIndex } from \"../Probe.js\";\n\nexport const generateTrackFragmentIndexFromPath = async (absolutePath: string) => {\n const log = debug(\"ef:generateTrackFragment\");\n const probe = await Probe.probePath(absolutePath);\n\n const startTimeOffsetMs = probe.startTimeOffsetMs;\n if (startTimeOffsetMs !== undefined) {\n log(`Extracted start_time offset: ${startTimeOffsetMs}ms`);\n } else {\n log(\"No format/stream timing offset found - will detect from composition time\");\n }\n\n // ── VP9-alpha WebM fast path ─────────────────────────────────────────────\n // For VP9-alpha WebM sources we skip all transcoding and instead build a\n // byte-range index directly from the source file's EBML cluster structure.\n // Alpha BlockAdditions are preserved in every cluster — no H264 matte needed.\n if (probe.hasAlphaVideo) {\n log(`VP9 alpha WebM: using EBML cluster index (no transcoding)`);\n const webmIndex = await generateWebmSegmentIndex(absolutePath, startTimeOffsetMs);\n\n // Scrub track still needs transcoding (low-res H264 thumbnails)\n const scrubTask: Promise<Record<number, TrackFragmentIndex> | null> = (async () => {\n try {\n log(\"Generating scrub track fragment index for alpha WebM\");\n const scrubStream = probe.createScrubTrackReadstream();\n const 
result = await generateFragmentIndex(scrubStream, startTimeOffsetMs, { 0: -1 });\n log(\"Scrub track fragment index generated successfully\");\n return result;\n } catch (error) {\n log(`Failed to generate scrub track fragment index: ${error}`);\n return null;\n }\n })();\n\n const scrubResult = await scrubTask;\n const trackFragmentIndexes: Record<number, TrackFragmentIndex> = { ...webmIndex };\n if (scrubResult) Object.assign(trackFragmentIndexes, scrubResult);\n return trackFragmentIndexes;\n }\n\n // ── Standard MP4 path ────────────────────────────────────────────────────\n log(`Generating track fragment index for ${absolutePath} using single-track approach`);\n\n const trackTasks = probe.streams\n .map((stream, streamIndex) => {\n if (stream.codec_type !== \"audio\" && stream.codec_type !== \"video\") {\n return null;\n }\n const trackId = streamIndex + 1;\n log(`Processing track ${trackId} (${stream.codec_type})`);\n const trackStream = probe.createTrackReadstream(streamIndex);\n const trackIdMapping = { 0: trackId };\n return generateFragmentIndex(trackStream, startTimeOffsetMs, trackIdMapping);\n })\n .filter((task): task is Promise<Record<number, TrackFragmentIndex>> => task !== null);\n\n const scrubTask: Promise<Record<number, TrackFragmentIndex> | null> =\n probe.videoStreams.length > 0\n ? 
(async () => {\n try {\n log(\"Generating scrub track fragment index\");\n const scrubStream = probe.createScrubTrackReadstream();\n const result = await generateFragmentIndex(scrubStream, startTimeOffsetMs, { 0: -1 });\n log(\"Scrub track fragment index generated successfully\");\n return result;\n } catch (error) {\n log(`Failed to generate scrub track fragment index: ${error}`);\n return null;\n }\n })()\n : Promise.resolve(null);\n\n const [trackResults, scrubResult] = await Promise.all([Promise.all(trackTasks), scrubTask]);\n\n const trackFragmentIndexes: Record<number, TrackFragmentIndex> = {};\n for (const result of trackResults) {\n Object.assign(trackFragmentIndexes, result);\n }\n if (scrubResult) {\n Object.assign(trackFragmentIndexes, scrubResult);\n }\n\n return trackFragmentIndexes;\n};\n\nconst generateTrackFragmentIndexTask = idempotentTask({\n label: \"trackFragmentIndex\",\n filename: (absolutePath) => `${basename(absolutePath)}.tracks.json`,\n runner: async (absolutePath: string) => {\n const index = await generateTrackFragmentIndexFromPath(absolutePath);\n return JSON.stringify(index, null, 2);\n },\n});\n\nexport const generateTrackFragmentIndex = async (cacheRoot: string, absolutePath: string) => {\n try {\n return await generateTrackFragmentIndexTask(cacheRoot, absolutePath);\n } catch (error) {\n console.trace(\"Error generating track fragment index\", error);\n throw error;\n 
}\n};\n"],"mappings":";;;;;;;;;;;AAQA,MAAa,qCAAqC,OAAO,iBAAyB;CAChF,MAAM,yBAAY,2BAA2B;CAC7C,MAAM,QAAQ,MAAMA,oBAAM,UAAU,aAAa;CAEjD,MAAM,oBAAoB,MAAM;AAChC,KAAI,sBAAsB,OACxB,KAAI,gCAAgC,kBAAkB,IAAI;KAE1D,KAAI,2EAA2E;AAOjF,KAAI,MAAM,eAAe;AACvB,MAAI,4DAA4D;EAChE,MAAM,YAAY,MAAMC,0DAAyB,cAAc,kBAAkB;EAgBjF,MAAMC,gBAAc,OAbmD,YAAY;AACjF,OAAI;AACF,QAAI,uDAAuD;IAE3D,MAAM,SAAS,MAAMC,oDADD,MAAM,4BAA4B,EACE,mBAAmB,EAAE,GAAG,IAAI,CAAC;AACrF,QAAI,oDAAoD;AACxD,WAAO;YACA,OAAO;AACd,QAAI,kDAAkD,QAAQ;AAC9D,WAAO;;MAEP;EAGJ,MAAMC,yBAA2D,EAAE,GAAG,WAAW;AACjF,MAAIF,cAAa,QAAO,OAAOG,wBAAsBH,cAAY;AACjE,SAAOG;;AAIT,KAAI,uCAAuC,aAAa,8BAA8B;CAEtF,MAAM,aAAa,MAAM,QACtB,KAAK,QAAQ,gBAAgB;AAC5B,MAAI,OAAO,eAAe,WAAW,OAAO,eAAe,QACzD,QAAO;EAET,MAAM,UAAU,cAAc;AAC9B,MAAI,oBAAoB,QAAQ,IAAI,OAAO,WAAW,GAAG;AAGzD,SAAOF,oDAFa,MAAM,sBAAsB,YAAY,EAElB,mBADnB,EAAE,GAAG,SAAS,CACuC;GAC5E,CACD,QAAQ,SAA8D,SAAS,KAAK;CAEvF,MAAMG,YACJ,MAAM,aAAa,SAAS,KACvB,YAAY;AACX,MAAI;AACF,OAAI,wCAAwC;GAE5C,MAAM,SAAS,MAAMH,oDADD,MAAM,4BAA4B,EACE,mBAAmB,EAAE,GAAG,IAAI,CAAC;AACrF,OAAI,oDAAoD;AACxD,UAAO;WACA,OAAO;AACd,OAAI,kDAAkD,QAAQ;AAC9D,UAAO;;KAEP,GACJ,QAAQ,QAAQ,KAAK;CAE3B,MAAM,CAAC,cAAc,eAAe,MAAM,QAAQ,IAAI,CAAC,QAAQ,IAAI,WAAW,EAAE,UAAU,CAAC;CAE3F,MAAMC,uBAA2D,EAAE;AACnE,MAAK,MAAM,UAAU,aACnB,QAAO,OAAO,sBAAsB,OAAO;AAE7C,KAAI,YACF,QAAO,OAAO,sBAAsB,YAAY;AAGlD,QAAO;;AAGT,MAAM,iCAAiCG,sCAAe;CACpD,OAAO;CACP,WAAW,iBAAiB,2BAAY,aAAa,CAAC;CACtD,QAAQ,OAAO,iBAAyB;EACtC,MAAM,QAAQ,MAAM,mCAAmC,aAAa;AACpE,SAAO,KAAK,UAAU,OAAO,MAAM,EAAE;;CAExC,CAAC;AAEF,MAAa,6BAA6B,OAAO,WAAmB,iBAAyB;AAC3F,KAAI;AACF,SAAO,MAAM,+BAA+B,WAAW,aAAa;UAC7D,OAAO;AACd,UAAQ,MAAM,yCAAyC,MAAM;AAC7D,QAAM"}
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { Probe } from "../Probe.js";
|
|
2
2
|
import { generateFragmentIndex } from "../generateFragmentIndex.js";
|
|
3
3
|
import { idempotentTask } from "../idempotentTask.js";
|
|
4
|
+
import { generateWebmSegmentIndex } from "../generateWebmSegmentIndex.js";
|
|
4
5
|
import debug from "debug";
|
|
5
6
|
import { basename } from "node:path";
|
|
6
7
|
|
|
@@ -11,6 +12,24 @@ const generateTrackFragmentIndexFromPath = async (absolutePath) => {
|
|
|
11
12
|
const startTimeOffsetMs = probe.startTimeOffsetMs;
|
|
12
13
|
if (startTimeOffsetMs !== void 0) log(`Extracted start_time offset: ${startTimeOffsetMs}ms`);
|
|
13
14
|
else log("No format/stream timing offset found - will detect from composition time");
|
|
15
|
+
if (probe.hasAlphaVideo) {
|
|
16
|
+
log(`VP9 alpha WebM: using EBML cluster index (no transcoding)`);
|
|
17
|
+
const webmIndex = await generateWebmSegmentIndex(absolutePath, startTimeOffsetMs);
|
|
18
|
+
const scrubResult$1 = await (async () => {
|
|
19
|
+
try {
|
|
20
|
+
log("Generating scrub track fragment index for alpha WebM");
|
|
21
|
+
const result = await generateFragmentIndex(probe.createScrubTrackReadstream(), startTimeOffsetMs, { 0: -1 });
|
|
22
|
+
log("Scrub track fragment index generated successfully");
|
|
23
|
+
return result;
|
|
24
|
+
} catch (error) {
|
|
25
|
+
log(`Failed to generate scrub track fragment index: ${error}`);
|
|
26
|
+
return null;
|
|
27
|
+
}
|
|
28
|
+
})();
|
|
29
|
+
const trackFragmentIndexes$1 = { ...webmIndex };
|
|
30
|
+
if (scrubResult$1) Object.assign(trackFragmentIndexes$1, scrubResult$1);
|
|
31
|
+
return trackFragmentIndexes$1;
|
|
32
|
+
}
|
|
14
33
|
log(`Generating track fragment index for ${absolutePath} using single-track approach`);
|
|
15
34
|
const trackTasks = probe.streams.map((stream, streamIndex) => {
|
|
16
35
|
if (stream.codec_type !== "audio" && stream.codec_type !== "video") return null;
|
|
@@ -21,9 +40,7 @@ const generateTrackFragmentIndexFromPath = async (absolutePath) => {
|
|
|
21
40
|
const scrubTask = probe.videoStreams.length > 0 ? (async () => {
|
|
22
41
|
try {
|
|
23
42
|
log("Generating scrub track fragment index");
|
|
24
|
-
const
|
|
25
|
-
const scrubTrackId = -1;
|
|
26
|
-
const result = await generateFragmentIndex(scrubStream, startTimeOffsetMs, { 0: scrubTrackId });
|
|
43
|
+
const result = await generateFragmentIndex(probe.createScrubTrackReadstream(), startTimeOffsetMs, { 0: -1 });
|
|
27
44
|
log("Scrub track fragment index generated successfully");
|
|
28
45
|
return result;
|
|
29
46
|
} catch (error) {
|