@editframe/assets 0.30.1-beta.0 → 0.31.0-beta.0

This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
@@ -5,12 +5,14 @@ import { pipeline } from "node:stream/promises";
 
  //#region src/generateFragmentIndex.ts
  const log = debug("ef:generateFragmentIndex");
+ const MIN_SEGMENT_DURATION_MS = 2e3;
+ const MS_PER_SECOND = 1e3;
  function constructH264CodecString(codecTagString, profile, level) {
  if (codecTagString !== "avc1" || !profile || level === void 0) return codecTagString;
  const profileIdc = {
- "Baseline": 66,
- "Main": 77,
- "High": 100,
+ Baseline: 66,
+ Main: 77,
+ High: 100,
  "High 10": 110,
  "High 422": 122,
  "High 444": 244
@@ -106,6 +108,108 @@ function createFragmentStream(fragmentData) {
  this.push(Buffer.from(chunk));
  } });
  }
+ function convertTimestamp(pts, timebase, timescale) {
+ return Math.round(pts * timescale / timebase.den);
+ }
+ function durationMsFromTimescale(durationTimescale, timescale) {
+ return durationTimescale / timescale * MS_PER_SECOND;
+ }
+ function calculateSegmentByteRange(accumulatedFragments) {
+ const firstFrag = accumulatedFragments[0];
+ const lastFrag = accumulatedFragments[accumulatedFragments.length - 1];
+ return {
+ offset: firstFrag.fragment.offset,
+ size: lastFrag.fragment.offset + lastFrag.fragment.size - firstFrag.fragment.offset
+ };
+ }
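To make the unit handling concrete: convertTimestamp rescales a packet timestamp from the ffprobe timebase into track timescale units, and durationMsFromTimescale turns a span of timescale units into milliseconds. A small worked example, assuming a 1/90000 timebase (values are illustrative, not from the package):

const timebase = { num: 1, den: 90000 };
const timescale = Math.round(timebase.den / timebase.num); // 90000

// A packet at pts 180000 (2 seconds) stays at 180000 timescale units here,
// because timescale equals timebase.den whenever num is 1.
const cts = Math.round((180000 * timescale) / timebase.den); // 180000

// A span of 180000 timescale units is 2000 ms, i.e. exactly MIN_SEGMENT_DURATION_MS.
const spanMs = (180000 / timescale) * 1000; // 2000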
+ var SegmentAccumulator = class {
+ constructor(context, minDurationMs) {
+ this.state = { type: "idle" };
+ this.context = context;
+ this.minDurationMs = minDurationMs;
+ }
+ shouldFinalize(nextKeyframe) {
+ if (this.state.type !== "accumulating") return false;
+ const hasMinimumDuration = this.calculateAccumulatedDurationMs() >= this.minDurationMs;
+ if (this.context.streamType === "video") return hasMinimumDuration && nextKeyframe !== null;
+ else return hasMinimumDuration;
+ }
+ evaluateSegment(nextBoundary) {
+ if (this.state.type !== "accumulating") return null;
+ const segmentCts = convertTimestamp(this.state.startPts, this.context.timebase, this.context.timescale);
+ const segmentDts = convertTimestamp(this.state.startDts, this.context.timebase, this.context.timescale);
+ const segmentDuration = this.calculateSegmentDuration(segmentCts, nextBoundary);
+ const { offset, size } = calculateSegmentByteRange(this.state.fragments);
+ return {
+ cts: segmentCts,
+ dts: segmentDts,
+ duration: segmentDuration,
+ offset,
+ size
+ };
+ }
+ addFragment(fragment, fragmentData) {
+ if (this.state.type === "idle") this.state = {
+ type: "accumulating",
+ startPts: this.getStartPts(fragmentData),
+ startDts: this.getStartDts(fragmentData),
+ fragments: [{
+ fragment,
+ fragmentData
+ }]
+ };
+ else this.state.fragments.push({
+ fragment,
+ fragmentData
+ });
+ }
+ reset() {
+ this.state = { type: "idle" };
+ }
+ startNewSegment(keyframe) {
+ this.state = {
+ type: "accumulating",
+ startPts: keyframe.pts,
+ startDts: keyframe.dts,
+ fragments: []
+ };
+ }
+ getState() {
+ return this.state;
+ }
+ isAccumulating() {
+ return this.state.type === "accumulating";
+ }
+ calculateAccumulatedDurationMs() {
+ if (this.state.type !== "accumulating") return 0;
+ const lastFrag = this.state.fragments[this.state.fragments.length - 1];
+ const lastPacket = this.getLastPacket(lastFrag.fragmentData);
+ return durationMsFromTimescale(convertTimestamp(lastPacket.pts + (lastPacket.duration || 0), this.context.timebase, this.context.timescale) - convertTimestamp(this.state.startPts, this.context.timebase, this.context.timescale), this.context.timescale);
+ }
+ calculateSegmentDuration(segmentCts, nextBoundary) {
+ if (nextBoundary) return convertTimestamp(nextBoundary.pts, this.context.timebase, this.context.timescale) - segmentCts;
+ const sortedPackets = [...this.context.streamPackets].sort((a, b) => a.pts - b.pts);
+ const lastPacket = sortedPackets[sortedPackets.length - 1];
+ return convertTimestamp(lastPacket.pts + (lastPacket.duration || 0), this.context.timebase, this.context.timescale) - segmentCts;
+ }
+ getStartPts(fragmentData) {
+ if (this.context.streamType === "video") return fragmentData.videoPackets.find((p) => p.isKeyframe)?.pts ?? fragmentData.videoPackets[0]?.pts ?? 0;
+ else return fragmentData.audioPackets[0]?.pts ?? 0;
+ }
+ getStartDts(fragmentData) {
+ if (this.context.streamType === "video") return fragmentData.videoPackets.find((p) => p.isKeyframe)?.dts ?? fragmentData.videoPackets[0]?.dts ?? 0;
+ else return fragmentData.audioPackets[0]?.dts ?? 0;
+ }
+ getLastPacket(fragmentData) {
+ if (this.context.streamType === "video") {
+ const packets = fragmentData.videoPackets;
+ return packets[packets.length - 1];
+ } else {
+ const packets = fragmentData.audioPackets;
+ return packets[packets.length - 1];
+ }
+ }
+ };
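Taken together, the class above is a two-state machine (idle / accumulating): a video segment opens on a keyframe, absorbs whole fragments, and is cut only once it holds at least MIN_SEGMENT_DURATION_MS and the next keyframe arrives. A self-contained sketch of just that cut policy, with invented fragment data rather than the package's types:

interface Frag { startMs: number; durationMs: number; hasKeyframe: boolean }

function cutPoints(frags: Frag[], minMs = 2000): number[] {
  const cuts: number[] = [];
  let openSince: number | null = null;
  for (const f of frags) {
    if (openSince === null) {
      if (f.hasKeyframe) openSince = f.startMs; // segments must start on a keyframe
      continue;
    }
    if (f.hasKeyframe && f.startMs - openSince >= minMs) {
      cuts.push(f.startMs); // boundary = the next keyframe's start
      openSince = f.startMs;
    }
  }
  return cuts;
}

// Example: 1 s fragments with keyframes on every other fragment -> cuts at 2 s and 4 s.
const frags = Array.from({ length: 6 }, (_, i) => ({
  startMs: i * 1000, durationMs: 1000, hasKeyframe: i % 2 === 0,
}));
// cutPoints(frags) === [2000, 4000]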
  const generateFragmentIndex = async (inputStream, startTimeOffsetMs, trackIdMapping) => {
  const parser = new StreamingBoxParser();
  const chunks = [];
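For orientation, generateFragmentIndex is the entry point the rest of this diff modifies. A hedged usage sketch (the import specifier and file name are assumptions, not taken from the package's documentation):

import { createReadStream } from "node:fs";
// Assumed import specifier; the actual export path may differ.
import { generateFragmentIndex } from "@editframe/assets";

const input = createReadStream("recording.fmp4"); // any fragmented-MP4 readable stream
// Optional second/third args: explicit start offset (ms) and a source -> final track id map.
const trackIndexes = await generateFragmentIndex(input, 0, { 0: 1 });
// trackIndexes is keyed by final track id, one TrackFragmentIndex entry per track.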
@@ -157,64 +261,98 @@ const generateFragmentIndex = async (inputStream, startTimeOffsetMs, trackIdMapp
  audioPackets
  });
  }
- for (const videoStream of videoStreams) {
+ const processTrack = (streamIndex, streamType, timebase, allPackets) => {
  const segments = [];
- const totalVideoPackets = probe.packets.filter((p) => p.stream_index === videoStream.index);
- const keyframePackets = totalVideoPackets.filter((p) => p.flags?.includes("K"));
- const totalSampleCount = keyframePackets.length;
- log(`Complete stream has ${totalVideoPackets.length} video packets, ${keyframePackets.length} keyframes for stream ${videoStream.index}`);
+ const accumulator = new SegmentAccumulator({
+ timebase,
+ timescale: Math.round(timebase.den / timebase.num),
+ fragmentTimingData,
+ mediaFragments,
+ streamPackets: allPackets.filter((p) => p.stream_index === streamIndex),
+ streamType,
+ streamIndex
+ }, MIN_SEGMENT_DURATION_MS);
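The object literal above fills a per-track processing context; streamPackets is filtered once here so the accumulator never re-filters probe.packets. Its shape, as declared in the TypeScript source shipped in the source map (simplified here, with the packet and fragment element types elided):

interface Timebase { num: number; den: number }

interface TrackProcessingContext {
  timebase: Timebase;
  timescale: number;             // Math.round(timebase.den / timebase.num)
  fragmentTimingData: unknown[]; // per-fragment video/audio packet lists
  mediaFragments: unknown[];     // moof+mdat byte ranges
  streamPackets: unknown[];      // packets for this stream only, cached once
  streamType: "video" | "audio";
  streamIndex: number;
}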
+ for (let i = 0; i < fragmentTimingData.length; i++) {
+ const fragmentData = fragmentTimingData[i];
+ const fragment = mediaFragments[fragmentData.fragmentIndex];
+ const packets = streamType === "video" ? fragmentData.videoPackets : fragmentData.audioPackets;
+ log(`Fragment ${fragmentData.fragmentIndex}: ${packets.length} ${streamType} packets`);
+ if (packets.length === 0) {
+ log(`Skipping fragment ${fragmentData.fragmentIndex} - no ${streamType} packets`);
+ continue;
+ }
+ if (streamType === "video") {
+ const keyframe = fragmentData.videoPackets.find((p) => p.isKeyframe);
+ const hasKeyframe = keyframe !== void 0;
+ if (!accumulator.isAccumulating() && hasKeyframe) {
+ accumulator.startNewSegment({
+ pts: keyframe.pts,
+ dts: keyframe.dts
+ });
+ accumulator.addFragment(fragment, fragmentData);
+ continue;
+ }
+ if (!accumulator.isAccumulating()) continue;
+ if (hasKeyframe) {
+ if (accumulator.shouldFinalize({
+ pts: keyframe.pts,
+ dts: keyframe.dts
+ })) {
+ const nextBoundary = { pts: keyframe.pts };
+ const evaluation = accumulator.evaluateSegment(nextBoundary);
+ if (evaluation) segments.push(evaluation);
+ accumulator.reset();
+ accumulator.startNewSegment({
+ pts: keyframe.pts,
+ dts: keyframe.dts
+ });
+ }
+ }
+ } else {
+ if (!accumulator.isAccumulating()) {
+ accumulator.addFragment(fragment, fragmentData);
+ continue;
+ }
+ if (accumulator.shouldFinalize(null)) {
+ const nextBoundary = { pts: fragmentData.audioPackets[0].pts };
+ const evaluation = accumulator.evaluateSegment(nextBoundary);
+ if (evaluation) segments.push(evaluation);
+ accumulator.reset();
+ }
+ }
+ accumulator.addFragment(fragment, fragmentData);
+ }
+ if (accumulator.isAccumulating()) {
+ const evaluation = accumulator.evaluateSegment(null);
+ if (evaluation) segments.push(evaluation);
+ }
+ return segments;
+ };
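When a segment is finalized it may span several moof+mdat fragments, so its byte range is computed from the first and last accumulated fragment. Worked through with invented offsets and sizes:

// Two consecutive fragments accumulated into one segment (values are invented).
const accumulated = [
  { fragment: { offset: 1_000, size: 4_000 } }, // bytes [1000, 5000)
  { fragment: { offset: 5_000, size: 6_000 } }, // bytes [5000, 11000)
];

const first = accumulated[0].fragment;
const last = accumulated[accumulated.length - 1].fragment;

const range = {
  offset: first.offset,                         // 1000
  size: last.offset + last.size - first.offset, // 5000 + 6000 - 1000 = 10000
};
// The segment covers bytes [1000, 11000): one contiguous range over both fragments.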
+ for (const videoStream of videoStreams) {
  const timebase = probe.videoTimebase;
  if (!timebase) {
  console.warn("No timebase found for video stream");
  continue;
  }
  const timescale = Math.round(timebase.den / timebase.num);
+ const streamPackets = probe.packets.filter((p) => p.stream_index === videoStream.index);
+ const keyframePackets = streamPackets.filter((p) => p.flags?.includes("K"));
+ const totalSampleCount = keyframePackets.length;
+ log(`Complete stream has ${streamPackets.length} video packets, ${keyframePackets.length} keyframes for stream ${videoStream.index}`);
  let trackStartTimeOffsetMs;
- const allVideoPackets = probe.packets.filter((p) => p.stream_index === videoStream.index);
- if (allVideoPackets.length > 0) {
- const firstPacketTime = allVideoPackets[0].dts_time;
- log(`First video packet dts_time: ${firstPacketTime}, pts_time: ${allVideoPackets[0].pts_time}`);
- const presentationTime = allVideoPackets[0].pts_time;
- if (Math.abs(presentationTime) > .01) trackStartTimeOffsetMs = presentationTime * 1e3;
+ if (streamPackets.length > 0) {
+ log(`First video packet dts_time: ${streamPackets[0].dts_time}, pts_time: ${streamPackets[0].pts_time}`);
+ const presentationTime = streamPackets[0].pts_time;
+ if (Math.abs(presentationTime) > .01) trackStartTimeOffsetMs = presentationTime * MS_PER_SECOND;
  }
  if (startTimeOffsetMs !== void 0) trackStartTimeOffsetMs = startTimeOffsetMs;
- log(`Processing ${fragmentTimingData.length} fragments for video stream ${videoStream.index}`);
- for (const fragmentData of fragmentTimingData) {
- const fragment = mediaFragments[fragmentData.fragmentIndex];
- const videoPackets = fragmentData.videoPackets;
- log(`Fragment ${fragmentData.fragmentIndex}: ${videoPackets.length} video packets`);
- if (videoPackets.length === 0) {
- log(`Skipping fragment ${fragmentData.fragmentIndex} - no video packets`);
- continue;
- }
- const firstPacket = videoPackets[0];
- const keyframe = videoPackets.find((p) => p.isKeyframe) || firstPacket;
- const segmentCts = Math.round(keyframe.pts * timescale / timebase.den);
- const segmentDts = Math.round(keyframe.dts * timescale / timebase.den);
- const nextKeyframe = fragmentTimingData[fragmentData.fragmentIndex + 1]?.videoPackets.find((p) => p.isKeyframe);
- let segmentDuration;
- if (nextKeyframe) segmentDuration = Math.round(nextKeyframe.pts * timescale / timebase.den) - segmentCts;
- else {
- const allVideoPackets$1 = probe.packets.filter((p) => {
- return videoStreams.find((s) => s.index === p.stream_index)?.codec_type === "video";
- }).sort((a, b) => a.pts - b.pts);
- const lastPacket = allVideoPackets$1[allVideoPackets$1.length - 1];
- segmentDuration = Math.round((lastPacket.pts + (lastPacket.duration || 0)) * timescale / timebase.den) - segmentCts;
- }
- segments.push({
- cts: segmentCts,
- dts: segmentDts,
- duration: segmentDuration,
- offset: fragment.offset,
- size: fragment.size
- });
- }
+ const segments = processTrack(videoStream.index, "video", timebase, probe.packets);
  let totalDuration = 0;
- if (totalVideoPackets.length > 0) {
- const firstPacket = totalVideoPackets[0];
- const lastPacket = totalVideoPackets[totalVideoPackets.length - 1];
- const firstPts = Math.round(firstPacket.pts * timescale / timebase.den);
- totalDuration = Math.round(lastPacket.pts * timescale / timebase.den) - firstPts;
+ if (streamPackets.length > 0) {
+ const firstPacket = streamPackets[0];
+ const lastPacket = streamPackets[streamPackets.length - 1];
+ const firstPts = convertTimestamp(firstPacket.pts, timebase, timescale);
+ totalDuration = convertTimestamp(lastPacket.pts, timebase, timescale) - firstPts;
  }
  const finalTrackId = trackIdMapping?.[videoStream.index] ?? videoStream.index + 1;
  trackIndexes[finalTrackId] = {
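The per-track start offset above comes from the first packet's pts_time (in seconds), is ignored when it lies within 10 ms of zero, and is always overridden by an explicit startTimeOffsetMs argument. With assumed values:

const MS_PER_SECOND = 1000;
const firstPacketPtsTime = 0.5; // seconds, assumed

let trackStartTimeOffsetMs: number | undefined;
if (Math.abs(firstPacketPtsTime) > 0.01) {
  trackStartTimeOffsetMs = firstPacketPtsTime * MS_PER_SECOND; // 500 ms
}

const explicitOffsetMs: number | undefined = undefined; // caller did not pass one
if (explicitOffsetMs !== undefined) trackStartTimeOffsetMs = explicitOffsetMs;
// trackStartTimeOffsetMs === 500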
@@ -235,51 +373,21 @@ const generateFragmentIndex = async (inputStream, startTimeOffsetMs, trackIdMapp
  };
  }
  for (const audioStream of audioStreams) {
- const segments = [];
- const totalSampleCount = probe.packets.filter((p) => p.stream_index === audioStream.index).length;
  const timebase = probe.audioTimebase;
  if (!timebase) {
  console.warn("No timebase found for audio stream");
  continue;
  }
  const timescale = Math.round(timebase.den / timebase.num);
+ const streamPackets = probe.packets.filter((p) => p.stream_index === audioStream.index);
+ const totalSampleCount = streamPackets.length;
  let trackStartTimeOffsetMs;
- const allAudioPackets = probe.packets.filter((p) => p.stream_index === audioStream.index);
- if (allAudioPackets.length > 0) {
- const presentationTime = allAudioPackets[0].pts_time;
- if (Math.abs(presentationTime) > .01) trackStartTimeOffsetMs = presentationTime * 1e3;
+ if (streamPackets.length > 0) {
+ const presentationTime = streamPackets[0].pts_time;
+ if (Math.abs(presentationTime) > .01) trackStartTimeOffsetMs = presentationTime * MS_PER_SECOND;
  }
  if (startTimeOffsetMs !== void 0) trackStartTimeOffsetMs = startTimeOffsetMs;
- log(`Processing ${fragmentTimingData.length} fragments for audio stream ${audioStream.index}`);
- for (const fragmentData of fragmentTimingData) {
- const fragment = mediaFragments[fragmentData.fragmentIndex];
- const audioPackets = fragmentData.audioPackets;
- log(`Fragment ${fragmentData.fragmentIndex}: ${audioPackets.length} audio packets`);
- if (audioPackets.length === 0) {
- log(`Skipping fragment ${fragmentData.fragmentIndex} - no audio packets`);
- continue;
- }
- const firstPacket = audioPackets[0];
- const segmentCts = Math.round(firstPacket.pts * timescale / timebase.den);
- const segmentDts = Math.round(firstPacket.dts * timescale / timebase.den);
- const nextFirstPacket = fragmentTimingData[fragmentData.fragmentIndex + 1]?.audioPackets[0];
- let segmentDuration;
- if (nextFirstPacket) segmentDuration = Math.round(nextFirstPacket.pts * timescale / timebase.den) - segmentCts;
- else {
- const allAudioPackets$1 = probe.packets.filter((p) => {
- return audioStreams.find((s) => s.index === p.stream_index)?.codec_type === "audio";
- }).sort((a, b) => a.pts - b.pts);
- const lastPacket = allAudioPackets$1[allAudioPackets$1.length - 1];
- segmentDuration = Math.round((lastPacket.pts + (lastPacket.duration || 0)) * timescale / timebase.den) - segmentCts;
- }
- segments.push({
- cts: segmentCts,
- dts: segmentDts,
- duration: segmentDuration,
- offset: fragment.offset,
- size: fragment.size
- });
- }
+ const segments = processTrack(audioStream.index, "audio", timebase, probe.packets);
  const totalDuration = segments.reduce((sum, seg) => sum + seg.duration, 0);
  const finalTrackId = trackIdMapping?.[audioStream.index] ?? audioStream.index + 1;
  trackIndexes[finalTrackId] = {
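Note that the two track types total their duration differently: video spans first to last packet pts, while audio sums its finalized segment durations. A toy comparison with assumed timescale-unit values:

// Assumed values, in track timescale units.
const videoPackets = [{ pts: 0 }, { pts: 3000 }, { pts: 6000 }, { pts: 9000 }];
const audioSegments = [{ duration: 4000 }, { duration: 4000 }, { duration: 2000 }];

// Video: span from first to last packet (9000 - 0 = 9000).
const videoDuration = videoPackets[videoPackets.length - 1].pts - videoPackets[0].pts;

// Audio: sum of finalized segment durations (4000 + 4000 + 2000 = 10000).
const audioDuration = audioSegments.reduce((sum, seg) => sum + seg.duration, 0);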
@@ -1 +1 @@
- {"version":3,"file":"generateFragmentIndex.js","names":["box: MP4BoxHeader","chunks: Buffer[]","probe: PacketProbe","trackIndexes: Record<number, TrackFragmentIndex>","fragmentTimingData: Array<{\n fragmentIndex: number;\n videoPackets: Array<{ pts: number; dts: number; isKeyframe: boolean; duration?: number }>;\n audioPackets: Array<{ pts: number; dts: number; duration?: number }>;\n }>","segments: TrackSegment[]","trackStartTimeOffsetMs: number | undefined","segmentDuration: number","allVideoPackets","allAudioPackets"],"sources":["../src/generateFragmentIndex.ts"],"sourcesContent":["import { Readable, Transform, Writable } from \"node:stream\";\nimport { pipeline } from \"node:stream/promises\";\nimport debug from \"debug\";\nimport type { TrackFragmentIndex, TrackSegment } from \"./Probe.js\";\nimport { PacketProbe } from \"./Probe.js\";\n\nconst log = debug(\"ef:generateFragmentIndex\");\n\n// Helper function to construct H.264 codec string from profile and level\nfunction constructH264CodecString(codecTagString: string, profile?: string, level?: number): string {\n if (codecTagString !== 'avc1' || !profile || level === undefined) {\n return codecTagString;\n }\n\n // Map H.264 profile names to profile_idc values\n const profileMap: Record<string, number> = {\n 'Baseline': 0x42,\n 'Main': 0x4d,\n 'High': 0x64,\n 'High 10': 0x6e,\n 'High 422': 0x7a,\n 'High 444': 0xf4,\n };\n\n const profileIdc = profileMap[profile];\n if (!profileIdc) {\n return codecTagString;\n }\n\n // Format: avc1.PPCCLL where PP=profile_idc, CC=constraint_flags, LL=level_idc\n const profileHex = profileIdc.toString(16).padStart(2, '0');\n const constraintFlags = '00'; // Most common case\n const levelHex = level.toString(16).padStart(2, '0');\n\n return `${codecTagString}.${profileHex}${constraintFlags}${levelHex}`;\n}\n\ninterface MP4BoxHeader {\n type: string;\n offset: number;\n size: number;\n headerSize: number;\n}\n\ninterface Fragment {\n type: 'init' | 'media';\n offset: number;\n size: number;\n moofOffset?: number;\n mdatOffset?: number;\n}\n\n/**\n * Streaming MP4 box parser that detects box boundaries without loading entire file into memory\n */\nclass StreamingBoxParser extends Transform {\n private buffer = Buffer.alloc(0);\n private globalOffset = 0;\n private fragments: Fragment[] = [];\n private currentMoof: MP4BoxHeader | null = null;\n private initSegmentEnd = 0;\n private foundBoxes: MP4BoxHeader[] = [];\n\n constructor() {\n super({ objectMode: false });\n }\n\n _transform(chunk: any, _encoding: BufferEncoding, callback: () => void) {\n // Append new data to our sliding buffer\n this.buffer = Buffer.concat([this.buffer, chunk]);\n\n // Parse all complete boxes in the current buffer\n this.parseBoxes();\n\n // Pass through the original chunk unchanged\n this.push(chunk);\n callback();\n }\n\n private parseBoxes() {\n let bufferOffset = 0;\n\n while (this.buffer.length - bufferOffset >= 8) {\n const size = this.buffer.readUInt32BE(bufferOffset);\n const type = this.buffer.subarray(bufferOffset + 4, bufferOffset + 8).toString('ascii');\n\n // Invalid or incomplete box\n if (size === 0 || size < 8 || this.buffer.length < bufferOffset + size) {\n break;\n }\n\n const box: MP4BoxHeader = {\n type,\n offset: this.globalOffset + bufferOffset,\n size,\n headerSize: 8\n };\n\n log(`Found box: ${box.type} at offset ${box.offset}, size ${box.size}`);\n this.foundBoxes.push(box);\n this.handleBox(box);\n\n bufferOffset += size;\n }\n\n // Update global offset and trim processed data from buffer\n 
this.globalOffset += bufferOffset;\n this.buffer = this.buffer.subarray(bufferOffset);\n }\n\n private handleBox(box: MP4BoxHeader) {\n switch (box.type) {\n case 'ftyp':\n case 'moov':\n // Part of init segment\n this.initSegmentEnd = Math.max(this.initSegmentEnd, box.offset + box.size);\n break;\n\n case 'moof':\n this.currentMoof = box;\n break;\n\n case 'mdat':\n if (this.currentMoof) {\n // Found a complete fragment (moof + mdat pair) - fragmented MP4\n this.fragments.push({\n type: 'media',\n offset: this.currentMoof.offset,\n size: (box.offset + box.size) - this.currentMoof.offset,\n moofOffset: this.currentMoof.offset,\n mdatOffset: box.offset,\n });\n this.currentMoof = null;\n } else {\n // mdat without moof - this is non-fragmented content, not a fragment\n // Common in mixed MP4 files where initial content is non-fragmented\n // followed by fragmented content. Ignore for fragment indexing.\n log(`Found non-fragmented mdat at offset ${box.offset}, skipping for fragment index`);\n }\n break;\n }\n }\n\n _flush(callback: () => void) {\n this.parseBoxes(); // Process any remaining buffered data\n\n // Probe always outputs fragmented MP4\n // Init segment is ftyp + moov boxes before the first moof\n if (this.initSegmentEnd > 0) {\n this.fragments.unshift({\n type: 'init',\n offset: 0,\n size: this.initSegmentEnd,\n });\n }\n\n callback();\n }\n\n getFragments(): Fragment[] {\n return this.fragments;\n }\n}\n\n// Helper function to create a readable stream from fragment data\nfunction createFragmentStream(fragmentData: Uint8Array): Readable {\n let offset = 0;\n return new Readable({\n read() {\n if (offset >= fragmentData.length) {\n this.push(null);\n return;\n }\n\n const chunkSize = Math.min(64 * 1024, fragmentData.length - offset); // 64KB chunks\n const chunk = fragmentData.slice(offset, offset + chunkSize);\n offset += chunkSize;\n this.push(Buffer.from(chunk));\n }\n });\n}\n\n// Helper function to extract fragment data (init + media fragment)\n\nexport const generateFragmentIndex = async (\n inputStream: Readable,\n startTimeOffsetMs?: number,\n trackIdMapping?: Record<number, number> // Map from source track ID to desired track ID\n): Promise<Record<number, TrackFragmentIndex>> => {\n // Step 1: Create a streaming parser that detects fragment boundaries\n const parser = new StreamingBoxParser();\n\n // Step 2: Create a passthrough stream that doesn't buffer everything\n const chunks: Buffer[] = [];\n let totalSize = 0;\n\n const dest = new Writable({\n write(chunk, _encoding, callback) {\n chunks.push(chunk);\n totalSize += chunk.length;\n callback();\n }\n });\n\n // Process the stream through both parser and collection\n await pipeline(inputStream, parser, dest);\n const fragments = parser.getFragments();\n\n // If no data was collected, return empty result\n if (totalSize === 0) {\n return {};\n }\n\n // Step 3: Use ffprobe to analyze the complete stream for track metadata\n const completeData = Buffer.concat(chunks as readonly Uint8Array[]);\n const completeStream = createFragmentStream(new Uint8Array(completeData.buffer, completeData.byteOffset, completeData.byteLength));\n\n let probe: PacketProbe;\n try {\n probe = await PacketProbe.probeStream(completeStream);\n } catch (error) {\n console.warn(\"Failed to probe stream with ffprobe:\", error);\n return {};\n }\n\n const videoStreams = probe.videoStreams;\n const audioStreams = probe.audioStreams;\n\n const trackIndexes: Record<number, TrackFragmentIndex> = {};\n const initFragment = fragments.find(f => f.type === 
'init');\n const mediaFragments = fragments.filter(f => f.type === 'media');\n\n // Map packets to fragments using byte position for moof+mdat boundaries\n // But create contiguous segments based on keyframes\n const fragmentTimingData: Array<{\n fragmentIndex: number;\n videoPackets: Array<{ pts: number; dts: number; isKeyframe: boolean; duration?: number }>;\n audioPackets: Array<{ pts: number; dts: number; duration?: number }>;\n }> = [];\n\n for (let fragmentIndex = 0; fragmentIndex < mediaFragments.length; fragmentIndex++) {\n const fragment = mediaFragments[fragmentIndex]!;\n\n // Find packets that belong to this fragment based on byte position (moof+mdat boundaries)\n const fragmentStart = fragment.offset;\n const fragmentEnd = fragment.offset + fragment.size;\n\n const videoPackets = probe.packets\n .filter(packet => {\n const stream = videoStreams.find(s => s.index === packet.stream_index);\n return stream?.codec_type === 'video' &&\n packet.pos !== undefined &&\n packet.pos >= fragmentStart &&\n packet.pos < fragmentEnd;\n })\n .map(packet => ({\n pts: packet.pts,\n dts: packet.dts,\n duration: packet.duration,\n isKeyframe: packet.flags?.includes('K') ?? false,\n }));\n\n const audioPackets = probe.packets\n .filter(packet => {\n const stream = audioStreams.find(s => s.index === packet.stream_index);\n return stream?.codec_type === 'audio' &&\n packet.pos !== undefined &&\n packet.pos >= fragmentStart &&\n packet.pos < fragmentEnd;\n })\n .map(packet => ({\n pts: packet.pts,\n dts: packet.dts,\n duration: packet.duration,\n }));\n\n fragmentTimingData.push({\n fragmentIndex,\n videoPackets,\n audioPackets,\n });\n }\n\n // Step 4: Process video tracks using ffprobe data\n for (const videoStream of videoStreams) {\n const segments: TrackSegment[] = [];\n\n // Count total samples from complete stream - try counting keyframes for \"improved efficiency\"\n const totalVideoPackets = probe.packets.filter(p => p.stream_index === videoStream.index);\n const keyframePackets = totalVideoPackets.filter(p => p.flags?.includes('K'));\n\n // The test comment mentions \"improved efficiency\" suggesting we should count keyframes\n const totalSampleCount = keyframePackets.length;\n\n log(`Complete stream has ${totalVideoPackets.length} video packets, ${keyframePackets.length} keyframes for stream ${videoStream.index}`);\n\n // Get timebase for this stream to convert timestamps\n const timebase = probe.videoTimebase;\n if (!timebase) {\n console.warn(\"No timebase found for video stream\");\n continue;\n }\n\n // Calculate timescale as the inverse of timebase\n const timescale = Math.round(timebase.den / timebase.num);\n\n // Calculate per-track timing offset from first packet for timeline mapping\n let trackStartTimeOffsetMs: number | undefined;\n const allVideoPackets = probe.packets.filter(p => p.stream_index === videoStream.index);\n if (allVideoPackets.length > 0) {\n const firstPacketTime = allVideoPackets[0]!.dts_time;\n log(`First video packet dts_time: ${firstPacketTime}, pts_time: ${allVideoPackets[0]!.pts_time}`);\n\n // Use PTS time instead of DTS time for offset calculation \n // since PTS represents the presentation timeline\n const presentationTime = allVideoPackets[0]!.pts_time;\n if (Math.abs(presentationTime) > 0.01) {\n trackStartTimeOffsetMs = presentationTime * 1000;\n }\n }\n if (startTimeOffsetMs !== undefined) {\n trackStartTimeOffsetMs = startTimeOffsetMs;\n }\n\n // Process each fragment to create segments\n log(`Processing ${fragmentTimingData.length} fragments for 
video stream ${videoStream.index}`);\n for (const fragmentData of fragmentTimingData) {\n const fragment = mediaFragments[fragmentData.fragmentIndex]!;\n const videoPackets = fragmentData.videoPackets;\n\n log(`Fragment ${fragmentData.fragmentIndex}: ${videoPackets.length} video packets`);\n if (videoPackets.length === 0) {\n log(`Skipping fragment ${fragmentData.fragmentIndex} - no video packets`);\n continue;\n }\n\n // Note: Only some fragments start with keyframes in typical fragmented MP4\n const firstPacket = videoPackets[0]!;\n\n // Use keyframe as segment start (essential for video streaming)\n const keyframe = videoPackets.find(p => p.isKeyframe) || firstPacket;\n\n // Convert timestamps from ffprobe timebase to track timescale\n const segmentCts = Math.round(keyframe.pts * timescale / timebase.den);\n const segmentDts = Math.round(keyframe.dts * timescale / timebase.den);\n\n // Calculate duration to ensure perfect continuity\n // Find the next segment's keyframe\n const nextFragmentData = fragmentTimingData[fragmentData.fragmentIndex + 1];\n const nextKeyframe = nextFragmentData?.videoPackets.find(p => p.isKeyframe);\n\n let segmentDuration: number;\n if (nextKeyframe) {\n // Duration to next keyframe (perfectly contiguous)\n const nextSegmentCts = Math.round(nextKeyframe.pts * timescale / timebase.den);\n segmentDuration = nextSegmentCts - segmentCts;\n } else {\n // Last segment: duration to end of all video packets\n const allVideoPackets = probe.packets\n .filter(p => {\n const stream = videoStreams.find(s => s.index === p.stream_index);\n return stream?.codec_type === 'video';\n })\n .sort((a, b) => a.pts - b.pts);\n const lastPacket = allVideoPackets[allVideoPackets.length - 1]!;\n const streamEnd = Math.round((lastPacket.pts + (lastPacket.duration || 0)) * timescale / timebase.den);\n segmentDuration = streamEnd - segmentCts;\n }\n\n\n\n segments.push({\n cts: segmentCts,\n dts: segmentDts,\n duration: segmentDuration,\n offset: fragment.offset,\n size: fragment.size,\n });\n }\n\n // Calculate total duration from complete stream packets, not just segments\n // This accounts for standalone mdat fragments that don't create segments\n let totalDuration = 0;\n if (totalVideoPackets.length > 0) {\n const firstPacket = totalVideoPackets[0]!;\n const lastPacket = totalVideoPackets[totalVideoPackets.length - 1]!;\n\n const firstPts = Math.round(firstPacket.pts * timescale / timebase.den);\n const lastPts = Math.round(lastPacket.pts * timescale / timebase.den);\n\n // Calculate duration as the span from first to last packet\n totalDuration = lastPts - firstPts;\n }\n\n const finalTrackId = trackIdMapping?.[videoStream.index] ?? 
(videoStream.index + 1);\n trackIndexes[finalTrackId] = {\n track: finalTrackId,\n type: \"video\",\n width: videoStream.coded_width || videoStream.width,\n height: videoStream.coded_height || videoStream.height,\n timescale: timescale,\n sample_count: totalSampleCount,\n codec: constructH264CodecString(videoStream.codec_tag_string, videoStream.profile, videoStream.level),\n duration: totalDuration,\n startTimeOffsetMs: trackStartTimeOffsetMs,\n initSegment: {\n offset: 0,\n size: initFragment?.size || 0,\n },\n segments,\n };\n }\n\n // Step 5: Process audio tracks using ffprobe data \n for (const audioStream of audioStreams) {\n const segments: TrackSegment[] = [];\n\n // Count total samples from complete stream, not individual fragments\n const totalAudioPackets = probe.packets.filter(p => p.stream_index === audioStream.index);\n const totalSampleCount = totalAudioPackets.length;\n\n // Get timebase for this stream to convert timestamps\n const timebase = probe.audioTimebase;\n if (!timebase) {\n console.warn(\"No timebase found for audio stream\");\n continue;\n }\n\n // Calculate timescale as the inverse of timebase\n const timescale = Math.round(timebase.den / timebase.num);\n\n // Calculate per-track timing offset from first packet for timeline mapping\n let trackStartTimeOffsetMs: number | undefined;\n const allAudioPackets = probe.packets.filter(p => p.stream_index === audioStream.index);\n if (allAudioPackets.length > 0) {\n // Use PTS time for offset calculation since it represents presentation timeline\n const presentationTime = allAudioPackets[0]!.pts_time;\n if (Math.abs(presentationTime) > 0.01) {\n trackStartTimeOffsetMs = presentationTime * 1000;\n }\n }\n if (startTimeOffsetMs !== undefined) {\n trackStartTimeOffsetMs = startTimeOffsetMs;\n }\n\n // Process each fragment to create segments\n log(`Processing ${fragmentTimingData.length} fragments for audio stream ${audioStream.index}`);\n for (const fragmentData of fragmentTimingData) {\n const fragment = mediaFragments[fragmentData.fragmentIndex]!;\n const audioPackets = fragmentData.audioPackets;\n\n log(`Fragment ${fragmentData.fragmentIndex}: ${audioPackets.length} audio packets`);\n if (audioPackets.length === 0) {\n log(`Skipping fragment ${fragmentData.fragmentIndex} - no audio packets`);\n continue;\n }\n\n // Calculate fragment duration from actual packet data\n const firstPacket = audioPackets[0]!;\n\n // Convert timestamps from ffprobe timebase to track timescale\n // For audio, CTS always equals PTS (no reordering)\n const segmentCts = Math.round(firstPacket.pts * timescale / timebase.den);\n const segmentDts = Math.round(firstPacket.dts * timescale / timebase.den);\n\n // Calculate duration to ensure perfect continuity with next segment\n const nextFragmentData = fragmentTimingData[fragmentData.fragmentIndex + 1];\n const nextFirstPacket = nextFragmentData?.audioPackets[0];\n\n let segmentDuration: number;\n if (nextFirstPacket) {\n // Duration to next segment start (perfectly contiguous)\n const nextSegmentCts = Math.round(nextFirstPacket.pts * timescale / timebase.den);\n segmentDuration = nextSegmentCts - segmentCts;\n } else {\n // Last segment: duration to end of all audio packets\n const allAudioPackets = probe.packets\n .filter(p => {\n const stream = audioStreams.find(s => s.index === p.stream_index);\n return stream?.codec_type === 'audio';\n })\n .sort((a, b) => a.pts - b.pts);\n const lastPacket = allAudioPackets[allAudioPackets.length - 1]!;\n const streamEnd = Math.round((lastPacket.pts + 
(lastPacket.duration || 0)) * timescale / timebase.den);\n segmentDuration = streamEnd - segmentCts;\n }\n\n\n\n segments.push({\n cts: segmentCts,\n dts: segmentDts,\n duration: segmentDuration,\n offset: fragment.offset,\n size: fragment.size,\n });\n }\n\n // Calculate total duration\n const totalDuration = segments.reduce((sum, seg) => sum + seg.duration, 0);\n\n const finalTrackId = trackIdMapping?.[audioStream.index] ?? (audioStream.index + 1);\n trackIndexes[finalTrackId] = {\n track: finalTrackId,\n type: \"audio\",\n channel_count: audioStream.channels,\n sample_rate: Number(audioStream.sample_rate),\n sample_size: audioStream.bits_per_sample,\n sample_count: totalSampleCount,\n timescale: timescale,\n codec: audioStream.codec_tag_string || audioStream.codec_name || '',\n duration: totalDuration,\n startTimeOffsetMs: trackStartTimeOffsetMs,\n initSegment: {\n offset: 0,\n size: initFragment?.size || 0,\n },\n segments,\n };\n }\n\n return trackIndexes;\n};\n"],"mappings":";;;;;;AAMA,MAAM,MAAM,MAAM,2BAA2B;AAG7C,SAAS,yBAAyB,gBAAwB,SAAkB,OAAwB;AAClG,KAAI,mBAAmB,UAAU,CAAC,WAAW,UAAU,OACrD,QAAO;CAaT,MAAM,aATqC;EACzC,YAAY;EACZ,QAAQ;EACR,QAAQ;EACR,WAAW;EACX,YAAY;EACZ,YAAY;EACb,CAE6B;AAC9B,KAAI,CAAC,WACH,QAAO;AAQT,QAAO,GAAG,eAAe,GAJN,WAAW,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI,KAE1C,MAAM,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI;;;;;AAuBtD,IAAM,qBAAN,cAAiC,UAAU;CAQzC,cAAc;AACZ,QAAM,EAAE,YAAY,OAAO,CAAC;gBARb,OAAO,MAAM,EAAE;sBACT;mBACS,EAAE;qBACS;wBAClB;oBACY,EAAE;;CAMvC,WAAW,OAAY,WAA2B,UAAsB;AAEtE,OAAK,SAAS,OAAO,OAAO,CAAC,KAAK,QAAQ,MAAM,CAAC;AAGjD,OAAK,YAAY;AAGjB,OAAK,KAAK,MAAM;AAChB,YAAU;;CAGZ,AAAQ,aAAa;EACnB,IAAI,eAAe;AAEnB,SAAO,KAAK,OAAO,SAAS,gBAAgB,GAAG;GAC7C,MAAM,OAAO,KAAK,OAAO,aAAa,aAAa;GACnD,MAAM,OAAO,KAAK,OAAO,SAAS,eAAe,GAAG,eAAe,EAAE,CAAC,SAAS,QAAQ;AAGvF,OAAI,SAAS,KAAK,OAAO,KAAK,KAAK,OAAO,SAAS,eAAe,KAChE;GAGF,MAAMA,MAAoB;IACxB;IACA,QAAQ,KAAK,eAAe;IAC5B;IACA,YAAY;IACb;AAED,OAAI,cAAc,IAAI,KAAK,aAAa,IAAI,OAAO,SAAS,IAAI,OAAO;AACvE,QAAK,WAAW,KAAK,IAAI;AACzB,QAAK,UAAU,IAAI;AAEnB,mBAAgB;;AAIlB,OAAK,gBAAgB;AACrB,OAAK,SAAS,KAAK,OAAO,SAAS,aAAa;;CAGlD,AAAQ,UAAU,KAAmB;AACnC,UAAQ,IAAI,MAAZ;GACE,KAAK;GACL,KAAK;AAEH,SAAK,iBAAiB,KAAK,IAAI,KAAK,gBAAgB,IAAI,SAAS,IAAI,KAAK;AAC1E;GAEF,KAAK;AACH,SAAK,cAAc;AACnB;GAEF,KAAK;AACH,QAAI,KAAK,aAAa;AAEpB,UAAK,UAAU,KAAK;MAClB,MAAM;MACN,QAAQ,KAAK,YAAY;MACzB,MAAO,IAAI,SAAS,IAAI,OAAQ,KAAK,YAAY;MACjD,YAAY,KAAK,YAAY;MAC7B,YAAY,IAAI;MACjB,CAAC;AACF,UAAK,cAAc;UAKnB,KAAI,uCAAuC,IAAI,OAAO,+BAA+B;AAEvF;;;CAIN,OAAO,UAAsB;AAC3B,OAAK,YAAY;AAIjB,MAAI,KAAK,iBAAiB,EACxB,MAAK,UAAU,QAAQ;GACrB,MAAM;GACN,QAAQ;GACR,MAAM,KAAK;GACZ,CAAC;AAGJ,YAAU;;CAGZ,eAA2B;AACzB,SAAO,KAAK;;;AAKhB,SAAS,qBAAqB,cAAoC;CAChE,IAAI,SAAS;AACb,QAAO,IAAI,SAAS,EAClB,OAAO;AACL,MAAI,UAAU,aAAa,QAAQ;AACjC,QAAK,KAAK,KAAK;AACf;;EAGF,MAAM,YAAY,KAAK,IAAI,KAAK,MAAM,aAAa,SAAS,OAAO;EACnE,MAAM,QAAQ,aAAa,MAAM,QAAQ,SAAS,UAAU;AAC5D,YAAU;AACV,OAAK,KAAK,OAAO,KAAK,MAAM,CAAC;IAEhC,CAAC;;AAKJ,MAAa,wBAAwB,OACnC,aACA,mBACA,mBACgD;CAEhD,MAAM,SAAS,IAAI,oBAAoB;CAGvC,MAAMC,SAAmB,EAAE;CAC3B,IAAI,YAAY;AAWhB,OAAM,SAAS,aAAa,QATf,IAAI,SAAS,EACxB,MAAM,OAAO,WAAW,UAAU;AAChC,SAAO,KAAK,MAAM;AAClB,eAAa,MAAM;AACnB,YAAU;IAEb,CAAC,CAGuC;CACzC,MAAM,YAAY,OAAO,cAAc;AAGvC,KAAI,cAAc,EAChB,QAAO,EAAE;CAIX,MAAM,eAAe,OAAO,OAAO,OAAgC;CACnE,MAAM,iBAAiB,qBAAqB,IAAI,WAAW,aAAa,QAAQ,aAAa,YAAY,aAAa,WAAW,CAAC;CAElI,IAAIC;AACJ,KAAI;AACF,UAAQ,MAAM,YAAY,YAAY,eAAe;UAC9C,OAAO;AACd,UAAQ,KAAK,wCAAwC,MAAM;AAC3D,SAAO,EAAE;;CAGX,MAAM,eAAe,MAAM;CAC3B,MAAM,eAAe,MAAM;CAE3B,MAAMC,eAAmD,EAAE;CAC3D,MAAM,eAAe,UAAU,MAAK,MAAK,EAAE,SAAS,OAAO;CAC3D,MAAM,iBAAiB,UAAU,QAAO,MAAK,EAAE,SAAS,QAAQ;CAIhE,MAAMC,qBAID
,EAAE;AAEP,MAAK,IAAI,gBAAgB,GAAG,gBAAgB,eAAe,QAAQ,iBAAiB;EAClF,MAAM,WAAW,eAAe;EAGhC,MAAM,gBAAgB,SAAS;EAC/B,MAAM,cAAc,SAAS,SAAS,SAAS;EAE/C,MAAM,eAAe,MAAM,QACxB,QAAO,WAAU;AAEhB,UADe,aAAa,MAAK,MAAK,EAAE,UAAU,OAAO,aAAa,EACvD,eAAe,WAC5B,OAAO,QAAQ,UACf,OAAO,OAAO,iBACd,OAAO,MAAM;IACf,CACD,KAAI,YAAW;GACd,KAAK,OAAO;GACZ,KAAK,OAAO;GACZ,UAAU,OAAO;GACjB,YAAY,OAAO,OAAO,SAAS,IAAI,IAAI;GAC5C,EAAE;EAEL,MAAM,eAAe,MAAM,QACxB,QAAO,WAAU;AAEhB,UADe,aAAa,MAAK,MAAK,EAAE,UAAU,OAAO,aAAa,EACvD,eAAe,WAC5B,OAAO,QAAQ,UACf,OAAO,OAAO,iBACd,OAAO,MAAM;IACf,CACD,KAAI,YAAW;GACd,KAAK,OAAO;GACZ,KAAK,OAAO;GACZ,UAAU,OAAO;GAClB,EAAE;AAEL,qBAAmB,KAAK;GACtB;GACA;GACA;GACD,CAAC;;AAIJ,MAAK,MAAM,eAAe,cAAc;EACtC,MAAMC,WAA2B,EAAE;EAGnC,MAAM,oBAAoB,MAAM,QAAQ,QAAO,MAAK,EAAE,iBAAiB,YAAY,MAAM;EACzF,MAAM,kBAAkB,kBAAkB,QAAO,MAAK,EAAE,OAAO,SAAS,IAAI,CAAC;EAG7E,MAAM,mBAAmB,gBAAgB;AAEzC,MAAI,uBAAuB,kBAAkB,OAAO,kBAAkB,gBAAgB,OAAO,wBAAwB,YAAY,QAAQ;EAGzI,MAAM,WAAW,MAAM;AACvB,MAAI,CAAC,UAAU;AACb,WAAQ,KAAK,qCAAqC;AAClD;;EAIF,MAAM,YAAY,KAAK,MAAM,SAAS,MAAM,SAAS,IAAI;EAGzD,IAAIC;EACJ,MAAM,kBAAkB,MAAM,QAAQ,QAAO,MAAK,EAAE,iBAAiB,YAAY,MAAM;AACvF,MAAI,gBAAgB,SAAS,GAAG;GAC9B,MAAM,kBAAkB,gBAAgB,GAAI;AAC5C,OAAI,gCAAgC,gBAAgB,cAAc,gBAAgB,GAAI,WAAW;GAIjG,MAAM,mBAAmB,gBAAgB,GAAI;AAC7C,OAAI,KAAK,IAAI,iBAAiB,GAAG,IAC/B,0BAAyB,mBAAmB;;AAGhD,MAAI,sBAAsB,OACxB,0BAAyB;AAI3B,MAAI,cAAc,mBAAmB,OAAO,8BAA8B,YAAY,QAAQ;AAC9F,OAAK,MAAM,gBAAgB,oBAAoB;GAC7C,MAAM,WAAW,eAAe,aAAa;GAC7C,MAAM,eAAe,aAAa;AAElC,OAAI,YAAY,aAAa,cAAc,IAAI,aAAa,OAAO,gBAAgB;AACnF,OAAI,aAAa,WAAW,GAAG;AAC7B,QAAI,qBAAqB,aAAa,cAAc,qBAAqB;AACzE;;GAIF,MAAM,cAAc,aAAa;GAGjC,MAAM,WAAW,aAAa,MAAK,MAAK,EAAE,WAAW,IAAI;GAGzD,MAAM,aAAa,KAAK,MAAM,SAAS,MAAM,YAAY,SAAS,IAAI;GACtE,MAAM,aAAa,KAAK,MAAM,SAAS,MAAM,YAAY,SAAS,IAAI;GAKtE,MAAM,eADmB,mBAAmB,aAAa,gBAAgB,IAClC,aAAa,MAAK,MAAK,EAAE,WAAW;GAE3E,IAAIC;AACJ,OAAI,aAGF,mBADuB,KAAK,MAAM,aAAa,MAAM,YAAY,SAAS,IAAI,GAC3C;QAC9B;IAEL,MAAMC,oBAAkB,MAAM,QAC3B,QAAO,MAAK;AAEX,YADe,aAAa,MAAK,MAAK,EAAE,UAAU,EAAE,aAAa,EAClD,eAAe;MAC9B,CACD,MAAM,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI;IAChC,MAAM,aAAaA,kBAAgBA,kBAAgB,SAAS;AAE5D,sBADkB,KAAK,OAAO,WAAW,OAAO,WAAW,YAAY,MAAM,YAAY,SAAS,IAAI,GACxE;;AAKhC,YAAS,KAAK;IACZ,KAAK;IACL,KAAK;IACL,UAAU;IACV,QAAQ,SAAS;IACjB,MAAM,SAAS;IAChB,CAAC;;EAKJ,IAAI,gBAAgB;AACpB,MAAI,kBAAkB,SAAS,GAAG;GAChC,MAAM,cAAc,kBAAkB;GACtC,MAAM,aAAa,kBAAkB,kBAAkB,SAAS;GAEhE,MAAM,WAAW,KAAK,MAAM,YAAY,MAAM,YAAY,SAAS,IAAI;AAIvE,mBAHgB,KAAK,MAAM,WAAW,MAAM,YAAY,SAAS,IAAI,GAG3C;;EAG5B,MAAM,eAAe,iBAAiB,YAAY,UAAW,YAAY,QAAQ;AACjF,eAAa,gBAAgB;GAC3B,OAAO;GACP,MAAM;GACN,OAAO,YAAY,eAAe,YAAY;GAC9C,QAAQ,YAAY,gBAAgB,YAAY;GACrC;GACX,cAAc;GACd,OAAO,yBAAyB,YAAY,kBAAkB,YAAY,SAAS,YAAY,MAAM;GACrG,UAAU;GACV,mBAAmB;GACnB,aAAa;IACX,QAAQ;IACR,MAAM,cAAc,QAAQ;IAC7B;GACD;GACD;;AAIH,MAAK,MAAM,eAAe,cAAc;EACtC,MAAMH,WAA2B,EAAE;EAInC,MAAM,mBADoB,MAAM,QAAQ,QAAO,MAAK,EAAE,iBAAiB,YAAY,MAAM,CAC9C;EAG3C,MAAM,WAAW,MAAM;AACvB,MAAI,CAAC,UAAU;AACb,WAAQ,KAAK,qCAAqC;AAClD;;EAIF,MAAM,YAAY,KAAK,MAAM,SAAS,MAAM,SAAS,IAAI;EAGzD,IAAIC;EACJ,MAAM,kBAAkB,MAAM,QAAQ,QAAO,MAAK,EAAE,iBAAiB,YAAY,MAAM;AACvF,MAAI,gBAAgB,SAAS,GAAG;GAE9B,MAAM,mBAAmB,gBAAgB,GAAI;AAC7C,OAAI,KAAK,IAAI,iBAAiB,GAAG,IAC/B,0BAAyB,mBAAmB;;AAGhD,MAAI,sBAAsB,OACxB,0BAAyB;AAI3B,MAAI,cAAc,mBAAmB,OAAO,8BAA8B,YAAY,QAAQ;AAC9F,OAAK,MAAM,gBAAgB,oBAAoB;GAC7C,MAAM,WAAW,eAAe,aAAa;GAC7C,MAAM,eAAe,aAAa;AAElC,OAAI,YAAY,aAAa,cAAc,IAAI,aAAa,OAAO,gBAAgB;AACnF,OAAI,aAAa,WAAW,GAAG;AAC7B,QAAI,qBAAqB,aAAa,cAAc,qBAAqB;AACzE;;GAIF,MAAM,cAAc,aAAa;GAIjC,MAAM,aAAa,KAAK,MAAM,YAAY,MAAM,YAAY,SAAS,IAAI;GACzE,MAAM,aAAa,KAAK,MAAM,YAAY,MAAM,YAAY,SAAS,IAAI;GAIzE,MAAM,kBADmB,mBAAmB,aAAa,gBAAgB,IAC/B,aAAa;GAEvD,IAAIC
;AACJ,OAAI,gBAGF,mBADuB,KAAK,MAAM,gBAAgB,MAAM,YAAY,SAAS,IAAI,GAC9C;QAC9B;IAEL,MAAME,oBAAkB,MAAM,QAC3B,QAAO,MAAK;AAEX,YADe,aAAa,MAAK,MAAK,EAAE,UAAU,EAAE,aAAa,EAClD,eAAe;MAC9B,CACD,MAAM,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI;IAChC,MAAM,aAAaA,kBAAgBA,kBAAgB,SAAS;AAE5D,sBADkB,KAAK,OAAO,WAAW,OAAO,WAAW,YAAY,MAAM,YAAY,SAAS,IAAI,GACxE;;AAKhC,YAAS,KAAK;IACZ,KAAK;IACL,KAAK;IACL,UAAU;IACV,QAAQ,SAAS;IACjB,MAAM,SAAS;IAChB,CAAC;;EAIJ,MAAM,gBAAgB,SAAS,QAAQ,KAAK,QAAQ,MAAM,IAAI,UAAU,EAAE;EAE1E,MAAM,eAAe,iBAAiB,YAAY,UAAW,YAAY,QAAQ;AACjF,eAAa,gBAAgB;GAC3B,OAAO;GACP,MAAM;GACN,eAAe,YAAY;GAC3B,aAAa,OAAO,YAAY,YAAY;GAC5C,aAAa,YAAY;GACzB,cAAc;GACH;GACX,OAAO,YAAY,oBAAoB,YAAY,cAAc;GACjE,UAAU;GACV,mBAAmB;GACnB,aAAa;IACX,QAAQ;IACR,MAAM,cAAc,QAAQ;IAC7B;GACD;GACD;;AAGH,QAAO"}
+ {"version":3,"file":"generateFragmentIndex.js","names":["box: MP4BoxHeader","chunks: Buffer[]","probe: PacketProbe","trackIndexes: Record<number, TrackFragmentIndex>","fragmentTimingData: FragmentTimingData[]","segments: TrackSegment[]","trackStartTimeOffsetMs: number | undefined"],"sources":["../src/generateFragmentIndex.ts"],"sourcesContent":["import { Readable, Transform, Writable } from \"node:stream\";\nimport { pipeline } from \"node:stream/promises\";\nimport debug from \"debug\";\nimport type { TrackFragmentIndex, TrackSegment } from \"./Probe.js\";\nimport { PacketProbe } from \"./Probe.js\";\n\nconst log = debug(\"ef:generateFragmentIndex\");\n\n// Minimum segment duration in milliseconds\nconst MIN_SEGMENT_DURATION_MS = 2000; // 2 seconds\nconst MS_PER_SECOND = 1000;\n\n// ============================================================================\n// Core Domain Types (Type Safety as Invariant Enforcement)\n// ============================================================================\n\n/** Raw packet from ffprobe - the fundamental unit of media data */\ninterface ProbePacket {\n stream_index: number;\n pts: number;\n dts: number;\n pts_time: number;\n dts_time: number;\n duration?: number;\n pos?: number;\n flags?: string;\n}\n\n/** Video packet with keyframe status - invariant: isKeyframe is always defined */\ninterface VideoPacket {\n pts: number;\n dts: number;\n duration?: number;\n isKeyframe: boolean;\n}\n\n/** Audio packet - simpler than video, no keyframe concept */\ninterface AudioPacket {\n pts: number;\n dts: number;\n duration?: number;\n}\n\n/** Fragment timing data - packets organized by fragment */\ninterface FragmentTimingData {\n fragmentIndex: number;\n videoPackets: VideoPacket[];\n audioPackets: AudioPacket[];\n}\n\n/** Timebase for timestamp conversion */\ninterface Timebase {\n num: number;\n den: number;\n}\n\n// Helper function to construct H.264 codec string from profile and level\nfunction constructH264CodecString(\n codecTagString: string,\n profile?: string,\n level?: number,\n): string {\n if (codecTagString !== \"avc1\" || !profile || level === undefined) {\n return codecTagString;\n }\n\n // Map H.264 profile names to profile_idc values\n const profileMap: Record<string, number> = {\n Baseline: 0x42,\n Main: 0x4d,\n High: 0x64,\n \"High 10\": 0x6e,\n \"High 422\": 0x7a,\n \"High 444\": 0xf4,\n };\n\n const profileIdc = profileMap[profile];\n if (!profileIdc) {\n return codecTagString;\n }\n\n // Format: avc1.PPCCLL where PP=profile_idc, CC=constraint_flags, LL=level_idc\n const profileHex = profileIdc.toString(16).padStart(2, \"0\");\n const constraintFlags = \"00\"; // Most common case\n const levelHex = level.toString(16).padStart(2, \"0\");\n\n return `${codecTagString}.${profileHex}${constraintFlags}${levelHex}`;\n}\n\ninterface MP4BoxHeader {\n type: string;\n offset: number;\n size: number;\n headerSize: number;\n}\n\ninterface Fragment {\n type: \"init\" | \"media\";\n offset: number;\n size: number;\n moofOffset?: number;\n mdatOffset?: number;\n}\n\n/**\n * Streaming MP4 box parser that detects box boundaries without loading entire file into memory\n */\nclass StreamingBoxParser extends Transform {\n private buffer = Buffer.alloc(0);\n private globalOffset = 0;\n private fragments: Fragment[] = [];\n private currentMoof: MP4BoxHeader | null = null;\n private initSegmentEnd = 0;\n private foundBoxes: MP4BoxHeader[] = [];\n\n constructor() {\n super({ objectMode: false });\n }\n\n _transform(chunk: Buffer, _encoding: BufferEncoding, 
callback: () => void) {\n // Append new data to our sliding buffer\n this.buffer = Buffer.concat([this.buffer, chunk]);\n\n // Parse all complete boxes in the current buffer\n this.parseBoxes();\n\n // Pass through the original chunk unchanged\n this.push(chunk);\n callback();\n }\n\n private parseBoxes() {\n let bufferOffset = 0;\n\n while (this.buffer.length - bufferOffset >= 8) {\n const size = this.buffer.readUInt32BE(bufferOffset);\n const type = this.buffer\n .subarray(bufferOffset + 4, bufferOffset + 8)\n .toString(\"ascii\");\n\n // Invalid or incomplete box\n if (size === 0 || size < 8 || this.buffer.length < bufferOffset + size) {\n break;\n }\n\n const box: MP4BoxHeader = {\n type,\n offset: this.globalOffset + bufferOffset,\n size,\n headerSize: 8,\n };\n\n log(`Found box: ${box.type} at offset ${box.offset}, size ${box.size}`);\n this.foundBoxes.push(box);\n this.handleBox(box);\n\n bufferOffset += size;\n }\n\n // Update global offset and trim processed data from buffer\n this.globalOffset += bufferOffset;\n this.buffer = this.buffer.subarray(bufferOffset);\n }\n\n private handleBox(box: MP4BoxHeader) {\n switch (box.type) {\n case \"ftyp\":\n case \"moov\":\n // Part of init segment\n this.initSegmentEnd = Math.max(\n this.initSegmentEnd,\n box.offset + box.size,\n );\n break;\n\n case \"moof\":\n this.currentMoof = box;\n break;\n\n case \"mdat\":\n if (this.currentMoof) {\n // Found a complete fragment (moof + mdat pair) - fragmented MP4\n this.fragments.push({\n type: \"media\",\n offset: this.currentMoof.offset,\n size: box.offset + box.size - this.currentMoof.offset,\n moofOffset: this.currentMoof.offset,\n mdatOffset: box.offset,\n });\n this.currentMoof = null;\n } else {\n // mdat without moof - this is non-fragmented content, not a fragment\n // Common in mixed MP4 files where initial content is non-fragmented\n // followed by fragmented content. 
Ignore for fragment indexing.\n log(\n `Found non-fragmented mdat at offset ${box.offset}, skipping for fragment index`,\n );\n }\n break;\n }\n }\n\n _flush(callback: () => void) {\n this.parseBoxes(); // Process any remaining buffered data\n\n // Probe always outputs fragmented MP4\n // Init segment is ftyp + moov boxes before the first moof\n if (this.initSegmentEnd > 0) {\n this.fragments.unshift({\n type: \"init\",\n offset: 0,\n size: this.initSegmentEnd,\n });\n }\n\n callback();\n }\n\n getFragments(): Fragment[] {\n return this.fragments;\n }\n}\n\n// Helper function to create a readable stream from fragment data\nfunction createFragmentStream(fragmentData: Uint8Array): Readable {\n let offset = 0;\n return new Readable({\n read() {\n if (offset >= fragmentData.length) {\n this.push(null);\n return;\n }\n\n const chunkSize = Math.min(64 * 1024, fragmentData.length - offset); // 64KB chunks\n const chunk = fragmentData.slice(offset, offset + chunkSize);\n offset += chunkSize;\n this.push(Buffer.from(chunk));\n },\n });\n}\n\n// Helper to convert timestamp from ffprobe timebase to track timescale\nfunction convertTimestamp(\n pts: number,\n timebase: Timebase,\n timescale: number,\n): number {\n return Math.round((pts * timescale) / timebase.den);\n}\n\n// Helper to calculate duration in milliseconds from timescale units\nfunction durationMsFromTimescale(\n durationTimescale: number,\n timescale: number,\n): number {\n return (durationTimescale / timescale) * MS_PER_SECOND;\n}\n\n// Helper to calculate segment byte range from accumulated fragments\nfunction calculateSegmentByteRange(\n accumulatedFragments: Array<{ fragment: Fragment }>,\n): { offset: number; size: number } {\n const firstFrag = accumulatedFragments[0]!;\n const lastFrag = accumulatedFragments[accumulatedFragments.length - 1]!;\n return {\n offset: firstFrag.fragment.offset,\n size: lastFrag.fragment.offset + lastFrag.fragment.size - firstFrag.fragment.offset,\n };\n}\n\n// Explicit enumeration of segment accumulation state (Enumerate the Core Concept)\ntype SegmentAccumulationState =\n | { type: \"idle\" }\n | {\n type: \"accumulating\";\n startPts: number;\n startDts: number;\n fragments: Array<{ fragment: Fragment; fragmentData: FragmentTimingData }>;\n };\n\n// Invariant: Segment must start on keyframe (for video) and have minimum duration\ninterface SegmentEvaluation {\n cts: number;\n dts: number;\n duration: number;\n offset: number;\n size: number;\n}\n\n// Track processing context - single source of truth for track processing\ninterface TrackProcessingContext {\n timebase: Timebase;\n timescale: number;\n fragmentTimingData: FragmentTimingData[];\n mediaFragments: Fragment[];\n // Cached filtered packets for this stream (Performance Through Caching)\n streamPackets: ProbePacket[];\n streamType: \"video\" | \"audio\";\n streamIndex: number;\n}\n\n// Segment accumulator that encapsulates accumulation logic\nclass SegmentAccumulator {\n private state: SegmentAccumulationState = { type: \"idle\" };\n private readonly context: TrackProcessingContext;\n private readonly minDurationMs: number;\n\n constructor(context: TrackProcessingContext, minDurationMs: number) {\n this.context = context;\n this.minDurationMs = minDurationMs;\n }\n\n // Evaluation: Determine if we should finalize (semantics)\n shouldFinalize(nextKeyframe: { pts: number; dts: number } | null): boolean {\n if (this.state.type !== \"accumulating\") {\n return false;\n }\n\n const durationMs = this.calculateAccumulatedDurationMs();\n const 
hasMinimumDuration = durationMs >= this.minDurationMs;\n\n // For video: finalize on keyframe + minimum duration\n // For audio: finalize on minimum duration (no keyframe requirement)\n if (this.context.streamType === \"video\") {\n return hasMinimumDuration && nextKeyframe !== null;\n } else {\n return hasMinimumDuration;\n }\n }\n\n // Evaluation: Calculate what the segment would be (semantics)\n evaluateSegment(\n nextBoundary: { pts: number } | null,\n ): SegmentEvaluation | null {\n if (this.state.type !== \"accumulating\") {\n return null;\n }\n\n const segmentCts = convertTimestamp(\n this.state.startPts,\n this.context.timebase,\n this.context.timescale,\n );\n const segmentDts = convertTimestamp(\n this.state.startDts,\n this.context.timebase,\n this.context.timescale,\n );\n const segmentDuration = this.calculateSegmentDuration(\n segmentCts,\n nextBoundary,\n );\n const { offset, size } = calculateSegmentByteRange(this.state.fragments);\n\n return {\n cts: segmentCts,\n dts: segmentDts,\n duration: segmentDuration,\n offset,\n size,\n };\n }\n\n // Application: Add fragment to accumulation (mechanism)\n addFragment(fragment: Fragment, fragmentData: FragmentTimingData): void {\n if (this.state.type === \"idle\") {\n // Start accumulation - invariant: video segments must start on keyframe\n const startPts = this.getStartPts(fragmentData);\n const startDts = this.getStartDts(fragmentData);\n this.state = {\n type: \"accumulating\",\n startPts,\n startDts,\n fragments: [{ fragment, fragmentData }],\n };\n } else {\n // Continue accumulation\n this.state.fragments.push({ fragment, fragmentData });\n }\n }\n\n // Application: Reset accumulation (mechanism)\n reset(): void {\n this.state = { type: \"idle\" };\n }\n\n // Application: Start new segment with keyframe (mechanism)\n startNewSegment(keyframe: { pts: number; dts: number }): void {\n this.state = {\n type: \"accumulating\",\n startPts: keyframe.pts,\n startDts: keyframe.dts,\n fragments: [],\n };\n }\n\n // Query: Get current state\n getState(): SegmentAccumulationState {\n return this.state;\n }\n\n // Query: Check if accumulating\n isAccumulating(): boolean {\n return this.state.type === \"accumulating\";\n }\n\n // Private helpers\n private calculateAccumulatedDurationMs(): number {\n if (this.state.type !== \"accumulating\") {\n return 0;\n }\n\n const lastFrag = this.state.fragments[this.state.fragments.length - 1]!;\n const lastPacket = this.getLastPacket(lastFrag.fragmentData);\n const endCts = convertTimestamp(\n lastPacket.pts + (lastPacket.duration || 0),\n this.context.timebase,\n this.context.timescale,\n );\n const startCts = convertTimestamp(\n this.state.startPts,\n this.context.timebase,\n this.context.timescale,\n );\n return durationMsFromTimescale(endCts - startCts, this.context.timescale);\n }\n\n private calculateSegmentDuration(\n segmentCts: number,\n nextBoundary: { pts: number } | null,\n ): number {\n if (nextBoundary) {\n const nextSegmentCts = convertTimestamp(\n nextBoundary.pts,\n this.context.timebase,\n this.context.timescale,\n );\n return nextSegmentCts - segmentCts;\n }\n\n // Last segment: duration to end of all packets\n // Use pre-cached streamPackets (Performance Through Caching)\n const sortedPackets = [...this.context.streamPackets].sort(\n (a, b) => a.pts - b.pts,\n );\n const lastPacket = sortedPackets[sortedPackets.length - 1]!;\n const streamEnd = convertTimestamp(\n lastPacket.pts + (lastPacket.duration || 0),\n this.context.timebase,\n this.context.timescale,\n );\n return 
streamEnd - segmentCts;\n }\n\n private getStartPts(fragmentData: FragmentTimingData): number {\n if (this.context.streamType === \"video\") {\n const keyframe = fragmentData.videoPackets.find((p) => p.isKeyframe);\n return keyframe?.pts ?? fragmentData.videoPackets[0]?.pts ?? 0;\n } else {\n return fragmentData.audioPackets[0]?.pts ?? 0;\n }\n }\n\n private getStartDts(fragmentData: FragmentTimingData): number {\n if (this.context.streamType === \"video\") {\n const keyframe = fragmentData.videoPackets.find((p) => p.isKeyframe);\n return keyframe?.dts ?? fragmentData.videoPackets[0]?.dts ?? 0;\n } else {\n return fragmentData.audioPackets[0]?.dts ?? 0;\n }\n }\n\n private getLastPacket(\n fragmentData: FragmentTimingData,\n ): { pts: number; duration?: number } {\n if (this.context.streamType === \"video\") {\n const packets = fragmentData.videoPackets;\n return packets[packets.length - 1]!;\n } else {\n const packets = fragmentData.audioPackets;\n return packets[packets.length - 1]!;\n }\n }\n}\n\n// Helper function to extract fragment data (init + media fragment)\n\nexport const generateFragmentIndex = async (\n inputStream: Readable,\n startTimeOffsetMs?: number,\n trackIdMapping?: Record<number, number>, // Map from source track ID to desired track ID\n): Promise<Record<number, TrackFragmentIndex>> => {\n // Step 1: Create a streaming parser that detects fragment boundaries\n const parser = new StreamingBoxParser();\n\n // Step 2: Create a passthrough stream that doesn't buffer everything\n const chunks: Buffer[] = [];\n let totalSize = 0;\n\n const dest = new Writable({\n write(chunk, _encoding, callback) {\n chunks.push(chunk);\n totalSize += chunk.length;\n callback();\n },\n });\n\n // Process the stream through both parser and collection\n await pipeline(inputStream, parser, dest);\n const fragments = parser.getFragments();\n\n // If no data was collected, return empty result\n if (totalSize === 0) {\n return {};\n }\n\n // Step 3: Use ffprobe to analyze the complete stream for track metadata\n const completeData = Buffer.concat(chunks as readonly Uint8Array[]);\n const completeStream = createFragmentStream(\n new Uint8Array(\n completeData.buffer,\n completeData.byteOffset,\n completeData.byteLength,\n ),\n );\n\n let probe: PacketProbe;\n try {\n probe = await PacketProbe.probeStream(completeStream);\n } catch (error) {\n console.warn(\"Failed to probe stream with ffprobe:\", error);\n return {};\n }\n\n const videoStreams = probe.videoStreams;\n const audioStreams = probe.audioStreams;\n\n const trackIndexes: Record<number, TrackFragmentIndex> = {};\n const initFragment = fragments.find((f) => f.type === \"init\");\n const mediaFragments = fragments.filter((f) => f.type === \"media\");\n\n // Map packets to fragments using byte position for moof+mdat boundaries\n // But create contiguous segments based on keyframes\n const fragmentTimingData: FragmentTimingData[] = [];\n\n for (\n let fragmentIndex = 0;\n fragmentIndex < mediaFragments.length;\n fragmentIndex++\n ) {\n const fragment = mediaFragments[fragmentIndex]!;\n\n // Find packets that belong to this fragment based on byte position (moof+mdat boundaries)\n const fragmentStart = fragment.offset;\n const fragmentEnd = fragment.offset + fragment.size;\n\n const videoPackets = probe.packets\n .filter((packet) => {\n const stream = videoStreams.find(\n (s) => s.index === packet.stream_index,\n );\n return (\n stream?.codec_type === \"video\" &&\n packet.pos !== undefined &&\n packet.pos >= fragmentStart &&\n packet.pos < 
fragmentEnd\n );\n })\n .map((packet) => ({\n pts: packet.pts,\n dts: packet.dts,\n duration: packet.duration,\n isKeyframe: packet.flags?.includes(\"K\") ?? false,\n }));\n\n const audioPackets = probe.packets\n .filter((packet) => {\n const stream = audioStreams.find(\n (s) => s.index === packet.stream_index,\n );\n return (\n stream?.codec_type === \"audio\" &&\n packet.pos !== undefined &&\n packet.pos >= fragmentStart &&\n packet.pos < fragmentEnd\n );\n })\n .map((packet) => ({\n pts: packet.pts,\n dts: packet.dts,\n duration: packet.duration,\n }));\n\n fragmentTimingData.push({\n fragmentIndex,\n videoPackets,\n audioPackets,\n });\n }\n\n // Unified track processing function (One Direction of Truth)\n const processTrack = (\n streamIndex: number,\n streamType: \"video\" | \"audio\",\n timebase: Timebase,\n allPackets: ProbePacket[],\n ): TrackSegment[] => {\n const segments: TrackSegment[] = [];\n const timescale = Math.round(timebase.den / timebase.num);\n\n // Cache filtered packets once (Performance Through Caching)\n const streamPackets = allPackets.filter(\n (p) => p.stream_index === streamIndex,\n );\n\n const context: TrackProcessingContext = {\n timebase,\n timescale,\n fragmentTimingData,\n mediaFragments,\n streamPackets,\n streamType,\n streamIndex,\n };\n\n const accumulator = new SegmentAccumulator(context, MIN_SEGMENT_DURATION_MS);\n\n for (let i = 0; i < fragmentTimingData.length; i++) {\n const fragmentData = fragmentTimingData[i]!;\n const fragment = mediaFragments[fragmentData.fragmentIndex]!;\n const packets =\n streamType === \"video\"\n ? fragmentData.videoPackets\n : fragmentData.audioPackets;\n\n log(\n `Fragment ${fragmentData.fragmentIndex}: ${packets.length} ${streamType} packets`,\n );\n\n if (packets.length === 0) {\n log(\n `Skipping fragment ${fragmentData.fragmentIndex} - no ${streamType} packets`,\n );\n continue;\n }\n\n if (streamType === \"video\") {\n // Video: segments must start on keyframes\n const keyframe = fragmentData.videoPackets.find(\n (p) => p.isKeyframe,\n );\n const hasKeyframe = keyframe !== undefined;\n\n // Start new segment on keyframe if none exists\n if (!accumulator.isAccumulating() && hasKeyframe) {\n accumulator.startNewSegment({\n pts: keyframe.pts,\n dts: keyframe.dts,\n });\n accumulator.addFragment(fragment, fragmentData);\n continue;\n }\n\n // Skip fragments without keyframes if no segment started\n if (!accumulator.isAccumulating()) {\n continue;\n }\n\n // Check if we should finalize when encountering a new keyframe\n if (hasKeyframe) {\n if (accumulator.shouldFinalize({ pts: keyframe.pts, dts: keyframe.dts })) {\n // Duration should be to the start of this keyframe (start of next segment)\n const nextBoundary = { pts: keyframe.pts };\n const evaluation = accumulator.evaluateSegment(nextBoundary);\n if (evaluation) {\n segments.push(evaluation);\n }\n accumulator.reset();\n accumulator.startNewSegment({\n pts: keyframe.pts,\n dts: keyframe.dts,\n });\n }\n }\n } else {\n // Audio: no keyframe requirement, just duration-based\n if (!accumulator.isAccumulating()) {\n accumulator.addFragment(fragment, fragmentData);\n continue;\n }\n\n // Check if we should finalize based on accumulated duration\n if (accumulator.shouldFinalize(null)) {\n // Duration should be to the start of this fragment (start of next segment)\n const nextBoundary = { pts: fragmentData.audioPackets[0]!.pts };\n const evaluation = accumulator.evaluateSegment(nextBoundary);\n if (evaluation) {\n segments.push(evaluation);\n }\n accumulator.reset();\n 
}\n }\n\n // Add fragment to current segment\n accumulator.addFragment(fragment, fragmentData);\n }\n\n // Finalize any remaining accumulated fragments\n if (accumulator.isAccumulating()) {\n const evaluation = accumulator.evaluateSegment(null);\n if (evaluation) {\n segments.push(evaluation);\n }\n }\n\n return segments;\n };\n\n // Step 4: Process video tracks using ffprobe data\n for (const videoStream of videoStreams) {\n // Get timebase for this stream to convert timestamps\n const timebase = probe.videoTimebase;\n if (!timebase) {\n console.warn(\"No timebase found for video stream\");\n continue;\n }\n\n const timescale = Math.round(timebase.den / timebase.num);\n\n // Cache filtered packets once (Performance Through Caching)\n const streamPackets = (probe.packets as ProbePacket[]).filter(\n (p) => p.stream_index === videoStream.index,\n );\n const keyframePackets = streamPackets.filter((p) =>\n p.flags?.includes(\"K\"),\n );\n const totalSampleCount = keyframePackets.length;\n\n log(\n `Complete stream has ${streamPackets.length} video packets, ${keyframePackets.length} keyframes for stream ${videoStream.index}`,\n );\n\n // Calculate per-track timing offset from first packet for timeline mapping\n let trackStartTimeOffsetMs: number | undefined;\n if (streamPackets.length > 0) {\n log(\n `First video packet dts_time: ${streamPackets[0]!.dts_time}, pts_time: ${streamPackets[0]!.pts_time}`,\n );\n const presentationTime = streamPackets[0]!.pts_time;\n if (Math.abs(presentationTime) > 0.01) {\n trackStartTimeOffsetMs = presentationTime * MS_PER_SECOND;\n }\n }\n if (startTimeOffsetMs !== undefined) {\n trackStartTimeOffsetMs = startTimeOffsetMs;\n }\n\n // Process fragments to create segments with minimum duration\n const segments = processTrack(\n videoStream.index,\n \"video\",\n timebase,\n probe.packets as ProbePacket[],\n );\n\n // Calculate total duration from cached stream packets\n let totalDuration = 0;\n if (streamPackets.length > 0) {\n const firstPacket = streamPackets[0]!;\n const lastPacket = streamPackets[streamPackets.length - 1]!;\n const firstPts = convertTimestamp(firstPacket.pts, timebase, timescale);\n const lastPts = convertTimestamp(lastPacket.pts, timebase, timescale);\n totalDuration = lastPts - firstPts;\n }\n\n const finalTrackId =\n trackIdMapping?.[videoStream.index] ?? 
videoStream.index + 1;\n trackIndexes[finalTrackId] = {\n track: finalTrackId,\n type: \"video\",\n width: videoStream.coded_width || videoStream.width,\n height: videoStream.coded_height || videoStream.height,\n timescale: timescale,\n sample_count: totalSampleCount,\n codec: constructH264CodecString(\n videoStream.codec_tag_string,\n videoStream.profile,\n videoStream.level,\n ),\n duration: totalDuration,\n startTimeOffsetMs: trackStartTimeOffsetMs,\n initSegment: {\n offset: 0,\n size: initFragment?.size || 0,\n },\n segments,\n };\n }\n\n // Step 5: Process audio tracks using ffprobe data\n for (const audioStream of audioStreams) {\n // Get timebase for this stream to convert timestamps\n const timebase = probe.audioTimebase;\n if (!timebase) {\n console.warn(\"No timebase found for audio stream\");\n continue;\n }\n\n const timescale = Math.round(timebase.den / timebase.num);\n\n // Cache filtered packets once (Performance Through Caching)\n const streamPackets = (probe.packets as ProbePacket[]).filter(\n (p) => p.stream_index === audioStream.index,\n );\n const totalSampleCount = streamPackets.length;\n\n // Calculate per-track timing offset from first packet for timeline mapping\n let trackStartTimeOffsetMs: number | undefined;\n if (streamPackets.length > 0) {\n const presentationTime = streamPackets[0]!.pts_time;\n if (Math.abs(presentationTime) > 0.01) {\n trackStartTimeOffsetMs = presentationTime * MS_PER_SECOND;\n }\n }\n if (startTimeOffsetMs !== undefined) {\n trackStartTimeOffsetMs = startTimeOffsetMs;\n }\n\n // Process fragments to create segments with minimum duration\n const segments = processTrack(\n audioStream.index,\n \"audio\",\n timebase,\n probe.packets as ProbePacket[],\n );\n\n // Calculate total duration\n const totalDuration = segments.reduce((sum, seg) => sum + seg.duration, 0);\n\n const finalTrackId =\n trackIdMapping?.[audioStream.index] ?? 
audioStream.index + 1;\n trackIndexes[finalTrackId] = {\n track: finalTrackId,\n type: \"audio\",\n channel_count: audioStream.channels,\n sample_rate: Number(audioStream.sample_rate),\n sample_size: audioStream.bits_per_sample,\n sample_count: totalSampleCount,\n timescale: timescale,\n codec: audioStream.codec_tag_string || audioStream.codec_name || \"\",\n duration: totalDuration,\n startTimeOffsetMs: trackStartTimeOffsetMs,\n initSegment: {\n offset: 0,\n size: initFragment?.size || 0,\n },\n segments,\n };\n }\n\n return trackIndexes;\n};\n"],"mappings":";;;;;;AAMA,MAAM,MAAM,MAAM,2BAA2B;AAG7C,MAAM,0BAA0B;AAChC,MAAM,gBAAgB;AA+CtB,SAAS,yBACP,gBACA,SACA,OACQ;AACR,KAAI,mBAAmB,UAAU,CAAC,WAAW,UAAU,OACrD,QAAO;CAaT,MAAM,aATqC;EACzC,UAAU;EACV,MAAM;EACN,MAAM;EACN,WAAW;EACX,YAAY;EACZ,YAAY;EACb,CAE6B;AAC9B,KAAI,CAAC,WACH,QAAO;AAQT,QAAO,GAAG,eAAe,GAJN,WAAW,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI,KAE1C,MAAM,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI;;;;;AAuBtD,IAAM,qBAAN,cAAiC,UAAU;CAQzC,cAAc;AACZ,QAAM,EAAE,YAAY,OAAO,CAAC;gBARb,OAAO,MAAM,EAAE;sBACT;mBACS,EAAE;qBACS;wBAClB;oBACY,EAAE;;CAMvC,WAAW,OAAe,WAA2B,UAAsB;AAEzE,OAAK,SAAS,OAAO,OAAO,CAAC,KAAK,QAAQ,MAAM,CAAC;AAGjD,OAAK,YAAY;AAGjB,OAAK,KAAK,MAAM;AAChB,YAAU;;CAGZ,AAAQ,aAAa;EACnB,IAAI,eAAe;AAEnB,SAAO,KAAK,OAAO,SAAS,gBAAgB,GAAG;GAC7C,MAAM,OAAO,KAAK,OAAO,aAAa,aAAa;GACnD,MAAM,OAAO,KAAK,OACf,SAAS,eAAe,GAAG,eAAe,EAAE,CAC5C,SAAS,QAAQ;AAGpB,OAAI,SAAS,KAAK,OAAO,KAAK,KAAK,OAAO,SAAS,eAAe,KAChE;GAGF,MAAMA,MAAoB;IACxB;IACA,QAAQ,KAAK,eAAe;IAC5B;IACA,YAAY;IACb;AAED,OAAI,cAAc,IAAI,KAAK,aAAa,IAAI,OAAO,SAAS,IAAI,OAAO;AACvE,QAAK,WAAW,KAAK,IAAI;AACzB,QAAK,UAAU,IAAI;AAEnB,mBAAgB;;AAIlB,OAAK,gBAAgB;AACrB,OAAK,SAAS,KAAK,OAAO,SAAS,aAAa;;CAGlD,AAAQ,UAAU,KAAmB;AACnC,UAAQ,IAAI,MAAZ;GACE,KAAK;GACL,KAAK;AAEH,SAAK,iBAAiB,KAAK,IACzB,KAAK,gBACL,IAAI,SAAS,IAAI,KAClB;AACD;GAEF,KAAK;AACH,SAAK,cAAc;AACnB;GAEF,KAAK;AACH,QAAI,KAAK,aAAa;AAEpB,UAAK,UAAU,KAAK;MAClB,MAAM;MACN,QAAQ,KAAK,YAAY;MACzB,MAAM,IAAI,SAAS,IAAI,OAAO,KAAK,YAAY;MAC/C,YAAY,KAAK,YAAY;MAC7B,YAAY,IAAI;MACjB,CAAC;AACF,UAAK,cAAc;UAKnB,KACE,uCAAuC,IAAI,OAAO,+BACnD;AAEH;;;CAIN,OAAO,UAAsB;AAC3B,OAAK,YAAY;AAIjB,MAAI,KAAK,iBAAiB,EACxB,MAAK,UAAU,QAAQ;GACrB,MAAM;GACN,QAAQ;GACR,MAAM,KAAK;GACZ,CAAC;AAGJ,YAAU;;CAGZ,eAA2B;AACzB,SAAO,KAAK;;;AAKhB,SAAS,qBAAqB,cAAoC;CAChE,IAAI,SAAS;AACb,QAAO,IAAI,SAAS,EAClB,OAAO;AACL,MAAI,UAAU,aAAa,QAAQ;AACjC,QAAK,KAAK,KAAK;AACf;;EAGF,MAAM,YAAY,KAAK,IAAI,KAAK,MAAM,aAAa,SAAS,OAAO;EACnE,MAAM,QAAQ,aAAa,MAAM,QAAQ,SAAS,UAAU;AAC5D,YAAU;AACV,OAAK,KAAK,OAAO,KAAK,MAAM,CAAC;IAEhC,CAAC;;AAIJ,SAAS,iBACP,KACA,UACA,WACQ;AACR,QAAO,KAAK,MAAO,MAAM,YAAa,SAAS,IAAI;;AAIrD,SAAS,wBACP,mBACA,WACQ;AACR,QAAQ,oBAAoB,YAAa;;AAI3C,SAAS,0BACP,sBACkC;CAClC,MAAM,YAAY,qBAAqB;CACvC,MAAM,WAAW,qBAAqB,qBAAqB,SAAS;AACpE,QAAO;EACL,QAAQ,UAAU,SAAS;EAC3B,MAAM,SAAS,SAAS,SAAS,SAAS,SAAS,OAAO,UAAU,SAAS;EAC9E;;AAmCH,IAAM,qBAAN,MAAyB;CAKvB,YAAY,SAAiC,eAAuB;eAJ1B,EAAE,MAAM,QAAQ;AAKxD,OAAK,UAAU;AACf,OAAK,gBAAgB;;CAIvB,eAAe,cAA4D;AACzE,MAAI,KAAK,MAAM,SAAS,eACtB,QAAO;EAIT,MAAM,qBADa,KAAK,gCAAgC,IACf,KAAK;AAI9C,MAAI,KAAK,QAAQ,eAAe,QAC9B,QAAO,sBAAsB,iBAAiB;MAE9C,QAAO;;CAKX,gBACE,cAC0B;AAC1B,MAAI,KAAK,MAAM,SAAS,eACtB,QAAO;EAGT,MAAM,aAAa,iBACjB,KAAK,MAAM,UACX,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd;EACD,MAAM,aAAa,iBACjB,KAAK,MAAM,UACX,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd;EACD,MAAM,kBAAkB,KAAK,yBAC3B,YACA,aACD;EACD,MAAM,EAAE,QAAQ,SAAS,0BAA0B,KAAK,MAAM,UAAU;AAExE,SAAO;GACL,KAAK;GACL,KAAK;GACL,UAAU;GACV;GACA;GACD;;CAIH,YAAY,UAAoB,cAAwC;AACtE,MAAI,KAAK,MAAM,SAAS,OAItB,MAAK,QAAQ;GACX,MAAM;GACN,UAJe,KAAK,YAAY,aAAa;GAK7C,UAJe,KAAK,YAAY,aAAa;GAK7C,WAAW,CAAC;IAAE;IAAU;IAAc,CAAC;GACxC;MAGD,MAAK,MAAM,UAAU,KAAK;
GAAE;GAAU;GAAc,CAAC;;CAKzD,QAAc;AACZ,OAAK,QAAQ,EAAE,MAAM,QAAQ;;CAI/B,gBAAgB,UAA8C;AAC5D,OAAK,QAAQ;GACX,MAAM;GACN,UAAU,SAAS;GACnB,UAAU,SAAS;GACnB,WAAW,EAAE;GACd;;CAIH,WAAqC;AACnC,SAAO,KAAK;;CAId,iBAA0B;AACxB,SAAO,KAAK,MAAM,SAAS;;CAI7B,AAAQ,iCAAyC;AAC/C,MAAI,KAAK,MAAM,SAAS,eACtB,QAAO;EAGT,MAAM,WAAW,KAAK,MAAM,UAAU,KAAK,MAAM,UAAU,SAAS;EACpE,MAAM,aAAa,KAAK,cAAc,SAAS,aAAa;AAW5D,SAAO,wBAVQ,iBACb,WAAW,OAAO,WAAW,YAAY,IACzC,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,GACgB,iBACf,KAAK,MAAM,UACX,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,EACiD,KAAK,QAAQ,UAAU;;CAG3E,AAAQ,yBACN,YACA,cACQ;AACR,MAAI,aAMF,QALuB,iBACrB,aAAa,KACb,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,GACuB;EAK1B,MAAM,gBAAgB,CAAC,GAAG,KAAK,QAAQ,cAAc,CAAC,MACnD,GAAG,MAAM,EAAE,MAAM,EAAE,IACrB;EACD,MAAM,aAAa,cAAc,cAAc,SAAS;AAMxD,SALkB,iBAChB,WAAW,OAAO,WAAW,YAAY,IACzC,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,GACkB;;CAGrB,AAAQ,YAAY,cAA0C;AAC5D,MAAI,KAAK,QAAQ,eAAe,QAE9B,QADiB,aAAa,aAAa,MAAM,MAAM,EAAE,WAAW,EACnD,OAAO,aAAa,aAAa,IAAI,OAAO;MAE7D,QAAO,aAAa,aAAa,IAAI,OAAO;;CAIhD,AAAQ,YAAY,cAA0C;AAC5D,MAAI,KAAK,QAAQ,eAAe,QAE9B,QADiB,aAAa,aAAa,MAAM,MAAM,EAAE,WAAW,EACnD,OAAO,aAAa,aAAa,IAAI,OAAO;MAE7D,QAAO,aAAa,aAAa,IAAI,OAAO;;CAIhD,AAAQ,cACN,cACoC;AACpC,MAAI,KAAK,QAAQ,eAAe,SAAS;GACvC,MAAM,UAAU,aAAa;AAC7B,UAAO,QAAQ,QAAQ,SAAS;SAC3B;GACL,MAAM,UAAU,aAAa;AAC7B,UAAO,QAAQ,QAAQ,SAAS;;;;AAOtC,MAAa,wBAAwB,OACnC,aACA,mBACA,mBACgD;CAEhD,MAAM,SAAS,IAAI,oBAAoB;CAGvC,MAAMC,SAAmB,EAAE;CAC3B,IAAI,YAAY;AAWhB,OAAM,SAAS,aAAa,QATf,IAAI,SAAS,EACxB,MAAM,OAAO,WAAW,UAAU;AAChC,SAAO,KAAK,MAAM;AAClB,eAAa,MAAM;AACnB,YAAU;IAEb,CAAC,CAGuC;CACzC,MAAM,YAAY,OAAO,cAAc;AAGvC,KAAI,cAAc,EAChB,QAAO,EAAE;CAIX,MAAM,eAAe,OAAO,OAAO,OAAgC;CACnE,MAAM,iBAAiB,qBACrB,IAAI,WACF,aAAa,QACb,aAAa,YACb,aAAa,WACd,CACF;CAED,IAAIC;AACJ,KAAI;AACF,UAAQ,MAAM,YAAY,YAAY,eAAe;UAC9C,OAAO;AACd,UAAQ,KAAK,wCAAwC,MAAM;AAC3D,SAAO,EAAE;;CAGX,MAAM,eAAe,MAAM;CAC3B,MAAM,eAAe,MAAM;CAE3B,MAAMC,eAAmD,EAAE;CAC3D,MAAM,eAAe,UAAU,MAAM,MAAM,EAAE,SAAS,OAAO;CAC7D,MAAM,iBAAiB,UAAU,QAAQ,MAAM,EAAE,SAAS,QAAQ;CAIlE,MAAMC,qBAA2C,EAAE;AAEnD,MACE,IAAI,gBAAgB,GACpB,gBAAgB,eAAe,QAC/B,iBACA;EACA,MAAM,WAAW,eAAe;EAGhC,MAAM,gBAAgB,SAAS;EAC/B,MAAM,cAAc,SAAS,SAAS,SAAS;EAE/C,MAAM,eAAe,MAAM,QACxB,QAAQ,WAAW;AAIlB,UAHe,aAAa,MACzB,MAAM,EAAE,UAAU,OAAO,aAC3B,EAES,eAAe,WACvB,OAAO,QAAQ,UACf,OAAO,OAAO,iBACd,OAAO,MAAM;IAEf,CACD,KAAK,YAAY;GAChB,KAAK,OAAO;GACZ,KAAK,OAAO;GACZ,UAAU,OAAO;GACjB,YAAY,OAAO,OAAO,SAAS,IAAI,IAAI;GAC5C,EAAE;EAEL,MAAM,eAAe,MAAM,QACxB,QAAQ,WAAW;AAIlB,UAHe,aAAa,MACzB,MAAM,EAAE,UAAU,OAAO,aAC3B,EAES,eAAe,WACvB,OAAO,QAAQ,UACf,OAAO,OAAO,iBACd,OAAO,MAAM;IAEf,CACD,KAAK,YAAY;GAChB,KAAK,OAAO;GACZ,KAAK,OAAO;GACZ,UAAU,OAAO;GAClB,EAAE;AAEL,qBAAmB,KAAK;GACtB;GACA;GACA;GACD,CAAC;;CAIJ,MAAM,gBACJ,aACA,YACA,UACA,eACmB;EACnB,MAAMC,WAA2B,EAAE;EAkBnC,MAAM,cAAc,IAAI,mBAVgB;GACtC;GACA,WATgB,KAAK,MAAM,SAAS,MAAM,SAAS,IAAI;GAUvD;GACA;GACA,eAToB,WAAW,QAC9B,MAAM,EAAE,iBAAiB,YAC3B;GAQC;GACA;GACD,EAEmD,wBAAwB;AAE5E,OAAK,IAAI,IAAI,GAAG,IAAI,mBAAmB,QAAQ,KAAK;GAClD,MAAM,eAAe,mBAAmB;GACxC,MAAM,WAAW,eAAe,aAAa;GAC7C,MAAM,UACJ,eAAe,UACX,aAAa,eACb,aAAa;AAEnB,OACE,YAAY,aAAa,cAAc,IAAI,QAAQ,OAAO,GAAG,WAAW,UACzE;AAED,OAAI,QAAQ,WAAW,GAAG;AACxB,QACE,qBAAqB,aAAa,cAAc,QAAQ,WAAW,UACpE;AACD;;AAGF,OAAI,eAAe,SAAS;IAE1B,MAAM,WAAW,aAAa,aAAa,MACxC,MAAM,EAAE,WACV;IACD,MAAM,cAAc,aAAa;AAGjC,QAAI,CAAC,YAAY,gBAAgB,IAAI,aAAa;AAChD,iBAAY,gBAAgB;MAC1B,KAAK,SAAS;MACd,KAAK,SAAS;MACf,CAAC;AACF,iBAAY,YAAY,UAAU,aAAa;AAC/C;;AAIF,QAAI,CAAC,YAAY,gBAAgB,CAC/B;AAIF,QAAI,aACF;SAAI,YAAY,eAAe;MAAE,KAAK,SAAS;MAAK,KAAK,SAAS;MAAK,CAAC,EAAE;MAExE,MAAM,eAAe,EAAE,KAAK,SAAS,KAAK;MAC1C,MAAM,aAAa,YAAY,gBAAgB,aAAa;AAC5D,UAAI,WACF,UAAS,KAAK,WAAW;AAE3B,kBAAY,OAAO;AACnB,kBAA
Y,gBAAgB;OAC1B,KAAK,SAAS;OACd,KAAK,SAAS;OACf,CAAC;;;UAGD;AAEL,QAAI,CAAC,YAAY,gBAAgB,EAAE;AACjC,iBAAY,YAAY,UAAU,aAAa;AAC/C;;AAIF,QAAI,YAAY,eAAe,KAAK,EAAE;KAEpC,MAAM,eAAe,EAAE,KAAK,aAAa,aAAa,GAAI,KAAK;KAC/D,MAAM,aAAa,YAAY,gBAAgB,aAAa;AAC5D,SAAI,WACF,UAAS,KAAK,WAAW;AAE3B,iBAAY,OAAO;;;AAKvB,eAAY,YAAY,UAAU,aAAa;;AAIjD,MAAI,YAAY,gBAAgB,EAAE;GAChC,MAAM,aAAa,YAAY,gBAAgB,KAAK;AACpD,OAAI,WACF,UAAS,KAAK,WAAW;;AAI7B,SAAO;;AAIT,MAAK,MAAM,eAAe,cAAc;EAEtC,MAAM,WAAW,MAAM;AACvB,MAAI,CAAC,UAAU;AACb,WAAQ,KAAK,qCAAqC;AAClD;;EAGF,MAAM,YAAY,KAAK,MAAM,SAAS,MAAM,SAAS,IAAI;EAGzD,MAAM,gBAAiB,MAAM,QAA0B,QACpD,MAAM,EAAE,iBAAiB,YAAY,MACvC;EACD,MAAM,kBAAkB,cAAc,QAAQ,MAC5C,EAAE,OAAO,SAAS,IAAI,CACvB;EACD,MAAM,mBAAmB,gBAAgB;AAEzC,MACE,uBAAuB,cAAc,OAAO,kBAAkB,gBAAgB,OAAO,wBAAwB,YAAY,QAC1H;EAGD,IAAIC;AACJ,MAAI,cAAc,SAAS,GAAG;AAC5B,OACE,gCAAgC,cAAc,GAAI,SAAS,cAAc,cAAc,GAAI,WAC5F;GACD,MAAM,mBAAmB,cAAc,GAAI;AAC3C,OAAI,KAAK,IAAI,iBAAiB,GAAG,IAC/B,0BAAyB,mBAAmB;;AAGhD,MAAI,sBAAsB,OACxB,0BAAyB;EAI3B,MAAM,WAAW,aACf,YAAY,OACZ,SACA,UACA,MAAM,QACP;EAGD,IAAI,gBAAgB;AACpB,MAAI,cAAc,SAAS,GAAG;GAC5B,MAAM,cAAc,cAAc;GAClC,MAAM,aAAa,cAAc,cAAc,SAAS;GACxD,MAAM,WAAW,iBAAiB,YAAY,KAAK,UAAU,UAAU;AAEvE,mBADgB,iBAAiB,WAAW,KAAK,UAAU,UAAU,GAC3C;;EAG5B,MAAM,eACJ,iBAAiB,YAAY,UAAU,YAAY,QAAQ;AAC7D,eAAa,gBAAgB;GAC3B,OAAO;GACP,MAAM;GACN,OAAO,YAAY,eAAe,YAAY;GAC9C,QAAQ,YAAY,gBAAgB,YAAY;GACrC;GACX,cAAc;GACd,OAAO,yBACL,YAAY,kBACZ,YAAY,SACZ,YAAY,MACb;GACD,UAAU;GACV,mBAAmB;GACnB,aAAa;IACX,QAAQ;IACR,MAAM,cAAc,QAAQ;IAC7B;GACD;GACD;;AAIH,MAAK,MAAM,eAAe,cAAc;EAEtC,MAAM,WAAW,MAAM;AACvB,MAAI,CAAC,UAAU;AACb,WAAQ,KAAK,qCAAqC;AAClD;;EAGF,MAAM,YAAY,KAAK,MAAM,SAAS,MAAM,SAAS,IAAI;EAGzD,MAAM,gBAAiB,MAAM,QAA0B,QACpD,MAAM,EAAE,iBAAiB,YAAY,MACvC;EACD,MAAM,mBAAmB,cAAc;EAGvC,IAAIA;AACJ,MAAI,cAAc,SAAS,GAAG;GAC5B,MAAM,mBAAmB,cAAc,GAAI;AAC3C,OAAI,KAAK,IAAI,iBAAiB,GAAG,IAC/B,0BAAyB,mBAAmB;;AAGhD,MAAI,sBAAsB,OACxB,0BAAyB;EAI3B,MAAM,WAAW,aACf,YAAY,OACZ,SACA,UACA,MAAM,QACP;EAGD,MAAM,gBAAgB,SAAS,QAAQ,KAAK,QAAQ,MAAM,IAAI,UAAU,EAAE;EAE1E,MAAM,eACJ,iBAAiB,YAAY,UAAU,YAAY,QAAQ;AAC7D,eAAa,gBAAgB;GAC3B,OAAO;GACP,MAAM;GACN,eAAe,YAAY;GAC3B,aAAa,OAAO,YAAY,YAAY;GAC5C,aAAa,YAAY;GACzB,cAAc;GACH;GACX,OAAO,YAAY,oBAAoB,YAAY,cAAc;GACjE,UAAU;GACV,mBAAmB;GACnB,aAAa;IACX,QAAQ;IACR,MAAM,cAAc,QAAQ;IAC7B;GACD;GACD;;AAGH,QAAO"}
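The timing arithmetic embedded in the generateFragmentIndex source above can be illustrated in isolation. The following is a minimal TypeScript sketch, not the package's exports; it only mirrors the convertTimestamp / durationMsFromTimescale helpers and the MIN_SEGMENT_DURATION_MS gate visible in the bundled code, with an assumed 1/90000 video timebase.

// Sketch only: mirrors the arithmetic shown in generateFragmentIndex.ts, not an import from the package.
const MIN_SEGMENT_DURATION_MS = 2000;
const MS_PER_SECOND = 1000;

interface Timebase { num: number; den: number; }

// Convert a PTS expressed in the stream timebase to the derived track timescale.
function convertTimestamp(pts: number, timebase: Timebase, timescale: number): number {
  return Math.round((pts * timescale) / timebase.den);
}

// Convert a duration expressed in timescale units back to milliseconds.
function durationMsFromTimescale(durationTimescale: number, timescale: number): number {
  return (durationTimescale / timescale) * MS_PER_SECOND;
}

// Example: a 1/90000 timebase gives a 90000 timescale, so a PTS span of 180000 ticks is 2 seconds.
const timebase: Timebase = { num: 1, den: 90000 };
const timescale = Math.round(timebase.den / timebase.num); // 90000
const startCts = convertTimestamp(0, timebase, timescale);
const endCts = convertTimestamp(180000, timebase, timescale);
const durationMs = durationMsFromTimescale(endCts - startCts, timescale); // 2000
const readyToFinalize = durationMs >= MIN_SEGMENT_DURATION_MS; // true: the segment is long enough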
@@ -1 +1 @@
1
- {"version":3,"file":"generateSingleTrack.js","names":["progressTimeout: NodeJS.Timeout | null"],"sources":["../src/generateSingleTrack.ts"],"sourcesContent":["import { idempotentTask } from \"./idempotentTask.js\";\nimport debug from \"debug\";\nimport { PassThrough } from \"node:stream\";\nimport { basename } from \"node:path\";\nimport { Probe } from \"./Probe.js\";\nimport { generateFragmentIndex } from \"./generateFragmentIndex.js\";\n\nconst log = debug(\"ef:generateSingleTrack\");\n\nexport const generateSingleTrackFromPath = async (\n absolutePath: string,\n trackId: number,\n) => {\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // Map track ID (1-based) to stream index (0-based) - tracks use 1-based IDs, streams use 0-based indices\n const streamIndex = trackId - 1;\n\n if (streamIndex < 0 || streamIndex >= probe.streams.length) {\n throw new Error(`Track ${trackId} not found (valid tracks: 1-${probe.streams.length})`);\n }\n\n // Get the track stream from FFmpeg (single track, fragmented MP4)\n const trackStream = probe.createTrackReadstream(streamIndex);\n\n // Create a PassThrough to tee the stream\n const outputStream = new PassThrough();\n const indexStream = new PassThrough();\n\n // Pipe data but DON'T end outputStream automatically - we'll control this\n trackStream.pipe(outputStream, { end: false });\n trackStream.pipe(indexStream);\n\n // Track when the source stream ends (but don't end output yet)\n let sourceStreamEnded = false;\n trackStream.on('end', () => {\n sourceStreamEnded = true;\n });\n\n trackStream.on('error', (error) => {\n outputStream.destroy(error);\n indexStream.destroy(error);\n });\n\n // Generate fragment index from the single-track stream\n // This will be a single-track index since we're processing isolated track\n // Map the single-track file's track ID 1 to the original multi-track ID\n const trackIdMapping = { 1: trackId }; // Single track 1 -> original trackId\n const fragmentIndexPromise = generateFragmentIndex(indexStream, undefined, trackIdMapping);\n\n // End outputStream only after BOTH source ends AND fragment index completes\n fragmentIndexPromise.then(() => {\n if (sourceStreamEnded) {\n outputStream.end();\n } else {\n // If fragment index completes first, wait for stream to end\n trackStream.once('end', () => {\n outputStream.end();\n });\n }\n }).catch((error) => {\n outputStream.destroy(error);\n });\n\n // Return both the stream and the index\n return {\n stream: outputStream,\n fragmentIndex: fragmentIndexPromise\n };\n};\n\nexport const generateSingleTrackTask = idempotentTask({\n label: \"track-single\",\n filename: (absolutePath: string, trackId: number) =>\n `${basename(absolutePath)}.track-${trackId}.mp4`,\n runner: async (absolutePath: string, trackId: number) => {\n const result = await generateSingleTrackFromPath(absolutePath, trackId);\n\n // Create a PassThrough stream that processes fragment index in parallel\n const finalStream = new PassThrough();\n\n // Start fragment index processing immediately (don't wait for stream to end)\n const fragmentIndexPromise = result.fragmentIndex.catch((error) => {\n console.warn(`Fragment index generation failed for track ${trackId}:`, error);\n // Don't fail the stream if fragment index fails\n });\n\n // Monitor progress and extend timeout based on actual work\n let progressTimeout: NodeJS.Timeout | null = null;\n\n const resetProgressTimeout = () => {\n if (progressTimeout) {\n clearTimeout(progressTimeout);\n 
}\n\n progressTimeout = setTimeout(() => {\n if (!finalStream.destroyed) {\n console.warn(`Progress timeout triggered for track ${trackId} - no activity for 10 seconds`);\n finalStream.end();\n }\n }, 10000); // 10 second sliding timeout\n };\n\n // Start the initial timeout\n resetProgressTimeout();\n\n // Monitor data flow to detect active work\n result.stream.on('data', () => {\n resetProgressTimeout(); // Reset timeout when we see data\n });\n\n result.stream.on('end', () => {\n resetProgressTimeout(); // Reset timeout when stream ends\n });\n\n // Pipe data through but don't end until fragment index is ready\n result.stream.pipe(finalStream, { end: false });\n\n // Wait for fragment index to complete, then end the stream\n await fragmentIndexPromise;\n finalStream.end();\n\n // Clean up timeout\n if (progressTimeout) {\n clearTimeout(progressTimeout);\n }\n\n return finalStream;\n },\n});\n\nexport const generateSingleTrack = async (\n cacheRoot: string,\n absolutePath: string,\n url: string,\n) => {\n try {\n const trackId = new URL(`http://localhost${url}`).searchParams.get(\n \"trackId\",\n );\n if (trackId === null) {\n throw new Error(\n \"No trackId provided. It must be specified in the query string: ?trackId=0\",\n );\n }\n return await generateSingleTrackTask(cacheRoot, absolutePath, Number(trackId));\n } catch (error) {\n console.error(error);\n console.trace(\"Error generating track\", error);\n throw error;\n }\n};\n\n// Helper function to get both stream and fragment index\nexport const generateSingleTrackWithIndex = async (\n absolutePath: string,\n trackId: number,\n) => {\n log(`Generating track ${trackId} with index for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // Map track ID (1-based) to stream index (0-based) for compatibility\n const streamIndex = trackId - 1;\n\n if (streamIndex < 0 || streamIndex >= probe.streams.length) {\n throw new Error(`Track ${trackId} not found (valid tracks: 1-${probe.streams.length})`);\n }\n\n const trackStream = probe.createTrackReadstream(streamIndex);\n\n // Collect all data for fragment index generation\n const chunks: Buffer[] = [];\n const outputStream = new PassThrough();\n\n // Tee the stream: collect for index AND pass through for output\n trackStream.on('data', (chunk: Buffer) => {\n chunks.push(chunk);\n outputStream.write(chunk);\n });\n\n trackStream.on('end', () => {\n // Don't end the output stream immediately - wait for async processing\n (async () => {\n try {\n // Create a readable from collected chunks for fragment index\n const { Readable } = await import(\"node:stream\");\n const indexInputStream = Readable.from(Buffer.concat(chunks as any));\n\n // Generate fragment index with track ID mapping\n const trackIdMapping = { 1: trackId }; // Single track 1 -> original trackId\n const fragmentIndex = await generateFragmentIndex(indexInputStream, undefined, trackIdMapping);\n\n // Emit the fragment index as metadata\n outputStream.emit('fragmentIndex', fragmentIndex);\n\n // Now it's safe to end the stream\n outputStream.end();\n } catch (error) {\n outputStream.destroy(error as Error);\n }\n })();\n });\n\n trackStream.on('error', (error) => {\n outputStream.destroy(error);\n });\n\n return 
outputStream;\n};\n"],"mappings":";;;;;;;;AAOA,MAAM,MAAM,MAAM,yBAAyB;AAE3C,MAAa,8BAA8B,OACzC,cACA,YACG;AACH,KAAI,oBAAoB,QAAQ,OAAO,eAAe;CAEtD,MAAM,QAAQ,MAAM,MAAM,UAAU,aAAa;CAGjD,MAAM,cAAc,UAAU;AAE9B,KAAI,cAAc,KAAK,eAAe,MAAM,QAAQ,OAClD,OAAM,IAAI,MAAM,SAAS,QAAQ,8BAA8B,MAAM,QAAQ,OAAO,GAAG;CAIzF,MAAM,cAAc,MAAM,sBAAsB,YAAY;CAG5D,MAAM,eAAe,IAAI,aAAa;CACtC,MAAM,cAAc,IAAI,aAAa;AAGrC,aAAY,KAAK,cAAc,EAAE,KAAK,OAAO,CAAC;AAC9C,aAAY,KAAK,YAAY;CAG7B,IAAI,oBAAoB;AACxB,aAAY,GAAG,aAAa;AAC1B,sBAAoB;GACpB;AAEF,aAAY,GAAG,UAAU,UAAU;AACjC,eAAa,QAAQ,MAAM;AAC3B,cAAY,QAAQ,MAAM;GAC1B;CAMF,MAAM,uBAAuB,sBAAsB,aAAa,QADzC,EAAE,GAAG,SAAS,CACqD;AAG1F,sBAAqB,WAAW;AAC9B,MAAI,kBACF,cAAa,KAAK;MAGlB,aAAY,KAAK,aAAa;AAC5B,gBAAa,KAAK;IAClB;GAEJ,CAAC,OAAO,UAAU;AAClB,eAAa,QAAQ,MAAM;GAC3B;AAGF,QAAO;EACL,QAAQ;EACR,eAAe;EAChB;;AAGH,MAAa,0BAA0B,eAAe;CACpD,OAAO;CACP,WAAW,cAAsB,YAC/B,GAAG,SAAS,aAAa,CAAC,SAAS,QAAQ;CAC7C,QAAQ,OAAO,cAAsB,YAAoB;EACvD,MAAM,SAAS,MAAM,4BAA4B,cAAc,QAAQ;EAGvE,MAAM,cAAc,IAAI,aAAa;EAGrC,MAAM,uBAAuB,OAAO,cAAc,OAAO,UAAU;AACjE,WAAQ,KAAK,8CAA8C,QAAQ,IAAI,MAAM;IAE7E;EAGF,IAAIA,kBAAyC;EAE7C,MAAM,6BAA6B;AACjC,OAAI,gBACF,cAAa,gBAAgB;AAG/B,qBAAkB,iBAAiB;AACjC,QAAI,CAAC,YAAY,WAAW;AAC1B,aAAQ,KAAK,wCAAwC,QAAQ,+BAA+B;AAC5F,iBAAY,KAAK;;MAElB,IAAM;;AAIX,wBAAsB;AAGtB,SAAO,OAAO,GAAG,cAAc;AAC7B,yBAAsB;IACtB;AAEF,SAAO,OAAO,GAAG,aAAa;AAC5B,yBAAsB;IACtB;AAGF,SAAO,OAAO,KAAK,aAAa,EAAE,KAAK,OAAO,CAAC;AAG/C,QAAM;AACN,cAAY,KAAK;AAGjB,MAAI,gBACF,cAAa,gBAAgB;AAG/B,SAAO;;CAEV,CAAC"}
1
+ {"version":3,"file":"generateSingleTrack.js","names":["progressTimeout: NodeJS.Timeout | null"],"sources":["../src/generateSingleTrack.ts"],"sourcesContent":["import { idempotentTask } from \"./idempotentTask.js\";\nimport debug from \"debug\";\nimport { PassThrough } from \"node:stream\";\nimport { basename } from \"node:path\";\nimport { Probe } from \"./Probe.js\";\nimport { generateFragmentIndex } from \"./generateFragmentIndex.js\";\n\nconst log = debug(\"ef:generateSingleTrack\");\n\nexport const generateSingleTrackFromPath = async (\n absolutePath: string,\n trackId: number,\n) => {\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // Map track ID (1-based) to stream index (0-based) - tracks use 1-based IDs, streams use 0-based indices\n const streamIndex = trackId - 1;\n\n if (streamIndex < 0 || streamIndex >= probe.streams.length) {\n throw new Error(\n `Track ${trackId} not found (valid tracks: 1-${probe.streams.length})`,\n );\n }\n\n // Get the track stream from FFmpeg (single track, fragmented MP4)\n const trackStream = probe.createTrackReadstream(streamIndex);\n\n // Create a PassThrough to tee the stream\n const outputStream = new PassThrough();\n const indexStream = new PassThrough();\n\n // Pipe data but DON'T end outputStream automatically - we'll control this\n trackStream.pipe(outputStream, { end: false });\n trackStream.pipe(indexStream);\n\n // Track when the source stream ends (but don't end output yet)\n let sourceStreamEnded = false;\n trackStream.on(\"end\", () => {\n sourceStreamEnded = true;\n });\n\n trackStream.on(\"error\", (error) => {\n outputStream.destroy(error);\n indexStream.destroy(error);\n });\n\n // Generate fragment index from the single-track stream\n // This will be a single-track index since we're processing isolated track\n // Map the single-track file's track ID 1 to the original multi-track ID\n const trackIdMapping = { 1: trackId }; // Single track 1 -> original trackId\n const fragmentIndexPromise = generateFragmentIndex(\n indexStream,\n undefined,\n trackIdMapping,\n );\n\n // End outputStream only after BOTH source ends AND fragment index completes\n fragmentIndexPromise\n .then(() => {\n if (sourceStreamEnded) {\n outputStream.end();\n } else {\n // If fragment index completes first, wait for stream to end\n trackStream.once(\"end\", () => {\n outputStream.end();\n });\n }\n })\n .catch((error) => {\n outputStream.destroy(error);\n });\n\n // Return both the stream and the index\n return {\n stream: outputStream,\n fragmentIndex: fragmentIndexPromise,\n };\n};\n\nexport const generateSingleTrackTask = idempotentTask({\n label: \"track-single\",\n filename: (absolutePath: string, trackId: number) =>\n `${basename(absolutePath)}.track-${trackId}.mp4`,\n runner: async (absolutePath: string, trackId: number) => {\n const result = await generateSingleTrackFromPath(absolutePath, trackId);\n\n // Create a PassThrough stream that processes fragment index in parallel\n const finalStream = new PassThrough();\n\n // Start fragment index processing immediately (don't wait for stream to end)\n const fragmentIndexPromise = result.fragmentIndex.catch((error) => {\n console.warn(\n `Fragment index generation failed for track ${trackId}:`,\n error,\n );\n // Don't fail the stream if fragment index fails\n });\n\n // Monitor progress and extend timeout based on actual work\n let progressTimeout: NodeJS.Timeout | null = null;\n\n const resetProgressTimeout = () => {\n if 
(progressTimeout) {\n clearTimeout(progressTimeout);\n }\n\n progressTimeout = setTimeout(() => {\n if (!finalStream.destroyed) {\n console.warn(\n `Progress timeout triggered for track ${trackId} - no activity for 10 seconds`,\n );\n finalStream.end();\n }\n }, 10000); // 10 second sliding timeout\n };\n\n // Start the initial timeout\n resetProgressTimeout();\n\n // Monitor data flow to detect active work\n result.stream.on(\"data\", () => {\n resetProgressTimeout(); // Reset timeout when we see data\n });\n\n result.stream.on(\"end\", () => {\n resetProgressTimeout(); // Reset timeout when stream ends\n });\n\n // Pipe data through but don't end until fragment index is ready\n result.stream.pipe(finalStream, { end: false });\n\n // Wait for fragment index to complete, then end the stream\n await fragmentIndexPromise;\n finalStream.end();\n\n // Clean up timeout\n if (progressTimeout) {\n clearTimeout(progressTimeout);\n }\n\n return finalStream;\n },\n});\n\nexport const generateSingleTrack = async (\n cacheRoot: string,\n absolutePath: string,\n url: string,\n) => {\n try {\n const trackId = new URL(`http://localhost${url}`).searchParams.get(\n \"trackId\",\n );\n if (trackId === null) {\n throw new Error(\n \"No trackId provided. It must be specified in the query string: ?trackId=0\",\n );\n }\n return await generateSingleTrackTask(\n cacheRoot,\n absolutePath,\n Number(trackId),\n );\n } catch (error) {\n console.error(error);\n console.trace(\"Error generating track\", error);\n throw error;\n }\n};\n\n// Helper function to get both stream and fragment index\nexport const generateSingleTrackWithIndex = async (\n absolutePath: string,\n trackId: number,\n) => {\n log(`Generating track ${trackId} with index for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // Map track ID (1-based) to stream index (0-based) for compatibility\n const streamIndex = trackId - 1;\n\n if (streamIndex < 0 || streamIndex >= probe.streams.length) {\n throw new Error(\n `Track ${trackId} not found (valid tracks: 1-${probe.streams.length})`,\n );\n }\n\n const trackStream = probe.createTrackReadstream(streamIndex);\n\n // Collect all data for fragment index generation\n const chunks: Buffer[] = [];\n const outputStream = new PassThrough();\n\n // Tee the stream: collect for index AND pass through for output\n trackStream.on(\"data\", (chunk: Buffer) => {\n chunks.push(chunk);\n outputStream.write(chunk);\n });\n\n trackStream.on(\"end\", () => {\n // Don't end the output stream immediately - wait for async processing\n (async () => {\n try {\n // Create a readable from collected chunks for fragment index\n const { Readable } = await import(\"node:stream\");\n const indexInputStream = Readable.from(Buffer.concat(chunks as any));\n\n // Generate fragment index with track ID mapping\n const trackIdMapping = { 1: trackId }; // Single track 1 -> original trackId\n const fragmentIndex = await generateFragmentIndex(\n indexInputStream,\n undefined,\n trackIdMapping,\n );\n\n // Emit the fragment index as metadata\n outputStream.emit(\"fragmentIndex\", fragmentIndex);\n\n // Now it's safe to end the stream\n outputStream.end();\n } catch (error) {\n outputStream.destroy(error as Error);\n }\n })();\n });\n\n trackStream.on(\"error\", (error) => {\n outputStream.destroy(error);\n });\n\n return 
outputStream;\n};\n"],"mappings":";;;;;;;;AAOA,MAAM,MAAM,MAAM,yBAAyB;AAE3C,MAAa,8BAA8B,OACzC,cACA,YACG;AACH,KAAI,oBAAoB,QAAQ,OAAO,eAAe;CAEtD,MAAM,QAAQ,MAAM,MAAM,UAAU,aAAa;CAGjD,MAAM,cAAc,UAAU;AAE9B,KAAI,cAAc,KAAK,eAAe,MAAM,QAAQ,OAClD,OAAM,IAAI,MACR,SAAS,QAAQ,8BAA8B,MAAM,QAAQ,OAAO,GACrE;CAIH,MAAM,cAAc,MAAM,sBAAsB,YAAY;CAG5D,MAAM,eAAe,IAAI,aAAa;CACtC,MAAM,cAAc,IAAI,aAAa;AAGrC,aAAY,KAAK,cAAc,EAAE,KAAK,OAAO,CAAC;AAC9C,aAAY,KAAK,YAAY;CAG7B,IAAI,oBAAoB;AACxB,aAAY,GAAG,aAAa;AAC1B,sBAAoB;GACpB;AAEF,aAAY,GAAG,UAAU,UAAU;AACjC,eAAa,QAAQ,MAAM;AAC3B,cAAY,QAAQ,MAAM;GAC1B;CAMF,MAAM,uBAAuB,sBAC3B,aACA,QAHqB,EAAE,GAAG,SAAS,CAKpC;AAGD,sBACG,WAAW;AACV,MAAI,kBACF,cAAa,KAAK;MAGlB,aAAY,KAAK,aAAa;AAC5B,gBAAa,KAAK;IAClB;GAEJ,CACD,OAAO,UAAU;AAChB,eAAa,QAAQ,MAAM;GAC3B;AAGJ,QAAO;EACL,QAAQ;EACR,eAAe;EAChB;;AAGH,MAAa,0BAA0B,eAAe;CACpD,OAAO;CACP,WAAW,cAAsB,YAC/B,GAAG,SAAS,aAAa,CAAC,SAAS,QAAQ;CAC7C,QAAQ,OAAO,cAAsB,YAAoB;EACvD,MAAM,SAAS,MAAM,4BAA4B,cAAc,QAAQ;EAGvE,MAAM,cAAc,IAAI,aAAa;EAGrC,MAAM,uBAAuB,OAAO,cAAc,OAAO,UAAU;AACjE,WAAQ,KACN,8CAA8C,QAAQ,IACtD,MACD;IAED;EAGF,IAAIA,kBAAyC;EAE7C,MAAM,6BAA6B;AACjC,OAAI,gBACF,cAAa,gBAAgB;AAG/B,qBAAkB,iBAAiB;AACjC,QAAI,CAAC,YAAY,WAAW;AAC1B,aAAQ,KACN,wCAAwC,QAAQ,+BACjD;AACD,iBAAY,KAAK;;MAElB,IAAM;;AAIX,wBAAsB;AAGtB,SAAO,OAAO,GAAG,cAAc;AAC7B,yBAAsB;IACtB;AAEF,SAAO,OAAO,GAAG,aAAa;AAC5B,yBAAsB;IACtB;AAGF,SAAO,OAAO,KAAK,aAAa,EAAE,KAAK,OAAO,CAAC;AAG/C,QAAM;AACN,cAAY,KAAK;AAGjB,MAAI,gBACF,cAAa,gBAAgB;AAG/B,SAAO;;CAEV,CAAC"}
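The generateSingleTrack sources above rely on a tee-and-defer pattern: the FFmpeg track stream is piped into two PassThrough streams, and the output is ended only once both the source has ended and the fragment index promise has settled. Below is a minimal sketch of that coordination; buildIndex is a hypothetical stand-in for generateFragmentIndex, not the package's API.

import { PassThrough, Readable } from "node:stream";

// Sketch of the coordination only; buildIndex is a placeholder for the real index generator.
function teeWithDeferredEnd(
  source: Readable,
  buildIndex: (s: Readable) => Promise<unknown>,
) {
  const output = new PassThrough();
  const indexInput = new PassThrough();

  // Pipe data to both destinations, but do not auto-end the output stream.
  source.pipe(output, { end: false });
  source.pipe(indexInput);

  let sourceEnded = false;
  source.on("end", () => {
    sourceEnded = true;
  });
  source.on("error", (err) => {
    output.destroy(err);
    indexInput.destroy(err);
  });

  const index = buildIndex(indexInput);

  // End the output only after both the source has ended and indexing has finished.
  index
    .then(() => {
      if (sourceEnded) output.end();
      else source.once("end", () => output.end());
    })
    .catch((err) => output.destroy(err));

  return { stream: output, index };
}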
@@ -2,7 +2,7 @@ import { md5FilePath } from "./md5.js";
2
2
  import { createWriteStream, existsSync } from "node:fs";
3
3
  import debug from "debug";
4
4
  import { Readable } from "node:stream";
5
- import { mkdir, stat, writeFile } from "node:fs/promises";
5
+ import { mkdir, readdir, stat, writeFile } from "node:fs/promises";
6
6
  import path from "node:path";
7
7
 
8
8
  //#region src/idempotentTask.ts
@@ -62,11 +62,36 @@ const idempotentTask = ({ label, filename, runner }) => {
62
62
  delete downloadTasks[downloadKey];
63
63
  }
64
64
  }
65
- const md5 = await md5FilePath(absolutePath);
65
+ const expectedFilename = filename(absolutePath, ...args);
66
+ let cachePath = null;
67
+ let md5 = null;
68
+ const scanStartTime = Date.now();
69
+ try {
70
+ const cacheDirs = await readdir(cacheDirRoot, { withFileTypes: true });
71
+ log(`Scanning ${cacheDirs.length} cache directories for ${expectedFilename}`);
72
+ for (const dir of cacheDirs) if (dir.isDirectory()) {
73
+ const candidatePath = path.join(cacheDirRoot, dir.name, expectedFilename);
74
+ if (existsSync(candidatePath) && await isValidCacheFile(candidatePath)) {
75
+ cachePath = candidatePath;
76
+ md5 = dir.name;
77
+ log(`Found existing cache in ${Date.now() - scanStartTime}ms: ${candidatePath} (skipped MD5)`);
78
+ break;
79
+ }
80
+ }
81
+ if (!cachePath) log(`Cache scan completed in ${Date.now() - scanStartTime}ms, no cache found - will compute MD5`);
82
+ } catch (error) {
83
+ log(`Cache scan failed after ${Date.now() - scanStartTime}ms, will compute MD5: ${error}`);
84
+ }
85
+ if (!md5) {
86
+ const md5StartTime = Date.now();
87
+ log(`Computing MD5 for ${absolutePath}...`);
88
+ md5 = await md5FilePath(absolutePath);
89
+ log(`MD5 computed in ${Date.now() - md5StartTime}ms: ${md5}`);
90
+ }
66
91
  const cacheDir = path.join(cacheDirRoot, md5);
67
92
  log(`Cache dir: ${cacheDir}`);
68
93
  await mkdir(cacheDir, { recursive: true });
69
- const cachePath = path.join(cacheDir, filename(absolutePath, ...args));
94
+ if (!cachePath) cachePath = path.join(cacheDir, expectedFilename);
70
95
  const key = cachePath;
71
96
  if (existsSync(cachePath) && await isValidCacheFile(cachePath)) {
72
97
  log(`Returning cached ef:${label} task for ${key}`);
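The hunk above replaces the unconditional md5FilePath call with a scan of existing cache directories: if some <cacheDirRoot>/<md5>/ already holds a valid (non-empty) file with the expected name, the directory name is reused as the MD5 and hashing is skipped. A hedged sketch of that lookup follows, assuming the same cache layout; findExistingCache is an illustrative name, not part of the package.

import { existsSync } from "node:fs";
import { readdir, stat } from "node:fs/promises";
import path from "node:path";

// Sketch only: returns the md5 directory name and file path if a valid cache entry already exists.
async function findExistingCache(
  cacheDirRoot: string,
  expectedFilename: string,
): Promise<{ md5: string; cachePath: string } | null> {
  try {
    const entries = await readdir(cacheDirRoot, { withFileTypes: true });
    for (const entry of entries) {
      if (!entry.isDirectory()) continue;
      const candidate = path.join(cacheDirRoot, entry.name, expectedFilename);
      // Accept only non-empty files, matching the isValidCacheFile check in the diff.
      if (existsSync(candidate) && (await stat(candidate)).size > 0) {
        return { md5: entry.name, cachePath: candidate };
      }
    }
  } catch {
    // Cache root missing or unreadable: fall through to MD5 computation.
  }
  return null; // caller computes md5FilePath(absolutePath) in this case
}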
@@ -1 +1 @@
1
- {"version":3,"file":"idempotentTask.js","names":["tasks: Record<string, Promise<TaskResult>>","downloadTasks: Record<string, Promise<string>>"],"sources":["../src/idempotentTask.ts"],"sourcesContent":["import { createWriteStream, existsSync } from \"node:fs\";\nimport path from \"node:path\";\nimport { md5FilePath } from \"./md5.js\";\nimport debug from \"debug\";\nimport { mkdir, writeFile, stat } from \"node:fs/promises\";\nimport { Readable } from \"node:stream\";\n\ninterface TaskOptions<T extends unknown[]> {\n label: string;\n filename: (absolutePath: string, ...args: T) => string;\n runner: (absolutePath: string, ...args: T) => Promise<string | Readable>;\n}\n\nexport interface TaskResult {\n md5Sum: string;\n cachePath: string;\n}\n\nexport const idempotentTask = <T extends unknown[]>({\n label,\n filename,\n runner,\n}: TaskOptions<T>) => {\n const tasks: Record<string, Promise<TaskResult>> = {};\n const downloadTasks: Record<string, Promise<string>> = {};\n\n // Helper function to validate cache file completeness\n const isValidCacheFile = async (filePath: string, allowEmpty = false): Promise<boolean> => {\n try {\n const stats = await stat(filePath);\n // File must exist and either have content or be explicitly allowed to be empty\n return allowEmpty || stats.size > 0;\n } catch {\n return false;\n }\n };\n\n return async (\n rootDir: string,\n absolutePath: string,\n ...args: T\n ): Promise<TaskResult> => {\n const log = debug(`ef:${label}`);\n const cacheDirRoot = path.join(rootDir, \".cache\");\n await mkdir(cacheDirRoot, { recursive: true });\n\n log(`Running ef:${label} task for ${absolutePath} in ${rootDir}`);\n\n // Handle HTTP downloads with proper race condition protection\n if (absolutePath.includes(\"http\")) {\n const safePath = absolutePath.replace(/[^a-zA-Z0-9]/g, \"_\");\n const downloadCachePath = path.join(rootDir, \".cache\", `${safePath}.file`);\n\n // Check if already downloaded and valid (allow empty downloads)\n if (existsSync(downloadCachePath) && await isValidCacheFile(downloadCachePath, true)) {\n log(`Already cached ${absolutePath}`);\n absolutePath = downloadCachePath;\n } else {\n // Use download task deduplication to prevent concurrent downloads\n const downloadKey = absolutePath;\n if (!downloadTasks[downloadKey]) {\n log(`Starting download for ${absolutePath}`);\n downloadTasks[downloadKey] = (async () => {\n try {\n const response = await fetch(absolutePath);\n if (!response.ok) {\n throw new Error(`Failed to fetch file from URL ${absolutePath}: ${response.status} ${response.statusText}`);\n }\n\n const stream = response.body;\n if (!stream) {\n throw new Error(`No response body for URL ${absolutePath}`);\n }\n\n // Use temporary file to prevent reading incomplete downloads\n const tempPath = `${downloadCachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n\n // @ts-ignore node web stream support in typescript is incorrect about this.\n const readable = Readable.fromWeb(stream);\n readable.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n readable.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n // Atomically move completed file to final location\n const { rename } = await import(\"node:fs/promises\");\n await rename(tempPath, downloadCachePath);\n\n log(`Download completed for ${absolutePath}`);\n return downloadCachePath;\n } catch (error) {\n log(`Download failed for ${absolutePath}: ${error}`);\n // Clean up task reference on failure\n 
delete downloadTasks[downloadKey];\n throw error;\n }\n })();\n }\n\n absolutePath = await downloadTasks[downloadKey];\n // Clean up completed task\n delete downloadTasks[downloadKey];\n }\n }\n\n const md5 = await md5FilePath(absolutePath);\n const cacheDir = path.join(cacheDirRoot, md5);\n log(`Cache dir: ${cacheDir}`);\n await mkdir(cacheDir, { recursive: true });\n\n const cachePath = path.join(cacheDir, filename(absolutePath, ...args));\n const key = cachePath;\n\n // Check if cache exists and is valid (not zero-byte)\n if (existsSync(cachePath) && await isValidCacheFile(cachePath)) {\n log(`Returning cached ef:${label} task for ${key}`);\n return { cachePath, md5Sum: md5 };\n }\n\n const maybeTask = tasks[key];\n if (maybeTask) {\n log(`Returning existing ef:${label} task for ${key}`);\n return await maybeTask;\n }\n\n log(`Creating new ef:${label} task for ${key}`);\n const fullTask = (async (): Promise<TaskResult> => {\n try {\n log(`Awaiting task for ${key}`);\n const result = await runner(absolutePath, ...args);\n\n if (result instanceof Readable) {\n log(`Piping task for ${key} to cache`);\n // Use temporary file to prevent reading incomplete results\n const tempPath = `${cachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n result.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n result.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n // Atomically move completed file to final location\n const { rename } = await import(\"node:fs/promises\");\n await rename(tempPath, cachePath);\n } else {\n log(`Writing to ${cachePath}`);\n await writeFile(cachePath, result);\n }\n\n // Clean up task reference after successful completion\n delete tasks[key];\n\n return {\n md5Sum: md5,\n cachePath,\n };\n } catch (error) {\n // Clean up task reference on failure\n delete tasks[key];\n throw error;\n }\n })();\n\n tasks[key] = fullTask;\n return await fullTask;\n 
};\n};\n"],"mappings":";;;;;;;;AAkBA,MAAa,kBAAuC,EAClD,OACA,UACA,aACoB;CACpB,MAAMA,QAA6C,EAAE;CACrD,MAAMC,gBAAiD,EAAE;CAGzD,MAAM,mBAAmB,OAAO,UAAkB,aAAa,UAA4B;AACzF,MAAI;GACF,MAAM,QAAQ,MAAM,KAAK,SAAS;AAElC,UAAO,cAAc,MAAM,OAAO;UAC5B;AACN,UAAO;;;AAIX,QAAO,OACL,SACA,cACA,GAAG,SACqB;EACxB,MAAM,MAAM,MAAM,MAAM,QAAQ;EAChC,MAAM,eAAe,KAAK,KAAK,SAAS,SAAS;AACjD,QAAM,MAAM,cAAc,EAAE,WAAW,MAAM,CAAC;AAE9C,MAAI,cAAc,MAAM,YAAY,aAAa,MAAM,UAAU;AAGjE,MAAI,aAAa,SAAS,OAAO,EAAE;GACjC,MAAM,WAAW,aAAa,QAAQ,iBAAiB,IAAI;GAC3D,MAAM,oBAAoB,KAAK,KAAK,SAAS,UAAU,GAAG,SAAS,OAAO;AAG1E,OAAI,WAAW,kBAAkB,IAAI,MAAM,iBAAiB,mBAAmB,KAAK,EAAE;AACpF,QAAI,kBAAkB,eAAe;AACrC,mBAAe;UACV;IAEL,MAAM,cAAc;AACpB,QAAI,CAAC,cAAc,cAAc;AAC/B,SAAI,yBAAyB,eAAe;AAC5C,mBAAc,gBAAgB,YAAY;AACxC,UAAI;OACF,MAAM,WAAW,MAAM,MAAM,aAAa;AAC1C,WAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MAAM,iCAAiC,aAAa,IAAI,SAAS,OAAO,GAAG,SAAS,aAAa;OAG7G,MAAM,SAAS,SAAS;AACxB,WAAI,CAAC,OACH,OAAM,IAAI,MAAM,4BAA4B,eAAe;OAI7D,MAAM,WAAW,GAAG,kBAAkB;OACtC,MAAM,cAAc,kBAAkB,SAAS;OAG/C,MAAM,WAAW,SAAS,QAAQ,OAAO;AACzC,gBAAS,KAAK,YAAY;AAE1B,aAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,iBAAS,GAAG,SAAS,OAAO;AAC5B,oBAAY,GAAG,SAAS,OAAO;AAC/B,oBAAY,GAAG,gBAAgB,SAAS,CAAC;SACzC;OAGF,MAAM,EAAE,WAAW,MAAM,OAAO;AAChC,aAAM,OAAO,UAAU,kBAAkB;AAEzC,WAAI,0BAA0B,eAAe;AAC7C,cAAO;eACA,OAAO;AACd,WAAI,uBAAuB,aAAa,IAAI,QAAQ;AAEpD,cAAO,cAAc;AACrB,aAAM;;SAEN;;AAGN,mBAAe,MAAM,cAAc;AAEnC,WAAO,cAAc;;;EAIzB,MAAM,MAAM,MAAM,YAAY,aAAa;EAC3C,MAAM,WAAW,KAAK,KAAK,cAAc,IAAI;AAC7C,MAAI,cAAc,WAAW;AAC7B,QAAM,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC;EAE1C,MAAM,YAAY,KAAK,KAAK,UAAU,SAAS,cAAc,GAAG,KAAK,CAAC;EACtE,MAAM,MAAM;AAGZ,MAAI,WAAW,UAAU,IAAI,MAAM,iBAAiB,UAAU,EAAE;AAC9D,OAAI,uBAAuB,MAAM,YAAY,MAAM;AACnD,UAAO;IAAE;IAAW,QAAQ;IAAK;;EAGnC,MAAM,YAAY,MAAM;AACxB,MAAI,WAAW;AACb,OAAI,yBAAyB,MAAM,YAAY,MAAM;AACrD,UAAO,MAAM;;AAGf,MAAI,mBAAmB,MAAM,YAAY,MAAM;EAC/C,MAAM,YAAY,YAAiC;AACjD,OAAI;AACF,QAAI,qBAAqB,MAAM;IAC/B,MAAM,SAAS,MAAM,OAAO,cAAc,GAAG,KAAK;AAElD,QAAI,kBAAkB,UAAU;AAC9B,SAAI,mBAAmB,IAAI,WAAW;KAEtC,MAAM,WAAW,GAAG,UAAU;KAC9B,MAAM,cAAc,kBAAkB,SAAS;AAC/C,YAAO,KAAK,YAAY;AAExB,WAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,aAAO,GAAG,SAAS,OAAO;AAC1B,kBAAY,GAAG,SAAS,OAAO;AAC/B,kBAAY,GAAG,gBAAgB,SAAS,CAAC;OACzC;KAGF,MAAM,EAAE,WAAW,MAAM,OAAO;AAChC,WAAM,OAAO,UAAU,UAAU;WAC5B;AACL,SAAI,cAAc,YAAY;AAC9B,WAAM,UAAU,WAAW,OAAO;;AAIpC,WAAO,MAAM;AAEb,WAAO;KACL,QAAQ;KACR;KACD;YACM,OAAO;AAEd,WAAO,MAAM;AACb,UAAM;;MAEN;AAEJ,QAAM,OAAO;AACb,SAAO,MAAM"}
1
+ {"version":3,"file":"idempotentTask.js","names":["tasks: Record<string, Promise<TaskResult>>","downloadTasks: Record<string, Promise<string>>","cachePath: string | null","md5: string | null"],"sources":["../src/idempotentTask.ts"],"sourcesContent":["import { createWriteStream, existsSync } from \"node:fs\";\nimport path from \"node:path\";\nimport { md5FilePath } from \"./md5.js\";\nimport debug from \"debug\";\nimport { mkdir, writeFile, stat, readdir } from \"node:fs/promises\";\nimport { Readable } from \"node:stream\";\n\ninterface TaskOptions<T extends unknown[]> {\n label: string;\n filename: (absolutePath: string, ...args: T) => string;\n runner: (absolutePath: string, ...args: T) => Promise<string | Readable>;\n}\n\nexport interface TaskResult {\n md5Sum: string;\n cachePath: string;\n}\n\nexport const idempotentTask = <T extends unknown[]>({\n label,\n filename,\n runner,\n}: TaskOptions<T>) => {\n const tasks: Record<string, Promise<TaskResult>> = {};\n const downloadTasks: Record<string, Promise<string>> = {};\n\n // Helper function to validate cache file completeness\n const isValidCacheFile = async (\n filePath: string,\n allowEmpty = false,\n ): Promise<boolean> => {\n try {\n const stats = await stat(filePath);\n // File must exist and either have content or be explicitly allowed to be empty\n return allowEmpty || stats.size > 0;\n } catch {\n return false;\n }\n };\n\n return async (\n rootDir: string,\n absolutePath: string,\n ...args: T\n ): Promise<TaskResult> => {\n const log = debug(`ef:${label}`);\n const cacheDirRoot = path.join(rootDir, \".cache\");\n await mkdir(cacheDirRoot, { recursive: true });\n\n log(`Running ef:${label} task for ${absolutePath} in ${rootDir}`);\n\n // Handle HTTP downloads with proper race condition protection\n if (absolutePath.includes(\"http\")) {\n const safePath = absolutePath.replace(/[^a-zA-Z0-9]/g, \"_\");\n const downloadCachePath = path.join(\n rootDir,\n \".cache\",\n `${safePath}.file`,\n );\n\n // Check if already downloaded and valid (allow empty downloads)\n if (\n existsSync(downloadCachePath) &&\n (await isValidCacheFile(downloadCachePath, true))\n ) {\n log(`Already cached ${absolutePath}`);\n absolutePath = downloadCachePath;\n } else {\n // Use download task deduplication to prevent concurrent downloads\n const downloadKey = absolutePath;\n if (!downloadTasks[downloadKey]) {\n log(`Starting download for ${absolutePath}`);\n downloadTasks[downloadKey] = (async () => {\n try {\n const response = await fetch(absolutePath);\n if (!response.ok) {\n throw new Error(\n `Failed to fetch file from URL ${absolutePath}: ${response.status} ${response.statusText}`,\n );\n }\n\n const stream = response.body;\n if (!stream) {\n throw new Error(`No response body for URL ${absolutePath}`);\n }\n\n // Use temporary file to prevent reading incomplete downloads\n const tempPath = `${downloadCachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n\n // @ts-ignore node web stream support in typescript is incorrect about this.\n const readable = Readable.fromWeb(stream);\n readable.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n readable.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n // Atomically move completed file to final location\n const { rename } = await import(\"node:fs/promises\");\n await rename(tempPath, downloadCachePath);\n\n log(`Download completed for ${absolutePath}`);\n return downloadCachePath;\n } catch (error) {\n 
log(`Download failed for ${absolutePath}: ${error}`);\n // Clean up task reference on failure\n delete downloadTasks[downloadKey];\n throw error;\n }\n })();\n }\n\n absolutePath = await downloadTasks[downloadKey];\n // Clean up completed task\n delete downloadTasks[downloadKey];\n }\n }\n\n // First, try to find existing cache by scanning cache directories\n // This avoids expensive MD5 computation when cache already exists\n const expectedFilename = filename(absolutePath, ...args);\n let cachePath: string | null = null;\n let md5: string | null = null;\n \n // Scan cache directories to find existing cache file\n const scanStartTime = Date.now();\n try {\n const cacheDirs = await readdir(cacheDirRoot, { withFileTypes: true });\n log(`Scanning ${cacheDirs.length} cache directories for ${expectedFilename}`);\n for (const dir of cacheDirs) {\n if (dir.isDirectory()) {\n const candidatePath = path.join(cacheDirRoot, dir.name, expectedFilename);\n if (existsSync(candidatePath) && (await isValidCacheFile(candidatePath))) {\n cachePath = candidatePath;\n md5 = dir.name; // Directory name is the MD5\n const scanElapsed = Date.now() - scanStartTime;\n log(`Found existing cache in ${scanElapsed}ms: ${candidatePath} (skipped MD5)`);\n break;\n }\n }\n }\n if (!cachePath) {\n const scanElapsed = Date.now() - scanStartTime;\n log(`Cache scan completed in ${scanElapsed}ms, no cache found - will compute MD5`);\n }\n } catch (error) {\n // If cache directory doesn't exist or can't be read, continue to MD5 computation\n const scanElapsed = Date.now() - scanStartTime;\n log(`Cache scan failed after ${scanElapsed}ms, will compute MD5: ${error}`);\n }\n\n // Only compute MD5 if we didn't find an existing cache\n if (!md5) {\n const md5StartTime = Date.now();\n log(`Computing MD5 for ${absolutePath}...`);\n md5 = await md5FilePath(absolutePath);\n const md5Elapsed = Date.now() - md5StartTime;\n log(`MD5 computed in ${md5Elapsed}ms: ${md5}`);\n }\n \n const cacheDir = path.join(cacheDirRoot, md5);\n log(`Cache dir: ${cacheDir}`);\n await mkdir(cacheDir, { recursive: true });\n\n if (!cachePath) {\n cachePath = path.join(cacheDir, expectedFilename);\n }\n const key = cachePath;\n\n // Check if cache exists and is valid (not zero-byte)\n if (existsSync(cachePath) && (await isValidCacheFile(cachePath))) {\n log(`Returning cached ef:${label} task for ${key}`);\n return { cachePath, md5Sum: md5 };\n }\n\n const maybeTask = tasks[key];\n if (maybeTask) {\n log(`Returning existing ef:${label} task for ${key}`);\n return await maybeTask;\n }\n\n log(`Creating new ef:${label} task for ${key}`);\n const fullTask = (async (): Promise<TaskResult> => {\n try {\n log(`Awaiting task for ${key}`);\n const result = await runner(absolutePath, ...args);\n\n if (result instanceof Readable) {\n log(`Piping task for ${key} to cache`);\n // Use temporary file to prevent reading incomplete results\n const tempPath = `${cachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n result.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n result.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n // Atomically move completed file to final location\n const { rename } = await import(\"node:fs/promises\");\n await rename(tempPath, cachePath);\n } else {\n log(`Writing to ${cachePath}`);\n await writeFile(cachePath, result);\n }\n\n // Clean up task reference after successful completion\n delete tasks[key];\n\n return {\n md5Sum: md5,\n 
cachePath,\n };\n } catch (error) {\n // Clean up task reference on failure\n delete tasks[key];\n throw error;\n }\n })();\n\n tasks[key] = fullTask;\n return await fullTask;\n };\n};\n"],"mappings":";;;;;;;;AAkBA,MAAa,kBAAuC,EAClD,OACA,UACA,aACoB;CACpB,MAAMA,QAA6C,EAAE;CACrD,MAAMC,gBAAiD,EAAE;CAGzD,MAAM,mBAAmB,OACvB,UACA,aAAa,UACQ;AACrB,MAAI;GACF,MAAM,QAAQ,MAAM,KAAK,SAAS;AAElC,UAAO,cAAc,MAAM,OAAO;UAC5B;AACN,UAAO;;;AAIX,QAAO,OACL,SACA,cACA,GAAG,SACqB;EACxB,MAAM,MAAM,MAAM,MAAM,QAAQ;EAChC,MAAM,eAAe,KAAK,KAAK,SAAS,SAAS;AACjD,QAAM,MAAM,cAAc,EAAE,WAAW,MAAM,CAAC;AAE9C,MAAI,cAAc,MAAM,YAAY,aAAa,MAAM,UAAU;AAGjE,MAAI,aAAa,SAAS,OAAO,EAAE;GACjC,MAAM,WAAW,aAAa,QAAQ,iBAAiB,IAAI;GAC3D,MAAM,oBAAoB,KAAK,KAC7B,SACA,UACA,GAAG,SAAS,OACb;AAGD,OACE,WAAW,kBAAkB,IAC5B,MAAM,iBAAiB,mBAAmB,KAAK,EAChD;AACA,QAAI,kBAAkB,eAAe;AACrC,mBAAe;UACV;IAEL,MAAM,cAAc;AACpB,QAAI,CAAC,cAAc,cAAc;AAC/B,SAAI,yBAAyB,eAAe;AAC5C,mBAAc,gBAAgB,YAAY;AACxC,UAAI;OACF,MAAM,WAAW,MAAM,MAAM,aAAa;AAC1C,WAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MACR,iCAAiC,aAAa,IAAI,SAAS,OAAO,GAAG,SAAS,aAC/E;OAGH,MAAM,SAAS,SAAS;AACxB,WAAI,CAAC,OACH,OAAM,IAAI,MAAM,4BAA4B,eAAe;OAI7D,MAAM,WAAW,GAAG,kBAAkB;OACtC,MAAM,cAAc,kBAAkB,SAAS;OAG/C,MAAM,WAAW,SAAS,QAAQ,OAAO;AACzC,gBAAS,KAAK,YAAY;AAE1B,aAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,iBAAS,GAAG,SAAS,OAAO;AAC5B,oBAAY,GAAG,SAAS,OAAO;AAC/B,oBAAY,GAAG,gBAAgB,SAAS,CAAC;SACzC;OAGF,MAAM,EAAE,WAAW,MAAM,OAAO;AAChC,aAAM,OAAO,UAAU,kBAAkB;AAEzC,WAAI,0BAA0B,eAAe;AAC7C,cAAO;eACA,OAAO;AACd,WAAI,uBAAuB,aAAa,IAAI,QAAQ;AAEpD,cAAO,cAAc;AACrB,aAAM;;SAEN;;AAGN,mBAAe,MAAM,cAAc;AAEnC,WAAO,cAAc;;;EAMzB,MAAM,mBAAmB,SAAS,cAAc,GAAG,KAAK;EACxD,IAAIC,YAA2B;EAC/B,IAAIC,MAAqB;EAGzB,MAAM,gBAAgB,KAAK,KAAK;AAChC,MAAI;GACF,MAAM,YAAY,MAAM,QAAQ,cAAc,EAAE,eAAe,MAAM,CAAC;AACtE,OAAI,YAAY,UAAU,OAAO,yBAAyB,mBAAmB;AAC7E,QAAK,MAAM,OAAO,UAChB,KAAI,IAAI,aAAa,EAAE;IACrB,MAAM,gBAAgB,KAAK,KAAK,cAAc,IAAI,MAAM,iBAAiB;AACzE,QAAI,WAAW,cAAc,IAAK,MAAM,iBAAiB,cAAc,EAAG;AACxE,iBAAY;AACZ,WAAM,IAAI;AAEV,SAAI,2BADgB,KAAK,KAAK,GAAG,cACU,MAAM,cAAc,gBAAgB;AAC/E;;;AAIN,OAAI,CAAC,UAEH,KAAI,2BADgB,KAAK,KAAK,GAAG,cACU,uCAAuC;WAE7E,OAAO;AAGd,OAAI,2BADgB,KAAK,KAAK,GAAG,cACU,wBAAwB,QAAQ;;AAI7E,MAAI,CAAC,KAAK;GACR,MAAM,eAAe,KAAK,KAAK;AAC/B,OAAI,qBAAqB,aAAa,KAAK;AAC3C,SAAM,MAAM,YAAY,aAAa;AAErC,OAAI,mBADe,KAAK,KAAK,GAAG,aACE,MAAM,MAAM;;EAGhD,MAAM,WAAW,KAAK,KAAK,cAAc,IAAI;AAC7C,MAAI,cAAc,WAAW;AAC7B,QAAM,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC;AAE1C,MAAI,CAAC,UACH,aAAY,KAAK,KAAK,UAAU,iBAAiB;EAEnD,MAAM,MAAM;AAGZ,MAAI,WAAW,UAAU,IAAK,MAAM,iBAAiB,UAAU,EAAG;AAChE,OAAI,uBAAuB,MAAM,YAAY,MAAM;AACnD,UAAO;IAAE;IAAW,QAAQ;IAAK;;EAGnC,MAAM,YAAY,MAAM;AACxB,MAAI,WAAW;AACb,OAAI,yBAAyB,MAAM,YAAY,MAAM;AACrD,UAAO,MAAM;;AAGf,MAAI,mBAAmB,MAAM,YAAY,MAAM;EAC/C,MAAM,YAAY,YAAiC;AACjD,OAAI;AACF,QAAI,qBAAqB,MAAM;IAC/B,MAAM,SAAS,MAAM,OAAO,cAAc,GAAG,KAAK;AAElD,QAAI,kBAAkB,UAAU;AAC9B,SAAI,mBAAmB,IAAI,WAAW;KAEtC,MAAM,WAAW,GAAG,UAAU;KAC9B,MAAM,cAAc,kBAAkB,SAAS;AAC/C,YAAO,KAAK,YAAY;AAExB,WAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,aAAO,GAAG,SAAS,OAAO;AAC1B,kBAAY,GAAG,SAAS,OAAO;AAC/B,kBAAY,GAAG,gBAAgB,SAAS,CAAC;OACzC;KAGF,MAAM,EAAE,WAAW,MAAM,OAAO;AAChC,WAAM,OAAO,UAAU,UAAU;WAC5B;AACL,SAAI,cAAc,YAAY;AAC9B,WAAM,UAAU,WAAW,OAAO;;AAIpC,WAAO,MAAM;AAEb,WAAO;KACL,QAAQ;KACR;KACD;YACM,OAAO;AAEd,WAAO,MAAM;AACb,UAAM;;MAEN;AAEJ,QAAM,OAAO;AACb,SAAO,MAAM"}
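For context on the idempotentTask.js.map change above, the factory embedded in its sources returns a task function with the shape (rootDir, absolutePath, ...args) => Promise<{ md5Sum, cachePath }>. The following is a hypothetical caller written against that signature; the caption task, its filename, and its runner are invented for illustration.

import path from "node:path";
import { idempotentTask } from "./idempotentTask.js"; // path within the package; adjust for external use

// Hypothetical task: label, filename, and runner are illustrative only.
const captionTask = idempotentTask({
  label: "caption",
  filename: (absolutePath: string, lang: string) =>
    `${path.basename(absolutePath)}.${lang}.vtt`,
  runner: async (absolutePath: string, lang: string) => {
    // Return a string (written to the cache file) or a Readable (piped into it).
    return `WEBVTT\n\nNOTE ${lang} captions for ${absolutePath}\n`;
  },
});

// First call scans .cache/<md5>/ for the expected filename before computing the MD5;
// subsequent calls resolve directly from the cache.
const { md5Sum, cachePath } = await captionTask("/tmp/project", "/videos/input.mp4", "en");
console.log(md5Sum, cachePath);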