@editframe/assets 0.40.1-beta.0 → 0.40.2
- package/dist/Probe.cjs.map +1 -1
- package/dist/Probe.d.cts +26 -26
- package/dist/Probe.d.ts +26 -26
- package/dist/Probe.js.map +1 -1
- package/dist/VideoRenderOptions.d.cts +48 -48
- package/dist/VideoRenderOptions.d.ts +48 -48
- package/dist/generateFragmentIndex.cjs.map +1 -1
- package/dist/generateFragmentIndex.js.map +1 -1
- package/dist/generateSingleTrack.cjs.map +1 -1
- package/dist/generateSingleTrack.js.map +1 -1
- package/dist/idempotentTask.cjs +31 -3
- package/dist/idempotentTask.cjs.map +1 -1
- package/dist/idempotentTask.js +31 -4
- package/dist/idempotentTask.js.map +1 -1
- package/dist/tasks/findOrCreateCaptions.cjs.map +1 -1
- package/dist/tasks/findOrCreateCaptions.js.map +1 -1
- package/dist/tasks/generateTrackFragmentIndex.cjs.map +1 -1
- package/dist/tasks/generateTrackFragmentIndex.js.map +1 -1
- package/package.json +2 -2
- package/dist/package.cjs +0 -12
- package/dist/package.cjs.map +0 -1
- package/dist/package.js +0 -6
- package/dist/package.js.map +0 -1
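The bulk of the diff body below is the regenerated sourcemap for generateFragmentIndex.js, whose `sourcesContent` field embeds the full TypeScript source. That source includes a `constructH264CodecString` helper that builds RFC 6381 `avc1.PPCCLL` codec strings from an ffprobe profile name and level. As a minimal sketch of that mapping: the helper name and sample values below are illustrative (the embedded original also handles the High 10/422/444 profiles), not taken from any stream in this package.

// Sketch of the avc1.PPCCLL construction from the embedded source
// (PP = profile_idc, CC = constraint flags, LL = level_idc).
const profileMap: Record<string, number> = {
  Baseline: 0x42,
  Main: 0x4d,
  High: 0x64,
};

function h264CodecString(profile: string, level: number): string {
  const profileIdc = profileMap[profile];
  // Unknown profile: fall back to the bare codec tag, as the original does.
  if (profileIdc === undefined) return "avc1";
  const hex = (n: number) => n.toString(16).padStart(2, "0");
  return `avc1.${hex(profileIdc)}00${hex(level)}`; // constraint flags assumed "00"
}

// High profile at level 4.0 (ffprobe reports level as the integer 40):
console.log(h264CodecString("High", 40)); // "avc1.640028"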
package/dist/generateFragmentIndex.js.map
@@ -1 +1 @@
-
{"version":3,"file":"generateFragmentIndex.js","names":["box: MP4BoxHeader","probe: PacketProbe","trackIndexes: Record<number, TrackFragmentIndex>","fragmentTimingData: FragmentTimingData[]","segments: TrackSegment[]","trackStartTimeOffsetMs: number | undefined"],"sources":["../src/generateFragmentIndex.ts"],"sourcesContent":["import { Readable, Transform, Writable } from \"node:stream\";\nimport { pipeline } from \"node:stream/promises\";\nimport { createWriteStream } from \"node:fs\";\nimport { unlink } from \"node:fs/promises\";\nimport { tmpdir } from \"node:os\";\nimport { join } from \"node:path\";\nimport { randomBytes } from \"node:crypto\";\nimport debug from \"debug\";\nimport type { TrackFragmentIndex, TrackSegment } from \"./Probe.js\";\nimport { PacketProbe } from \"./Probe.js\";\n\nconst log = debug(\"ef:generateFragmentIndex\");\n\n// Minimum segment duration in milliseconds\nconst MIN_SEGMENT_DURATION_MS = 2000; // 2 seconds\nconst MS_PER_SECOND = 1000;\n\n// ============================================================================\n// Core Domain Types (Type Safety as Invariant Enforcement)\n// ============================================================================\n\n/** Raw packet from ffprobe - the fundamental unit of media data */\ninterface ProbePacket {\n stream_index: number;\n pts: number;\n dts: number;\n pts_time: number;\n dts_time: number;\n duration?: number;\n pos?: number;\n flags?: string;\n}\n\n/** Video packet with keyframe status - invariant: isKeyframe is always defined */\ninterface VideoPacket {\n pts: number;\n dts: number;\n duration?: number;\n isKeyframe: boolean;\n}\n\n/** Audio packet - simpler than video, no keyframe concept */\ninterface AudioPacket {\n pts: number;\n dts: number;\n duration?: number;\n}\n\n/** Fragment timing data - packets organized by fragment */\ninterface FragmentTimingData {\n fragmentIndex: number;\n videoPackets: VideoPacket[];\n audioPackets: AudioPacket[];\n}\n\n/** Timebase for timestamp conversion */\ninterface Timebase {\n num: number;\n den: number;\n}\n\n// Helper function to construct H.264 codec string from profile and level\nfunction constructH264CodecString(\n codecTagString: string,\n profile?: string,\n level?: number,\n): string {\n if (codecTagString !== \"avc1\" || !profile || level === undefined) {\n return codecTagString;\n }\n\n // Map H.264 profile names to profile_idc values\n const profileMap: Record<string, number> = {\n Baseline: 0x42,\n Main: 0x4d,\n High: 0x64,\n \"High 10\": 0x6e,\n \"High 422\": 0x7a,\n \"High 444\": 0xf4,\n };\n\n const profileIdc = profileMap[profile];\n if (!profileIdc) {\n return codecTagString;\n }\n\n // Format: avc1.PPCCLL where PP=profile_idc, CC=constraint_flags, LL=level_idc\n const profileHex = profileIdc.toString(16).padStart(2, \"0\");\n const constraintFlags = \"00\"; // Most common case\n const levelHex = level.toString(16).padStart(2, \"0\");\n\n return `${codecTagString}.${profileHex}${constraintFlags}${levelHex}`;\n}\n\ninterface MP4BoxHeader {\n type: string;\n offset: number;\n size: number;\n headerSize: number;\n}\n\ninterface Fragment {\n type: \"init\" | \"media\";\n offset: number;\n size: number;\n moofOffset?: number;\n mdatOffset?: number;\n}\n\n/**\n * Streaming MP4 box parser that detects box boundaries without loading entire file into memory\n */\nclass StreamingBoxParser extends Transform {\n private buffer = Buffer.alloc(0);\n private globalOffset = 0;\n private fragments: Fragment[] = [];\n private currentMoof: MP4BoxHeader | null = 
null;\n private initSegmentEnd = 0;\n private foundBoxes: MP4BoxHeader[] = [];\n\n constructor() {\n super({ objectMode: false });\n }\n\n _transform(chunk: Buffer, _encoding: BufferEncoding, callback: () => void) {\n // Append new data to our sliding buffer\n this.buffer = Buffer.concat([this.buffer, chunk]);\n\n // Parse all complete boxes in the current buffer\n this.parseBoxes();\n\n // Pass through the original chunk unchanged\n this.push(chunk);\n callback();\n }\n\n private parseBoxes() {\n let bufferOffset = 0;\n\n while (this.buffer.length - bufferOffset >= 8) {\n const size = this.buffer.readUInt32BE(bufferOffset);\n const type = this.buffer\n .subarray(bufferOffset + 4, bufferOffset + 8)\n .toString(\"ascii\");\n\n // Invalid or incomplete box\n if (size === 0 || size < 8 || this.buffer.length < bufferOffset + size) {\n break;\n }\n\n const box: MP4BoxHeader = {\n type,\n offset: this.globalOffset + bufferOffset,\n size,\n headerSize: 8,\n };\n\n log(`Found box: ${box.type} at offset ${box.offset}, size ${box.size}`);\n this.foundBoxes.push(box);\n this.handleBox(box);\n\n bufferOffset += size;\n }\n\n // Update global offset and trim processed data from buffer\n this.globalOffset += bufferOffset;\n this.buffer = this.buffer.subarray(bufferOffset);\n }\n\n private handleBox(box: MP4BoxHeader) {\n switch (box.type) {\n case \"ftyp\":\n case \"moov\":\n // Part of init segment\n this.initSegmentEnd = Math.max(\n this.initSegmentEnd,\n box.offset + box.size,\n );\n break;\n\n case \"moof\":\n this.currentMoof = box;\n break;\n\n case \"mdat\":\n if (this.currentMoof) {\n // Found a complete fragment (moof + mdat pair) - fragmented MP4\n this.fragments.push({\n type: \"media\",\n offset: this.currentMoof.offset,\n size: box.offset + box.size - this.currentMoof.offset,\n moofOffset: this.currentMoof.offset,\n mdatOffset: box.offset,\n });\n this.currentMoof = null;\n } else {\n // mdat without moof - this is non-fragmented content, not a fragment\n // Common in mixed MP4 files where initial content is non-fragmented\n // followed by fragmented content. 
Ignore for fragment indexing.\n log(\n `Found non-fragmented mdat at offset ${box.offset}, skipping for fragment index`,\n );\n }\n break;\n }\n }\n\n _flush(callback: () => void) {\n this.parseBoxes(); // Process any remaining buffered data\n\n // Probe always outputs fragmented MP4\n // Init segment is ftyp + moov boxes before the first moof\n if (this.initSegmentEnd > 0) {\n this.fragments.unshift({\n type: \"init\",\n offset: 0,\n size: this.initSegmentEnd,\n });\n }\n\n callback();\n }\n\n getFragments(): Fragment[] {\n return this.fragments;\n }\n}\n\n// Helper to convert timestamp from ffprobe timebase to track timescale\nfunction convertTimestamp(\n pts: number,\n timebase: Timebase,\n timescale: number,\n): number {\n return Math.round((pts * timescale) / timebase.den);\n}\n\n// Helper to calculate duration in milliseconds from timescale units\nfunction durationMsFromTimescale(\n durationTimescale: number,\n timescale: number,\n): number {\n return (durationTimescale / timescale) * MS_PER_SECOND;\n}\n\n// Helper to calculate segment byte range from accumulated fragments\nfunction calculateSegmentByteRange(\n accumulatedFragments: Array<{ fragment: Fragment }>,\n): { offset: number; size: number } {\n const firstFrag = accumulatedFragments[0]!;\n const lastFrag = accumulatedFragments[accumulatedFragments.length - 1]!;\n return {\n offset: firstFrag.fragment.offset,\n size:\n lastFrag.fragment.offset +\n lastFrag.fragment.size -\n firstFrag.fragment.offset,\n };\n}\n\n// Explicit enumeration of segment accumulation state (Enumerate the Core Concept)\ntype SegmentAccumulationState =\n | { type: \"idle\" }\n | {\n type: \"accumulating\";\n startPts: number;\n startDts: number;\n fragments: Array<{\n fragment: Fragment;\n fragmentData: FragmentTimingData;\n }>;\n };\n\n// Invariant: Segment must start on keyframe (for video) and have minimum duration\ninterface SegmentEvaluation {\n cts: number;\n dts: number;\n duration: number;\n offset: number;\n size: number;\n}\n\n// Track processing context - single source of truth for track processing\ninterface TrackProcessingContext {\n timebase: Timebase;\n timescale: number;\n fragmentTimingData: FragmentTimingData[];\n mediaFragments: Fragment[];\n // Cached filtered packets for this stream (Performance Through Caching)\n streamPackets: ProbePacket[];\n streamType: \"video\" | \"audio\";\n streamIndex: number;\n}\n\n// Segment accumulator that encapsulates accumulation logic\nclass SegmentAccumulator {\n private state: SegmentAccumulationState = { type: \"idle\" };\n private readonly context: TrackProcessingContext;\n private readonly minDurationMs: number;\n\n constructor(context: TrackProcessingContext, minDurationMs: number) {\n this.context = context;\n this.minDurationMs = minDurationMs;\n }\n\n // Evaluation: Determine if we should finalize (semantics)\n shouldFinalize(nextKeyframe: { pts: number; dts: number } | null): boolean {\n if (this.state.type !== \"accumulating\") {\n return false;\n }\n\n const durationMs = this.calculateAccumulatedDurationMs();\n const hasMinimumDuration = durationMs >= this.minDurationMs;\n\n // For video: finalize on keyframe + minimum duration\n // For audio: finalize on minimum duration (no keyframe requirement)\n if (this.context.streamType === \"video\") {\n return hasMinimumDuration && nextKeyframe !== null;\n } else {\n return hasMinimumDuration;\n }\n }\n\n // Evaluation: Calculate what the segment would be (semantics)\n evaluateSegment(\n nextBoundary: { pts: number } | null,\n ): 
SegmentEvaluation | null {\n if (this.state.type !== \"accumulating\") {\n return null;\n }\n\n const segmentCts = convertTimestamp(\n this.state.startPts,\n this.context.timebase,\n this.context.timescale,\n );\n const segmentDts = convertTimestamp(\n this.state.startDts,\n this.context.timebase,\n this.context.timescale,\n );\n const segmentDuration = this.calculateSegmentDuration(\n segmentCts,\n nextBoundary,\n );\n const { offset, size } = calculateSegmentByteRange(this.state.fragments);\n\n return {\n cts: segmentCts,\n dts: segmentDts,\n duration: segmentDuration,\n offset,\n size,\n };\n }\n\n // Application: Add fragment to accumulation (mechanism)\n addFragment(fragment: Fragment, fragmentData: FragmentTimingData): void {\n if (this.state.type === \"idle\") {\n // Start accumulation - invariant: video segments must start on keyframe\n const startPts = this.getStartPts(fragmentData);\n const startDts = this.getStartDts(fragmentData);\n this.state = {\n type: \"accumulating\",\n startPts,\n startDts,\n fragments: [{ fragment, fragmentData }],\n };\n } else {\n // Continue accumulation\n this.state.fragments.push({ fragment, fragmentData });\n }\n }\n\n // Application: Reset accumulation (mechanism)\n reset(): void {\n this.state = { type: \"idle\" };\n }\n\n // Application: Start new segment with keyframe (mechanism)\n startNewSegment(keyframe: { pts: number; dts: number }): void {\n this.state = {\n type: \"accumulating\",\n startPts: keyframe.pts,\n startDts: keyframe.dts,\n fragments: [],\n };\n }\n\n // Query: Get current state\n getState(): SegmentAccumulationState {\n return this.state;\n }\n\n // Query: Check if accumulating\n isAccumulating(): boolean {\n return this.state.type === \"accumulating\";\n }\n\n // Private helpers\n private calculateAccumulatedDurationMs(): number {\n if (this.state.type !== \"accumulating\") {\n return 0;\n }\n\n const lastFrag = this.state.fragments[this.state.fragments.length - 1]!;\n const lastPacket = this.getLastPacket(lastFrag.fragmentData);\n const endCts = convertTimestamp(\n lastPacket.pts + (lastPacket.duration || 0),\n this.context.timebase,\n this.context.timescale,\n );\n const startCts = convertTimestamp(\n this.state.startPts,\n this.context.timebase,\n this.context.timescale,\n );\n return durationMsFromTimescale(endCts - startCts, this.context.timescale);\n }\n\n private calculateSegmentDuration(\n segmentCts: number,\n nextBoundary: { pts: number } | null,\n ): number {\n if (nextBoundary) {\n const nextSegmentCts = convertTimestamp(\n nextBoundary.pts,\n this.context.timebase,\n this.context.timescale,\n );\n return nextSegmentCts - segmentCts;\n }\n\n // Last segment: duration to end of all packets\n // Use pre-cached streamPackets (Performance Through Caching)\n const sortedPackets = [...this.context.streamPackets].sort(\n (a, b) => a.pts - b.pts,\n );\n const lastPacket = sortedPackets[sortedPackets.length - 1]!;\n const streamEnd = convertTimestamp(\n lastPacket.pts + (lastPacket.duration || 0),\n this.context.timebase,\n this.context.timescale,\n );\n return streamEnd - segmentCts;\n }\n\n private getStartPts(fragmentData: FragmentTimingData): number {\n if (this.context.streamType === \"video\") {\n const keyframe = fragmentData.videoPackets.find((p) => p.isKeyframe);\n return keyframe?.pts ?? fragmentData.videoPackets[0]?.pts ?? 0;\n } else {\n return fragmentData.audioPackets[0]?.pts ?? 
0;\n }\n }\n\n private getStartDts(fragmentData: FragmentTimingData): number {\n if (this.context.streamType === \"video\") {\n const keyframe = fragmentData.videoPackets.find((p) => p.isKeyframe);\n return keyframe?.dts ?? fragmentData.videoPackets[0]?.dts ?? 0;\n } else {\n return fragmentData.audioPackets[0]?.dts ?? 0;\n }\n }\n\n private getLastPacket(fragmentData: FragmentTimingData): {\n pts: number;\n duration?: number;\n } {\n if (this.context.streamType === \"video\") {\n const packets = fragmentData.videoPackets;\n return packets[packets.length - 1]!;\n } else {\n const packets = fragmentData.audioPackets;\n return packets[packets.length - 1]!;\n }\n }\n}\n\n// Helper function to extract fragment data (init + media fragment)\n\nexport const generateFragmentIndex = async (\n inputStream: Readable,\n startTimeOffsetMs?: number,\n trackIdMapping?: Record<number, number>, // Map from source track ID to desired track ID\n options?: { tmpDir?: string },\n): Promise<Record<number, TrackFragmentIndex>> => {\n // Step 1: Create a streaming parser that detects fragment boundaries\n const parser = new StreamingBoxParser();\n\n // Step 2: Write stream to a temp file to avoid buffering the entire MP4 in memory\n const tempDir = options?.tmpDir ?? tmpdir();\n const tempFile = join(tempDir, `ef-probe-${randomBytes(8).toString(\"hex\")}.mp4`);\n let totalSize = 0;\n\n const dest = new Writable({\n write(chunk, _encoding, callback) {\n totalSize += chunk.length;\n callback();\n },\n });\n\n const tempWriteStream = createWriteStream(tempFile);\n\n // Split input through both parser (for fragment detection) and temp file (for probing)\n // We must tee the stream: pipe inputStream → parser → dest, and also write to tempFile\n const teeTransform = new Transform({\n transform(chunk, _encoding, callback) {\n tempWriteStream.write(chunk);\n this.push(chunk);\n callback();\n },\n flush(callback) {\n tempWriteStream.end(() => callback());\n },\n });\n\n // Process the stream through both parser and collection\n await pipeline(inputStream, teeTransform, parser, dest);\n const fragments = parser.getFragments();\n\n // If no data was collected, clean up and return empty result\n if (totalSize === 0) {\n await unlink(tempFile).catch(() => {});\n return {};\n }\n\n // Step 3: Use ffprobe to analyze the temp file for track metadata (avoids in-memory buffering)\n let probe: PacketProbe;\n try {\n probe = await PacketProbe.probePath(tempFile);\n } catch (error) {\n console.warn(\"Failed to probe stream with ffprobe:\", error);\n await unlink(tempFile).catch(() => {});\n return {};\n } finally {\n await unlink(tempFile).catch(() => {});\n }\n\n const videoStreams = probe.videoStreams;\n const audioStreams = probe.audioStreams;\n\n const trackIndexes: Record<number, TrackFragmentIndex> = {};\n const initFragment = fragments.find((f) => f.type === \"init\");\n const mediaFragments = fragments.filter((f) => f.type === \"media\");\n\n // Map packets to fragments using byte position for moof+mdat boundaries\n // But create contiguous segments based on keyframes\n const fragmentTimingData: FragmentTimingData[] = [];\n\n for (\n let fragmentIndex = 0;\n fragmentIndex < mediaFragments.length;\n fragmentIndex++\n ) {\n const fragment = mediaFragments[fragmentIndex]!;\n\n // Find packets that belong to this fragment based on byte position (moof+mdat boundaries)\n const fragmentStart = fragment.offset;\n const fragmentEnd = fragment.offset + fragment.size;\n\n const videoPackets = probe.packets\n .filter((packet) => {\n 
const stream = videoStreams.find(\n (s) => s.index === packet.stream_index,\n );\n return (\n stream?.codec_type === \"video\" &&\n packet.pos !== undefined &&\n packet.pos >= fragmentStart &&\n packet.pos < fragmentEnd\n );\n })\n .map((packet) => ({\n pts: packet.pts,\n dts: packet.dts,\n duration: packet.duration,\n isKeyframe: packet.flags?.includes(\"K\") ?? false,\n }));\n\n const audioPackets = probe.packets\n .filter((packet) => {\n const stream = audioStreams.find(\n (s) => s.index === packet.stream_index,\n );\n return (\n stream?.codec_type === \"audio\" &&\n packet.pos !== undefined &&\n packet.pos >= fragmentStart &&\n packet.pos < fragmentEnd\n );\n })\n .map((packet) => ({\n pts: packet.pts,\n dts: packet.dts,\n duration: packet.duration,\n }));\n\n fragmentTimingData.push({\n fragmentIndex,\n videoPackets,\n audioPackets,\n });\n }\n\n // Unified track processing function (One Direction of Truth)\n const processTrack = (\n streamIndex: number,\n streamType: \"video\" | \"audio\",\n timebase: Timebase,\n allPackets: ProbePacket[],\n ): TrackSegment[] => {\n const segments: TrackSegment[] = [];\n const timescale = Math.round(timebase.den / timebase.num);\n\n // Cache filtered packets once (Performance Through Caching)\n const streamPackets = allPackets.filter(\n (p) => p.stream_index === streamIndex,\n );\n\n const context: TrackProcessingContext = {\n timebase,\n timescale,\n fragmentTimingData,\n mediaFragments,\n streamPackets,\n streamType,\n streamIndex,\n };\n\n const accumulator = new SegmentAccumulator(\n context,\n MIN_SEGMENT_DURATION_MS,\n );\n\n for (let i = 0; i < fragmentTimingData.length; i++) {\n const fragmentData = fragmentTimingData[i]!;\n const fragment = mediaFragments[fragmentData.fragmentIndex]!;\n const packets =\n streamType === \"video\"\n ? 
fragmentData.videoPackets\n : fragmentData.audioPackets;\n\n log(\n `Fragment ${fragmentData.fragmentIndex}: ${packets.length} ${streamType} packets`,\n );\n\n if (packets.length === 0) {\n log(\n `Skipping fragment ${fragmentData.fragmentIndex} - no ${streamType} packets`,\n );\n continue;\n }\n\n if (streamType === \"video\") {\n // Video: segments must start on keyframes\n const keyframe = fragmentData.videoPackets.find((p) => p.isKeyframe);\n const hasKeyframe = keyframe !== undefined;\n\n // Start new segment on keyframe if none exists\n if (!accumulator.isAccumulating() && hasKeyframe) {\n accumulator.startNewSegment({\n pts: keyframe.pts,\n dts: keyframe.dts,\n });\n accumulator.addFragment(fragment, fragmentData);\n continue;\n }\n\n // Skip fragments without keyframes if no segment started\n if (!accumulator.isAccumulating()) {\n continue;\n }\n\n // Check if we should finalize when encountering a new keyframe\n if (hasKeyframe) {\n if (\n accumulator.shouldFinalize({ pts: keyframe.pts, dts: keyframe.dts })\n ) {\n // Duration should be to the start of this keyframe (start of next segment)\n const nextBoundary = { pts: keyframe.pts };\n const evaluation = accumulator.evaluateSegment(nextBoundary);\n if (evaluation) {\n segments.push(evaluation);\n }\n accumulator.reset();\n accumulator.startNewSegment({\n pts: keyframe.pts,\n dts: keyframe.dts,\n });\n }\n }\n } else {\n // Audio: no keyframe requirement, just duration-based\n if (!accumulator.isAccumulating()) {\n accumulator.addFragment(fragment, fragmentData);\n continue;\n }\n\n // Check if we should finalize based on accumulated duration\n if (accumulator.shouldFinalize(null)) {\n // Duration should be to the start of this fragment (start of next segment)\n const nextBoundary = { pts: fragmentData.audioPackets[0]!.pts };\n const evaluation = accumulator.evaluateSegment(nextBoundary);\n if (evaluation) {\n segments.push(evaluation);\n }\n accumulator.reset();\n }\n }\n\n // Add fragment to current segment\n accumulator.addFragment(fragment, fragmentData);\n }\n\n // Finalize any remaining accumulated fragments\n if (accumulator.isAccumulating()) {\n const evaluation = accumulator.evaluateSegment(null);\n if (evaluation) {\n segments.push(evaluation);\n }\n }\n\n return segments;\n };\n\n // Step 4: Process video tracks using ffprobe data\n for (const videoStream of videoStreams) {\n // Get timebase for this stream to convert timestamps\n const timebase = probe.videoTimebase;\n if (!timebase) {\n console.warn(\"No timebase found for video stream\");\n continue;\n }\n\n const timescale = Math.round(timebase.den / timebase.num);\n\n // Cache filtered packets once (Performance Through Caching)\n const streamPackets = (probe.packets as ProbePacket[]).filter(\n (p) => p.stream_index === videoStream.index,\n );\n const keyframeCount = streamPackets.filter((p) => p.flags?.includes(\"K\")).length;\n const totalSampleCount = streamPackets.length;\n\n log(\n `Complete stream has ${streamPackets.length} video packets, ${keyframeCount} keyframes for stream ${videoStream.index}`,\n );\n\n // Calculate per-track timing offset from first packet for timeline mapping\n let trackStartTimeOffsetMs: number | undefined;\n if (streamPackets.length > 0) {\n log(\n `First video packet dts_time: ${streamPackets[0]!.dts_time}, pts_time: ${streamPackets[0]!.pts_time}`,\n );\n const presentationTime = streamPackets[0]!.pts_time;\n if (Math.abs(presentationTime) > 0.01) {\n trackStartTimeOffsetMs = presentationTime * MS_PER_SECOND;\n }\n }\n if 
(startTimeOffsetMs !== undefined) {\n trackStartTimeOffsetMs = startTimeOffsetMs;\n }\n\n // Process fragments to create segments with minimum duration\n const segments = processTrack(\n videoStream.index,\n \"video\",\n timebase,\n probe.packets as ProbePacket[],\n );\n\n // Calculate total duration from cached stream packets (inclusive of last frame duration)\n let totalDuration = 0;\n if (streamPackets.length > 0) {\n const firstPacket = streamPackets[0]!;\n const lastPacket = streamPackets[streamPackets.length - 1]!;\n const firstPts = convertTimestamp(firstPacket.pts, timebase, timescale);\n const lastPts = convertTimestamp(lastPacket.pts, timebase, timescale);\n const lastDuration = convertTimestamp(lastPacket.duration ?? 0, timebase, timescale);\n totalDuration = lastPts - firstPts + lastDuration;\n }\n\n const finalTrackId =\n trackIdMapping?.[videoStream.index] ?? videoStream.index + 1;\n trackIndexes[finalTrackId] = {\n track: finalTrackId,\n type: \"video\",\n width: videoStream.coded_width || videoStream.width,\n height: videoStream.coded_height || videoStream.height,\n timescale: timescale,\n sample_count: totalSampleCount,\n codec: constructH264CodecString(\n videoStream.codec_tag_string,\n videoStream.profile,\n videoStream.level,\n ),\n duration: totalDuration,\n startTimeOffsetMs: trackStartTimeOffsetMs,\n initSegment: {\n offset: 0,\n size: initFragment?.size || 0,\n },\n segments,\n };\n }\n\n // Step 5: Process audio tracks using ffprobe data\n for (const audioStream of audioStreams) {\n // Get timebase for this stream to convert timestamps\n const timebase = probe.audioTimebase;\n if (!timebase) {\n console.warn(\"No timebase found for audio stream\");\n continue;\n }\n\n const timescale = Math.round(timebase.den / timebase.num);\n\n // Cache filtered packets once (Performance Through Caching)\n const streamPackets = (probe.packets as ProbePacket[]).filter(\n (p) => p.stream_index === audioStream.index,\n );\n const totalSampleCount = streamPackets.length;\n\n // Calculate per-track timing offset from first packet for timeline mapping\n let trackStartTimeOffsetMs: number | undefined;\n if (streamPackets.length > 0) {\n const presentationTime = streamPackets[0]!.pts_time;\n if (Math.abs(presentationTime) > 0.01) {\n trackStartTimeOffsetMs = presentationTime * MS_PER_SECOND;\n }\n }\n if (startTimeOffsetMs !== undefined) {\n trackStartTimeOffsetMs = startTimeOffsetMs;\n }\n\n // Process fragments to create segments with minimum duration\n const segments = processTrack(\n audioStream.index,\n \"audio\",\n timebase,\n probe.packets as ProbePacket[],\n );\n\n // Calculate total duration\n const totalDuration = segments.reduce((sum, seg) => sum + seg.duration, 0);\n\n const finalTrackId =\n trackIdMapping?.[audioStream.index] ?? 
audioStream.index + 1;\n trackIndexes[finalTrackId] = {\n track: finalTrackId,\n type: \"audio\",\n channel_count: audioStream.channels,\n sample_rate: Number(audioStream.sample_rate),\n sample_size: audioStream.bits_per_sample,\n sample_count: totalSampleCount,\n timescale: timescale,\n codec: audioStream.codec_tag_string || audioStream.codec_name || \"\",\n duration: totalDuration,\n startTimeOffsetMs: trackStartTimeOffsetMs,\n initSegment: {\n offset: 0,\n size: initFragment?.size || 0,\n },\n segments,\n };\n }\n\n return trackIndexes;\n};\n"],"mappings":";;;;;;;;;;;AAWA,MAAM,MAAM,MAAM,2BAA2B;AAG7C,MAAM,0BAA0B;AAChC,MAAM,gBAAgB;AA+CtB,SAAS,yBACP,gBACA,SACA,OACQ;AACR,KAAI,mBAAmB,UAAU,CAAC,WAAW,UAAU,OACrD,QAAO;CAaT,MAAM,aATqC;EACzC,UAAU;EACV,MAAM;EACN,MAAM;EACN,WAAW;EACX,YAAY;EACZ,YAAY;EACb,CAE6B;AAC9B,KAAI,CAAC,WACH,QAAO;AAQT,QAAO,GAAG,eAAe,GAJN,WAAW,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI,KAE1C,MAAM,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI;;;;;AAuBtD,IAAM,qBAAN,cAAiC,UAAU;CAQzC,cAAc;AACZ,QAAM,EAAE,YAAY,OAAO,CAAC;gBARb,OAAO,MAAM,EAAE;sBACT;mBACS,EAAE;qBACS;wBAClB;oBACY,EAAE;;CAMvC,WAAW,OAAe,WAA2B,UAAsB;AAEzE,OAAK,SAAS,OAAO,OAAO,CAAC,KAAK,QAAQ,MAAM,CAAC;AAGjD,OAAK,YAAY;AAGjB,OAAK,KAAK,MAAM;AAChB,YAAU;;CAGZ,AAAQ,aAAa;EACnB,IAAI,eAAe;AAEnB,SAAO,KAAK,OAAO,SAAS,gBAAgB,GAAG;GAC7C,MAAM,OAAO,KAAK,OAAO,aAAa,aAAa;GACnD,MAAM,OAAO,KAAK,OACf,SAAS,eAAe,GAAG,eAAe,EAAE,CAC5C,SAAS,QAAQ;AAGpB,OAAI,SAAS,KAAK,OAAO,KAAK,KAAK,OAAO,SAAS,eAAe,KAChE;GAGF,MAAMA,MAAoB;IACxB;IACA,QAAQ,KAAK,eAAe;IAC5B;IACA,YAAY;IACb;AAED,OAAI,cAAc,IAAI,KAAK,aAAa,IAAI,OAAO,SAAS,IAAI,OAAO;AACvE,QAAK,WAAW,KAAK,IAAI;AACzB,QAAK,UAAU,IAAI;AAEnB,mBAAgB;;AAIlB,OAAK,gBAAgB;AACrB,OAAK,SAAS,KAAK,OAAO,SAAS,aAAa;;CAGlD,AAAQ,UAAU,KAAmB;AACnC,UAAQ,IAAI,MAAZ;GACE,KAAK;GACL,KAAK;AAEH,SAAK,iBAAiB,KAAK,IACzB,KAAK,gBACL,IAAI,SAAS,IAAI,KAClB;AACD;GAEF,KAAK;AACH,SAAK,cAAc;AACnB;GAEF,KAAK;AACH,QAAI,KAAK,aAAa;AAEpB,UAAK,UAAU,KAAK;MAClB,MAAM;MACN,QAAQ,KAAK,YAAY;MACzB,MAAM,IAAI,SAAS,IAAI,OAAO,KAAK,YAAY;MAC/C,YAAY,KAAK,YAAY;MAC7B,YAAY,IAAI;MACjB,CAAC;AACF,UAAK,cAAc;UAKnB,KACE,uCAAuC,IAAI,OAAO,+BACnD;AAEH;;;CAIN,OAAO,UAAsB;AAC3B,OAAK,YAAY;AAIjB,MAAI,KAAK,iBAAiB,EACxB,MAAK,UAAU,QAAQ;GACrB,MAAM;GACN,QAAQ;GACR,MAAM,KAAK;GACZ,CAAC;AAGJ,YAAU;;CAGZ,eAA2B;AACzB,SAAO,KAAK;;;AAKhB,SAAS,iBACP,KACA,UACA,WACQ;AACR,QAAO,KAAK,MAAO,MAAM,YAAa,SAAS,IAAI;;AAIrD,SAAS,wBACP,mBACA,WACQ;AACR,QAAQ,oBAAoB,YAAa;;AAI3C,SAAS,0BACP,sBACkC;CAClC,MAAM,YAAY,qBAAqB;CACvC,MAAM,WAAW,qBAAqB,qBAAqB,SAAS;AACpE,QAAO;EACL,QAAQ,UAAU,SAAS;EAC3B,MACE,SAAS,SAAS,SAClB,SAAS,SAAS,OAClB,UAAU,SAAS;EACtB;;AAsCH,IAAM,qBAAN,MAAyB;CAKvB,YAAY,SAAiC,eAAuB;eAJ1B,EAAE,MAAM,QAAQ;AAKxD,OAAK,UAAU;AACf,OAAK,gBAAgB;;CAIvB,eAAe,cAA4D;AACzE,MAAI,KAAK,MAAM,SAAS,eACtB,QAAO;EAIT,MAAM,qBADa,KAAK,gCAAgC,IACf,KAAK;AAI9C,MAAI,KAAK,QAAQ,eAAe,QAC9B,QAAO,sBAAsB,iBAAiB;MAE9C,QAAO;;CAKX,gBACE,cAC0B;AAC1B,MAAI,KAAK,MAAM,SAAS,eACtB,QAAO;EAGT,MAAM,aAAa,iBACjB,KAAK,MAAM,UACX,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd;EACD,MAAM,aAAa,iBACjB,KAAK,MAAM,UACX,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd;EACD,MAAM,kBAAkB,KAAK,yBAC3B,YACA,aACD;EACD,MAAM,EAAE,QAAQ,SAAS,0BAA0B,KAAK,MAAM,UAAU;AAExE,SAAO;GACL,KAAK;GACL,KAAK;GACL,UAAU;GACV;GACA;GACD;;CAIH,YAAY,UAAoB,cAAwC;AACtE,MAAI,KAAK,MAAM,SAAS,OAItB,MAAK,QAAQ;GACX,MAAM;GACN,UAJe,KAAK,YAAY,aAAa;GAK7C,UAJe,KAAK,YAAY,aAAa;GAK7C,WAAW,CAAC;IAAE;IAAU;IAAc,CAAC;GACxC;MAGD,MAAK,MAAM,UAAU,KAAK;GAAE;GAAU;GAAc,CAAC;;CAKzD,QAAc;AACZ,OAAK,QAAQ,EAAE,MAAM,QAAQ;;CAI/B,gBAAgB,UAA8C;AAC5D,OAAK,QAAQ;GACX,MAAM;GACN,UAAU,SAAS;GACnB,UAAU,SAAS;GACnB,WAAW,EAAE;GACd;;CAIH,WAAqC;AACnC,SAAO,KAAK;;CAId,iBAA0B;AACxB,SAAO,KAAK,MAAM,SAAS;;CAI7B,AAAQ,iCAAyC;AAC/C,MAAI,KAAK,MA
AM,SAAS,eACtB,QAAO;EAGT,MAAM,WAAW,KAAK,MAAM,UAAU,KAAK,MAAM,UAAU,SAAS;EACpE,MAAM,aAAa,KAAK,cAAc,SAAS,aAAa;AAW5D,SAAO,wBAVQ,iBACb,WAAW,OAAO,WAAW,YAAY,IACzC,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,GACgB,iBACf,KAAK,MAAM,UACX,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,EACiD,KAAK,QAAQ,UAAU;;CAG3E,AAAQ,yBACN,YACA,cACQ;AACR,MAAI,aAMF,QALuB,iBACrB,aAAa,KACb,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,GACuB;EAK1B,MAAM,gBAAgB,CAAC,GAAG,KAAK,QAAQ,cAAc,CAAC,MACnD,GAAG,MAAM,EAAE,MAAM,EAAE,IACrB;EACD,MAAM,aAAa,cAAc,cAAc,SAAS;AAMxD,SALkB,iBAChB,WAAW,OAAO,WAAW,YAAY,IACzC,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,GACkB;;CAGrB,AAAQ,YAAY,cAA0C;AAC5D,MAAI,KAAK,QAAQ,eAAe,QAE9B,QADiB,aAAa,aAAa,MAAM,MAAM,EAAE,WAAW,EACnD,OAAO,aAAa,aAAa,IAAI,OAAO;MAE7D,QAAO,aAAa,aAAa,IAAI,OAAO;;CAIhD,AAAQ,YAAY,cAA0C;AAC5D,MAAI,KAAK,QAAQ,eAAe,QAE9B,QADiB,aAAa,aAAa,MAAM,MAAM,EAAE,WAAW,EACnD,OAAO,aAAa,aAAa,IAAI,OAAO;MAE7D,QAAO,aAAa,aAAa,IAAI,OAAO;;CAIhD,AAAQ,cAAc,cAGpB;AACA,MAAI,KAAK,QAAQ,eAAe,SAAS;GACvC,MAAM,UAAU,aAAa;AAC7B,UAAO,QAAQ,QAAQ,SAAS;SAC3B;GACL,MAAM,UAAU,aAAa;AAC7B,UAAO,QAAQ,QAAQ,SAAS;;;;AAOtC,MAAa,wBAAwB,OACnC,aACA,mBACA,gBACA,YACgD;CAEhD,MAAM,SAAS,IAAI,oBAAoB;CAIvC,MAAM,WAAW,KADD,SAAS,UAAU,QAAQ,EACZ,YAAY,YAAY,EAAE,CAAC,SAAS,MAAM,CAAC,MAAM;CAChF,IAAI,YAAY;CAEhB,MAAM,OAAO,IAAI,SAAS,EACxB,MAAM,OAAO,WAAW,UAAU;AAChC,eAAa,MAAM;AACnB,YAAU;IAEb,CAAC;CAEF,MAAM,kBAAkB,kBAAkB,SAAS;AAgBnD,OAAM,SAAS,aAZM,IAAI,UAAU;EACjC,UAAU,OAAO,WAAW,UAAU;AACpC,mBAAgB,MAAM,MAAM;AAC5B,QAAK,KAAK,MAAM;AAChB,aAAU;;EAEZ,MAAM,UAAU;AACd,mBAAgB,UAAU,UAAU,CAAC;;EAExC,CAAC,EAGwC,QAAQ,KAAK;CACvD,MAAM,YAAY,OAAO,cAAc;AAGvC,KAAI,cAAc,GAAG;AACnB,QAAM,OAAO,SAAS,CAAC,YAAY,GAAG;AACtC,SAAO,EAAE;;CAIX,IAAIC;AACJ,KAAI;AACF,UAAQ,MAAM,YAAY,UAAU,SAAS;UACtC,OAAO;AACd,UAAQ,KAAK,wCAAwC,MAAM;AAC3D,QAAM,OAAO,SAAS,CAAC,YAAY,GAAG;AACtC,SAAO,EAAE;WACD;AACR,QAAM,OAAO,SAAS,CAAC,YAAY,GAAG;;CAGxC,MAAM,eAAe,MAAM;CAC3B,MAAM,eAAe,MAAM;CAE3B,MAAMC,eAAmD,EAAE;CAC3D,MAAM,eAAe,UAAU,MAAM,MAAM,EAAE,SAAS,OAAO;CAC7D,MAAM,iBAAiB,UAAU,QAAQ,MAAM,EAAE,SAAS,QAAQ;CAIlE,MAAMC,qBAA2C,EAAE;AAEnD,MACE,IAAI,gBAAgB,GACpB,gBAAgB,eAAe,QAC/B,iBACA;EACA,MAAM,WAAW,eAAe;EAGhC,MAAM,gBAAgB,SAAS;EAC/B,MAAM,cAAc,SAAS,SAAS,SAAS;EAE/C,MAAM,eAAe,MAAM,QACxB,QAAQ,WAAW;AAIlB,UAHe,aAAa,MACzB,MAAM,EAAE,UAAU,OAAO,aAC3B,EAES,eAAe,WACvB,OAAO,QAAQ,UACf,OAAO,OAAO,iBACd,OAAO,MAAM;IAEf,CACD,KAAK,YAAY;GAChB,KAAK,OAAO;GACZ,KAAK,OAAO;GACZ,UAAU,OAAO;GACjB,YAAY,OAAO,OAAO,SAAS,IAAI,IAAI;GAC5C,EAAE;EAEL,MAAM,eAAe,MAAM,QACxB,QAAQ,WAAW;AAIlB,UAHe,aAAa,MACzB,MAAM,EAAE,UAAU,OAAO,aAC3B,EAES,eAAe,WACvB,OAAO,QAAQ,UACf,OAAO,OAAO,iBACd,OAAO,MAAM;IAEf,CACD,KAAK,YAAY;GAChB,KAAK,OAAO;GACZ,KAAK,OAAO;GACZ,UAAU,OAAO;GAClB,EAAE;AAEL,qBAAmB,KAAK;GACtB;GACA;GACA;GACD,CAAC;;CAIJ,MAAM,gBACJ,aACA,YACA,UACA,eACmB;EACnB,MAAMC,WAA2B,EAAE;EAkBnC,MAAM,cAAc,IAAI,mBAVgB;GACtC;GACA,WATgB,KAAK,MAAM,SAAS,MAAM,SAAS,IAAI;GAUvD;GACA;GACA,eAToB,WAAW,QAC9B,MAAM,EAAE,iBAAiB,YAC3B;GAQC;GACA;GACD,EAIC,wBACD;AAED,OAAK,IAAI,IAAI,GAAG,IAAI,mBAAmB,QAAQ,KAAK;GAClD,MAAM,eAAe,mBAAmB;GACxC,MAAM,WAAW,eAAe,aAAa;GAC7C,MAAM,UACJ,eAAe,UACX,aAAa,eACb,aAAa;AAEnB,OACE,YAAY,aAAa,cAAc,IAAI,QAAQ,OAAO,GAAG,WAAW,UACzE;AAED,OAAI,QAAQ,WAAW,GAAG;AACxB,QACE,qBAAqB,aAAa,cAAc,QAAQ,WAAW,UACpE;AACD;;AAGF,OAAI,eAAe,SAAS;IAE1B,MAAM,WAAW,aAAa,aAAa,MAAM,MAAM,EAAE,WAAW;IACpE,MAAM,cAAc,aAAa;AAGjC,QAAI,CAAC,YAAY,gBAAgB,IAAI,aAAa;AAChD,iBAAY,gBAAgB;MAC1B,KAAK,SAAS;MACd,KAAK,SAAS;MACf,CAAC;AACF,iBAAY,YAAY,UAAU,aAAa;AAC/C;;AAIF,QAAI,CAAC,YAAY,gBAAgB,CAC/B;AAIF,QAAI,aACF;SACE,YAAY,eAAe;MAAE,KAAK,SAAS;MAAK,KAAK,SAAS;MAAK,CAAC,EACpE;MAEA,MAAM,eAAe,EAAE,KAAK,SAAS,KAAK;MAC1C,MAAM,aAAa,YAAY,gBAAgB,aAAa;AAC5D,UAAI,WACF,UAAS,KAAK,WAAW;AAE3B,kBAAY,OAAO;AACnB,kBAAY,gBAAgB;OA
C1B,KAAK,SAAS;OACd,KAAK,SAAS;OACf,CAAC;;;UAGD;AAEL,QAAI,CAAC,YAAY,gBAAgB,EAAE;AACjC,iBAAY,YAAY,UAAU,aAAa;AAC/C;;AAIF,QAAI,YAAY,eAAe,KAAK,EAAE;KAEpC,MAAM,eAAe,EAAE,KAAK,aAAa,aAAa,GAAI,KAAK;KAC/D,MAAM,aAAa,YAAY,gBAAgB,aAAa;AAC5D,SAAI,WACF,UAAS,KAAK,WAAW;AAE3B,iBAAY,OAAO;;;AAKvB,eAAY,YAAY,UAAU,aAAa;;AAIjD,MAAI,YAAY,gBAAgB,EAAE;GAChC,MAAM,aAAa,YAAY,gBAAgB,KAAK;AACpD,OAAI,WACF,UAAS,KAAK,WAAW;;AAI7B,SAAO;;AAIT,MAAK,MAAM,eAAe,cAAc;EAEtC,MAAM,WAAW,MAAM;AACvB,MAAI,CAAC,UAAU;AACb,WAAQ,KAAK,qCAAqC;AAClD;;EAGF,MAAM,YAAY,KAAK,MAAM,SAAS,MAAM,SAAS,IAAI;EAGzD,MAAM,gBAAiB,MAAM,QAA0B,QACpD,MAAM,EAAE,iBAAiB,YAAY,MACvC;EACD,MAAM,gBAAgB,cAAc,QAAQ,MAAM,EAAE,OAAO,SAAS,IAAI,CAAC,CAAC;EAC1E,MAAM,mBAAmB,cAAc;AAEvC,MACE,uBAAuB,cAAc,OAAO,kBAAkB,cAAc,wBAAwB,YAAY,QACjH;EAGD,IAAIC;AACJ,MAAI,cAAc,SAAS,GAAG;AAC5B,OACE,gCAAgC,cAAc,GAAI,SAAS,cAAc,cAAc,GAAI,WAC5F;GACD,MAAM,mBAAmB,cAAc,GAAI;AAC3C,OAAI,KAAK,IAAI,iBAAiB,GAAG,IAC/B,0BAAyB,mBAAmB;;AAGhD,MAAI,sBAAsB,OACxB,0BAAyB;EAI3B,MAAM,WAAW,aACf,YAAY,OACZ,SACA,UACA,MAAM,QACP;EAGD,IAAI,gBAAgB;AACpB,MAAI,cAAc,SAAS,GAAG;GAC5B,MAAM,cAAc,cAAc;GAClC,MAAM,aAAa,cAAc,cAAc,SAAS;GACxD,MAAM,WAAW,iBAAiB,YAAY,KAAK,UAAU,UAAU;GACvE,MAAM,UAAU,iBAAiB,WAAW,KAAK,UAAU,UAAU;GACrE,MAAM,eAAe,iBAAiB,WAAW,YAAY,GAAG,UAAU,UAAU;AACpF,mBAAgB,UAAU,WAAW;;EAGvC,MAAM,eACJ,iBAAiB,YAAY,UAAU,YAAY,QAAQ;AAC7D,eAAa,gBAAgB;GAC3B,OAAO;GACP,MAAM;GACN,OAAO,YAAY,eAAe,YAAY;GAC9C,QAAQ,YAAY,gBAAgB,YAAY;GACrC;GACX,cAAc;GACd,OAAO,yBACL,YAAY,kBACZ,YAAY,SACZ,YAAY,MACb;GACD,UAAU;GACV,mBAAmB;GACnB,aAAa;IACX,QAAQ;IACR,MAAM,cAAc,QAAQ;IAC7B;GACD;GACD;;AAIH,MAAK,MAAM,eAAe,cAAc;EAEtC,MAAM,WAAW,MAAM;AACvB,MAAI,CAAC,UAAU;AACb,WAAQ,KAAK,qCAAqC;AAClD;;EAGF,MAAM,YAAY,KAAK,MAAM,SAAS,MAAM,SAAS,IAAI;EAGzD,MAAM,gBAAiB,MAAM,QAA0B,QACpD,MAAM,EAAE,iBAAiB,YAAY,MACvC;EACD,MAAM,mBAAmB,cAAc;EAGvC,IAAIA;AACJ,MAAI,cAAc,SAAS,GAAG;GAC5B,MAAM,mBAAmB,cAAc,GAAI;AAC3C,OAAI,KAAK,IAAI,iBAAiB,GAAG,IAC/B,0BAAyB,mBAAmB;;AAGhD,MAAI,sBAAsB,OACxB,0BAAyB;EAI3B,MAAM,WAAW,aACf,YAAY,OACZ,SACA,UACA,MAAM,QACP;EAGD,MAAM,gBAAgB,SAAS,QAAQ,KAAK,QAAQ,MAAM,IAAI,UAAU,EAAE;EAE1E,MAAM,eACJ,iBAAiB,YAAY,UAAU,YAAY,QAAQ;AAC7D,eAAa,gBAAgB;GAC3B,OAAO;GACP,MAAM;GACN,eAAe,YAAY;GAC3B,aAAa,OAAO,YAAY,YAAY;GAC5C,aAAa,YAAY;GACzB,cAAc;GACH;GACX,OAAO,YAAY,oBAAoB,YAAY,cAAc;GACjE,UAAU;GACV,mBAAmB;GACnB,aAAa;IACX,QAAQ;IACR,MAAM,cAAc,QAAQ;IAC7B;GACD;GACD;;AAGH,QAAO"}
+
{"version":3,"file":"generateFragmentIndex.js","names":["box: MP4BoxHeader","probe: PacketProbe","trackIndexes: Record<number, TrackFragmentIndex>","fragmentTimingData: FragmentTimingData[]","segments: TrackSegment[]","trackStartTimeOffsetMs: number | undefined"],"sources":["../src/generateFragmentIndex.ts"],"sourcesContent":["import { Readable, Transform, Writable } from \"node:stream\";\nimport { pipeline } from \"node:stream/promises\";\nimport { createWriteStream } from \"node:fs\";\nimport { unlink } from \"node:fs/promises\";\nimport { tmpdir } from \"node:os\";\nimport { join } from \"node:path\";\nimport { randomBytes } from \"node:crypto\";\nimport debug from \"debug\";\nimport type { TrackFragmentIndex, TrackSegment } from \"./Probe.js\";\nimport { PacketProbe } from \"./Probe.js\";\n\nconst log = debug(\"ef:generateFragmentIndex\");\n\n// Minimum segment duration in milliseconds\nconst MIN_SEGMENT_DURATION_MS = 2000; // 2 seconds\nconst MS_PER_SECOND = 1000;\n\n// ============================================================================\n// Core Domain Types (Type Safety as Invariant Enforcement)\n// ============================================================================\n\n/** Raw packet from ffprobe - the fundamental unit of media data */\ninterface ProbePacket {\n stream_index: number;\n pts: number;\n dts: number;\n pts_time: number;\n dts_time: number;\n duration?: number;\n pos?: number;\n flags?: string;\n}\n\n/** Video packet with keyframe status - invariant: isKeyframe is always defined */\ninterface VideoPacket {\n pts: number;\n dts: number;\n duration?: number;\n isKeyframe: boolean;\n}\n\n/** Audio packet - simpler than video, no keyframe concept */\ninterface AudioPacket {\n pts: number;\n dts: number;\n duration?: number;\n}\n\n/** Fragment timing data - packets organized by fragment */\ninterface FragmentTimingData {\n fragmentIndex: number;\n videoPackets: VideoPacket[];\n audioPackets: AudioPacket[];\n}\n\n/** Timebase for timestamp conversion */\ninterface Timebase {\n num: number;\n den: number;\n}\n\n// Helper function to construct H.264 codec string from profile and level\nfunction constructH264CodecString(\n codecTagString: string,\n profile?: string,\n level?: number,\n): string {\n if (codecTagString !== \"avc1\" || !profile || level === undefined) {\n return codecTagString;\n }\n\n // Map H.264 profile names to profile_idc values\n const profileMap: Record<string, number> = {\n Baseline: 0x42,\n Main: 0x4d,\n High: 0x64,\n \"High 10\": 0x6e,\n \"High 422\": 0x7a,\n \"High 444\": 0xf4,\n };\n\n const profileIdc = profileMap[profile];\n if (!profileIdc) {\n return codecTagString;\n }\n\n // Format: avc1.PPCCLL where PP=profile_idc, CC=constraint_flags, LL=level_idc\n const profileHex = profileIdc.toString(16).padStart(2, \"0\");\n const constraintFlags = \"00\"; // Most common case\n const levelHex = level.toString(16).padStart(2, \"0\");\n\n return `${codecTagString}.${profileHex}${constraintFlags}${levelHex}`;\n}\n\ninterface MP4BoxHeader {\n type: string;\n offset: number;\n size: number;\n headerSize: number;\n}\n\ninterface Fragment {\n type: \"init\" | \"media\";\n offset: number;\n size: number;\n moofOffset?: number;\n mdatOffset?: number;\n}\n\n/**\n * Streaming MP4 box parser that detects box boundaries without loading entire file into memory\n */\nclass StreamingBoxParser extends Transform {\n private buffer = Buffer.alloc(0);\n private globalOffset = 0;\n private fragments: Fragment[] = [];\n private currentMoof: MP4BoxHeader | null = 
null;\n private initSegmentEnd = 0;\n private foundBoxes: MP4BoxHeader[] = [];\n\n constructor() {\n super({ objectMode: false });\n }\n\n _transform(chunk: Buffer, _encoding: BufferEncoding, callback: () => void) {\n // Append new data to our sliding buffer\n this.buffer = Buffer.concat([this.buffer, chunk]);\n\n // Parse all complete boxes in the current buffer\n this.parseBoxes();\n\n // Pass through the original chunk unchanged\n this.push(chunk);\n callback();\n }\n\n private parseBoxes() {\n let bufferOffset = 0;\n\n while (this.buffer.length - bufferOffset >= 8) {\n const size = this.buffer.readUInt32BE(bufferOffset);\n const type = this.buffer\n .subarray(bufferOffset + 4, bufferOffset + 8)\n .toString(\"ascii\");\n\n // Invalid or incomplete box\n if (size === 0 || size < 8 || this.buffer.length < bufferOffset + size) {\n break;\n }\n\n const box: MP4BoxHeader = {\n type,\n offset: this.globalOffset + bufferOffset,\n size,\n headerSize: 8,\n };\n\n log(`Found box: ${box.type} at offset ${box.offset}, size ${box.size}`);\n this.foundBoxes.push(box);\n this.handleBox(box);\n\n bufferOffset += size;\n }\n\n // Update global offset and trim processed data from buffer\n this.globalOffset += bufferOffset;\n this.buffer = this.buffer.subarray(bufferOffset);\n }\n\n private handleBox(box: MP4BoxHeader) {\n switch (box.type) {\n case \"ftyp\":\n case \"moov\":\n // Part of init segment\n this.initSegmentEnd = Math.max(\n this.initSegmentEnd,\n box.offset + box.size,\n );\n break;\n\n case \"moof\":\n this.currentMoof = box;\n break;\n\n case \"mdat\":\n if (this.currentMoof) {\n // Found a complete fragment (moof + mdat pair) - fragmented MP4\n this.fragments.push({\n type: \"media\",\n offset: this.currentMoof.offset,\n size: box.offset + box.size - this.currentMoof.offset,\n moofOffset: this.currentMoof.offset,\n mdatOffset: box.offset,\n });\n this.currentMoof = null;\n } else {\n // mdat without moof - this is non-fragmented content, not a fragment\n // Common in mixed MP4 files where initial content is non-fragmented\n // followed by fragmented content. 
Ignore for fragment indexing.\n log(\n `Found non-fragmented mdat at offset ${box.offset}, skipping for fragment index`,\n );\n }\n break;\n }\n }\n\n _flush(callback: () => void) {\n this.parseBoxes(); // Process any remaining buffered data\n\n // Probe always outputs fragmented MP4\n // Init segment is ftyp + moov boxes before the first moof\n if (this.initSegmentEnd > 0) {\n this.fragments.unshift({\n type: \"init\",\n offset: 0,\n size: this.initSegmentEnd,\n });\n }\n\n callback();\n }\n\n getFragments(): Fragment[] {\n return this.fragments;\n }\n}\n\n// Helper to convert timestamp from ffprobe timebase to track timescale\nfunction convertTimestamp(\n pts: number,\n timebase: Timebase,\n timescale: number,\n): number {\n return Math.round((pts * timescale) / timebase.den);\n}\n\n// Helper to calculate duration in milliseconds from timescale units\nfunction durationMsFromTimescale(\n durationTimescale: number,\n timescale: number,\n): number {\n return (durationTimescale / timescale) * MS_PER_SECOND;\n}\n\n// Helper to calculate segment byte range from accumulated fragments\nfunction calculateSegmentByteRange(\n accumulatedFragments: Array<{ fragment: Fragment }>,\n): { offset: number; size: number } {\n const firstFrag = accumulatedFragments[0]!;\n const lastFrag = accumulatedFragments[accumulatedFragments.length - 1]!;\n return {\n offset: firstFrag.fragment.offset,\n size:\n lastFrag.fragment.offset +\n lastFrag.fragment.size -\n firstFrag.fragment.offset,\n };\n}\n\n// Explicit enumeration of segment accumulation state (Enumerate the Core Concept)\ntype SegmentAccumulationState =\n | { type: \"idle\" }\n | {\n type: \"accumulating\";\n startPts: number;\n startDts: number;\n fragments: Array<{\n fragment: Fragment;\n fragmentData: FragmentTimingData;\n }>;\n };\n\n// Invariant: Segment must start on keyframe (for video) and have minimum duration\ninterface SegmentEvaluation {\n cts: number;\n dts: number;\n duration: number;\n offset: number;\n size: number;\n}\n\n// Track processing context - single source of truth for track processing\ninterface TrackProcessingContext {\n timebase: Timebase;\n timescale: number;\n fragmentTimingData: FragmentTimingData[];\n mediaFragments: Fragment[];\n // Cached filtered packets for this stream (Performance Through Caching)\n streamPackets: ProbePacket[];\n streamType: \"video\" | \"audio\";\n streamIndex: number;\n}\n\n// Segment accumulator that encapsulates accumulation logic\nclass SegmentAccumulator {\n private state: SegmentAccumulationState = { type: \"idle\" };\n private readonly context: TrackProcessingContext;\n private readonly minDurationMs: number;\n\n constructor(context: TrackProcessingContext, minDurationMs: number) {\n this.context = context;\n this.minDurationMs = minDurationMs;\n }\n\n // Evaluation: Determine if we should finalize (semantics)\n shouldFinalize(nextKeyframe: { pts: number; dts: number } | null): boolean {\n if (this.state.type !== \"accumulating\") {\n return false;\n }\n\n const durationMs = this.calculateAccumulatedDurationMs();\n const hasMinimumDuration = durationMs >= this.minDurationMs;\n\n // For video: finalize on keyframe + minimum duration\n // For audio: finalize on minimum duration (no keyframe requirement)\n if (this.context.streamType === \"video\") {\n return hasMinimumDuration && nextKeyframe !== null;\n } else {\n return hasMinimumDuration;\n }\n }\n\n // Evaluation: Calculate what the segment would be (semantics)\n evaluateSegment(\n nextBoundary: { pts: number } | null,\n ): 
SegmentEvaluation | null {\n if (this.state.type !== \"accumulating\") {\n return null;\n }\n\n const segmentCts = convertTimestamp(\n this.state.startPts,\n this.context.timebase,\n this.context.timescale,\n );\n const segmentDts = convertTimestamp(\n this.state.startDts,\n this.context.timebase,\n this.context.timescale,\n );\n const segmentDuration = this.calculateSegmentDuration(\n segmentCts,\n nextBoundary,\n );\n const { offset, size } = calculateSegmentByteRange(this.state.fragments);\n\n return {\n cts: segmentCts,\n dts: segmentDts,\n duration: segmentDuration,\n offset,\n size,\n };\n }\n\n // Application: Add fragment to accumulation (mechanism)\n addFragment(fragment: Fragment, fragmentData: FragmentTimingData): void {\n if (this.state.type === \"idle\") {\n // Start accumulation - invariant: video segments must start on keyframe\n const startPts = this.getStartPts(fragmentData);\n const startDts = this.getStartDts(fragmentData);\n this.state = {\n type: \"accumulating\",\n startPts,\n startDts,\n fragments: [{ fragment, fragmentData }],\n };\n } else {\n // Continue accumulation\n this.state.fragments.push({ fragment, fragmentData });\n }\n }\n\n // Application: Reset accumulation (mechanism)\n reset(): void {\n this.state = { type: \"idle\" };\n }\n\n // Application: Start new segment with keyframe (mechanism)\n startNewSegment(keyframe: { pts: number; dts: number }): void {\n this.state = {\n type: \"accumulating\",\n startPts: keyframe.pts,\n startDts: keyframe.dts,\n fragments: [],\n };\n }\n\n // Query: Get current state\n getState(): SegmentAccumulationState {\n return this.state;\n }\n\n // Query: Check if accumulating\n isAccumulating(): boolean {\n return this.state.type === \"accumulating\";\n }\n\n // Private helpers\n private calculateAccumulatedDurationMs(): number {\n if (this.state.type !== \"accumulating\") {\n return 0;\n }\n\n const lastFrag = this.state.fragments[this.state.fragments.length - 1]!;\n const lastPacket = this.getLastPacket(lastFrag.fragmentData);\n const endCts = convertTimestamp(\n lastPacket.pts + (lastPacket.duration || 0),\n this.context.timebase,\n this.context.timescale,\n );\n const startCts = convertTimestamp(\n this.state.startPts,\n this.context.timebase,\n this.context.timescale,\n );\n return durationMsFromTimescale(endCts - startCts, this.context.timescale);\n }\n\n private calculateSegmentDuration(\n segmentCts: number,\n nextBoundary: { pts: number } | null,\n ): number {\n if (nextBoundary) {\n const nextSegmentCts = convertTimestamp(\n nextBoundary.pts,\n this.context.timebase,\n this.context.timescale,\n );\n return nextSegmentCts - segmentCts;\n }\n\n // Last segment: duration to end of all packets\n // Use pre-cached streamPackets (Performance Through Caching)\n const sortedPackets = [...this.context.streamPackets].sort(\n (a, b) => a.pts - b.pts,\n );\n const lastPacket = sortedPackets[sortedPackets.length - 1]!;\n const streamEnd = convertTimestamp(\n lastPacket.pts + (lastPacket.duration || 0),\n this.context.timebase,\n this.context.timescale,\n );\n return streamEnd - segmentCts;\n }\n\n private getStartPts(fragmentData: FragmentTimingData): number {\n if (this.context.streamType === \"video\") {\n const keyframe = fragmentData.videoPackets.find((p) => p.isKeyframe);\n return keyframe?.pts ?? fragmentData.videoPackets[0]?.pts ?? 0;\n } else {\n return fragmentData.audioPackets[0]?.pts ?? 
0;\n }\n }\n\n private getStartDts(fragmentData: FragmentTimingData): number {\n if (this.context.streamType === \"video\") {\n const keyframe = fragmentData.videoPackets.find((p) => p.isKeyframe);\n return keyframe?.dts ?? fragmentData.videoPackets[0]?.dts ?? 0;\n } else {\n return fragmentData.audioPackets[0]?.dts ?? 0;\n }\n }\n\n private getLastPacket(fragmentData: FragmentTimingData): {\n pts: number;\n duration?: number;\n } {\n if (this.context.streamType === \"video\") {\n const packets = fragmentData.videoPackets;\n return packets[packets.length - 1]!;\n } else {\n const packets = fragmentData.audioPackets;\n return packets[packets.length - 1]!;\n }\n }\n}\n\n// Helper function to extract fragment data (init + media fragment)\n\nexport const generateFragmentIndex = async (\n inputStream: Readable,\n startTimeOffsetMs?: number,\n trackIdMapping?: Record<number, number>, // Map from source track ID to desired track ID\n options?: { tmpDir?: string },\n): Promise<Record<number, TrackFragmentIndex>> => {\n // Step 1: Create a streaming parser that detects fragment boundaries\n const parser = new StreamingBoxParser();\n\n // Step 2: Write stream to a temp file to avoid buffering the entire MP4 in memory\n const tempDir = options?.tmpDir ?? tmpdir();\n const tempFile = join(\n tempDir,\n `ef-probe-${randomBytes(8).toString(\"hex\")}.mp4`,\n );\n let totalSize = 0;\n\n const dest = new Writable({\n write(chunk, _encoding, callback) {\n totalSize += chunk.length;\n callback();\n },\n });\n\n const tempWriteStream = createWriteStream(tempFile);\n\n // Split input through both parser (for fragment detection) and temp file (for probing)\n // We must tee the stream: pipe inputStream → parser → dest, and also write to tempFile\n const teeTransform = new Transform({\n transform(chunk, _encoding, callback) {\n tempWriteStream.write(chunk);\n this.push(chunk);\n callback();\n },\n flush(callback) {\n tempWriteStream.end(() => callback());\n },\n });\n\n // Process the stream through both parser and collection\n await pipeline(inputStream, teeTransform, parser, dest);\n const fragments = parser.getFragments();\n\n // If no data was collected, clean up and return empty result\n if (totalSize === 0) {\n await unlink(tempFile).catch(() => {});\n return {};\n }\n\n // Step 3: Use ffprobe to analyze the temp file for track metadata (avoids in-memory buffering)\n let probe: PacketProbe;\n try {\n probe = await PacketProbe.probePath(tempFile);\n } catch (error) {\n console.warn(\"Failed to probe stream with ffprobe:\", error);\n await unlink(tempFile).catch(() => {});\n return {};\n } finally {\n await unlink(tempFile).catch(() => {});\n }\n\n const videoStreams = probe.videoStreams;\n const audioStreams = probe.audioStreams;\n\n const trackIndexes: Record<number, TrackFragmentIndex> = {};\n const initFragment = fragments.find((f) => f.type === \"init\");\n const mediaFragments = fragments.filter((f) => f.type === \"media\");\n\n // Map packets to fragments using byte position for moof+mdat boundaries\n // But create contiguous segments based on keyframes\n const fragmentTimingData: FragmentTimingData[] = [];\n\n for (\n let fragmentIndex = 0;\n fragmentIndex < mediaFragments.length;\n fragmentIndex++\n ) {\n const fragment = mediaFragments[fragmentIndex]!;\n\n // Find packets that belong to this fragment based on byte position (moof+mdat boundaries)\n const fragmentStart = fragment.offset;\n const fragmentEnd = fragment.offset + fragment.size;\n\n const videoPackets = probe.packets\n .filter((packet) => 
{\n const stream = videoStreams.find(\n (s) => s.index === packet.stream_index,\n );\n return (\n stream?.codec_type === \"video\" &&\n packet.pos !== undefined &&\n packet.pos >= fragmentStart &&\n packet.pos < fragmentEnd\n );\n })\n .map((packet) => ({\n pts: packet.pts,\n dts: packet.dts,\n duration: packet.duration,\n isKeyframe: packet.flags?.includes(\"K\") ?? false,\n }));\n\n const audioPackets = probe.packets\n .filter((packet) => {\n const stream = audioStreams.find(\n (s) => s.index === packet.stream_index,\n );\n return (\n stream?.codec_type === \"audio\" &&\n packet.pos !== undefined &&\n packet.pos >= fragmentStart &&\n packet.pos < fragmentEnd\n );\n })\n .map((packet) => ({\n pts: packet.pts,\n dts: packet.dts,\n duration: packet.duration,\n }));\n\n fragmentTimingData.push({\n fragmentIndex,\n videoPackets,\n audioPackets,\n });\n }\n\n // Unified track processing function (One Direction of Truth)\n const processTrack = (\n streamIndex: number,\n streamType: \"video\" | \"audio\",\n timebase: Timebase,\n allPackets: ProbePacket[],\n ): TrackSegment[] => {\n const segments: TrackSegment[] = [];\n const timescale = Math.round(timebase.den / timebase.num);\n\n // Cache filtered packets once (Performance Through Caching)\n const streamPackets = allPackets.filter(\n (p) => p.stream_index === streamIndex,\n );\n\n const context: TrackProcessingContext = {\n timebase,\n timescale,\n fragmentTimingData,\n mediaFragments,\n streamPackets,\n streamType,\n streamIndex,\n };\n\n const accumulator = new SegmentAccumulator(\n context,\n MIN_SEGMENT_DURATION_MS,\n );\n\n for (let i = 0; i < fragmentTimingData.length; i++) {\n const fragmentData = fragmentTimingData[i]!;\n const fragment = mediaFragments[fragmentData.fragmentIndex]!;\n const packets =\n streamType === \"video\"\n ? 
fragmentData.videoPackets\n : fragmentData.audioPackets;\n\n log(\n `Fragment ${fragmentData.fragmentIndex}: ${packets.length} ${streamType} packets`,\n );\n\n if (packets.length === 0) {\n log(\n `Skipping fragment ${fragmentData.fragmentIndex} - no ${streamType} packets`,\n );\n continue;\n }\n\n if (streamType === \"video\") {\n // Video: segments must start on keyframes\n const keyframe = fragmentData.videoPackets.find((p) => p.isKeyframe);\n const hasKeyframe = keyframe !== undefined;\n\n // Start new segment on keyframe if none exists\n if (!accumulator.isAccumulating() && hasKeyframe) {\n accumulator.startNewSegment({\n pts: keyframe.pts,\n dts: keyframe.dts,\n });\n accumulator.addFragment(fragment, fragmentData);\n continue;\n }\n\n // Skip fragments without keyframes if no segment started\n if (!accumulator.isAccumulating()) {\n continue;\n }\n\n // Check if we should finalize when encountering a new keyframe\n if (hasKeyframe) {\n if (\n accumulator.shouldFinalize({ pts: keyframe.pts, dts: keyframe.dts })\n ) {\n // Duration should be to the start of this keyframe (start of next segment)\n const nextBoundary = { pts: keyframe.pts };\n const evaluation = accumulator.evaluateSegment(nextBoundary);\n if (evaluation) {\n segments.push(evaluation);\n }\n accumulator.reset();\n accumulator.startNewSegment({\n pts: keyframe.pts,\n dts: keyframe.dts,\n });\n }\n }\n } else {\n // Audio: no keyframe requirement, just duration-based\n if (!accumulator.isAccumulating()) {\n accumulator.addFragment(fragment, fragmentData);\n continue;\n }\n\n // Check if we should finalize based on accumulated duration\n if (accumulator.shouldFinalize(null)) {\n // Duration should be to the start of this fragment (start of next segment)\n const nextBoundary = { pts: fragmentData.audioPackets[0]!.pts };\n const evaluation = accumulator.evaluateSegment(nextBoundary);\n if (evaluation) {\n segments.push(evaluation);\n }\n accumulator.reset();\n }\n }\n\n // Add fragment to current segment\n accumulator.addFragment(fragment, fragmentData);\n }\n\n // Finalize any remaining accumulated fragments\n if (accumulator.isAccumulating()) {\n const evaluation = accumulator.evaluateSegment(null);\n if (evaluation) {\n segments.push(evaluation);\n }\n }\n\n return segments;\n };\n\n // Step 4: Process video tracks using ffprobe data\n for (const videoStream of videoStreams) {\n // Get timebase for this stream to convert timestamps\n const timebase = probe.videoTimebase;\n if (!timebase) {\n console.warn(\"No timebase found for video stream\");\n continue;\n }\n\n const timescale = Math.round(timebase.den / timebase.num);\n\n // Cache filtered packets once (Performance Through Caching)\n const streamPackets = (probe.packets as ProbePacket[]).filter(\n (p) => p.stream_index === videoStream.index,\n );\n const keyframeCount = streamPackets.filter((p) =>\n p.flags?.includes(\"K\"),\n ).length;\n const totalSampleCount = streamPackets.length;\n\n log(\n `Complete stream has ${streamPackets.length} video packets, ${keyframeCount} keyframes for stream ${videoStream.index}`,\n );\n\n // Calculate per-track timing offset from first packet for timeline mapping\n let trackStartTimeOffsetMs: number | undefined;\n if (streamPackets.length > 0) {\n log(\n `First video packet dts_time: ${streamPackets[0]!.dts_time}, pts_time: ${streamPackets[0]!.pts_time}`,\n );\n const presentationTime = streamPackets[0]!.pts_time;\n if (Math.abs(presentationTime) > 0.01) {\n trackStartTimeOffsetMs = presentationTime * MS_PER_SECOND;\n }\n }\n if 
(startTimeOffsetMs !== undefined) {\n trackStartTimeOffsetMs = startTimeOffsetMs;\n }\n\n // Process fragments to create segments with minimum duration\n const segments = processTrack(\n videoStream.index,\n \"video\",\n timebase,\n probe.packets as ProbePacket[],\n );\n\n // Calculate total duration from cached stream packets (inclusive of last frame duration)\n let totalDuration = 0;\n if (streamPackets.length > 0) {\n const firstPacket = streamPackets[0]!;\n const lastPacket = streamPackets[streamPackets.length - 1]!;\n const firstPts = convertTimestamp(firstPacket.pts, timebase, timescale);\n const lastPts = convertTimestamp(lastPacket.pts, timebase, timescale);\n const lastDuration = convertTimestamp(\n lastPacket.duration ?? 0,\n timebase,\n timescale,\n );\n totalDuration = lastPts - firstPts + lastDuration;\n }\n\n const finalTrackId =\n trackIdMapping?.[videoStream.index] ?? videoStream.index + 1;\n trackIndexes[finalTrackId] = {\n track: finalTrackId,\n type: \"video\",\n width: videoStream.coded_width || videoStream.width,\n height: videoStream.coded_height || videoStream.height,\n timescale: timescale,\n sample_count: totalSampleCount,\n codec: constructH264CodecString(\n videoStream.codec_tag_string,\n videoStream.profile,\n videoStream.level,\n ),\n duration: totalDuration,\n startTimeOffsetMs: trackStartTimeOffsetMs,\n initSegment: {\n offset: 0,\n size: initFragment?.size || 0,\n },\n segments,\n };\n }\n\n // Step 5: Process audio tracks using ffprobe data\n for (const audioStream of audioStreams) {\n // Get timebase for this stream to convert timestamps\n const timebase = probe.audioTimebase;\n if (!timebase) {\n console.warn(\"No timebase found for audio stream\");\n continue;\n }\n\n const timescale = Math.round(timebase.den / timebase.num);\n\n // Cache filtered packets once (Performance Through Caching)\n const streamPackets = (probe.packets as ProbePacket[]).filter(\n (p) => p.stream_index === audioStream.index,\n );\n const totalSampleCount = streamPackets.length;\n\n // Calculate per-track timing offset from first packet for timeline mapping\n let trackStartTimeOffsetMs: number | undefined;\n if (streamPackets.length > 0) {\n const presentationTime = streamPackets[0]!.pts_time;\n if (Math.abs(presentationTime) > 0.01) {\n trackStartTimeOffsetMs = presentationTime * MS_PER_SECOND;\n }\n }\n if (startTimeOffsetMs !== undefined) {\n trackStartTimeOffsetMs = startTimeOffsetMs;\n }\n\n // Process fragments to create segments with minimum duration\n const segments = processTrack(\n audioStream.index,\n \"audio\",\n timebase,\n probe.packets as ProbePacket[],\n );\n\n // Calculate total duration\n const totalDuration = segments.reduce((sum, seg) => sum + seg.duration, 0);\n\n const finalTrackId =\n trackIdMapping?.[audioStream.index] ?? 
audioStream.index + 1;\n trackIndexes[finalTrackId] = {\n track: finalTrackId,\n type: \"audio\",\n channel_count: audioStream.channels,\n sample_rate: Number(audioStream.sample_rate),\n sample_size: audioStream.bits_per_sample,\n sample_count: totalSampleCount,\n timescale: timescale,\n codec: audioStream.codec_tag_string || audioStream.codec_name || \"\",\n duration: totalDuration,\n startTimeOffsetMs: trackStartTimeOffsetMs,\n initSegment: {\n offset: 0,\n size: initFragment?.size || 0,\n },\n segments,\n };\n }\n\n return trackIndexes;\n};\n"],"mappings":";;;;;;;;;;;AAWA,MAAM,MAAM,MAAM,2BAA2B;AAG7C,MAAM,0BAA0B;AAChC,MAAM,gBAAgB;AA+CtB,SAAS,yBACP,gBACA,SACA,OACQ;AACR,KAAI,mBAAmB,UAAU,CAAC,WAAW,UAAU,OACrD,QAAO;CAaT,MAAM,aATqC;EACzC,UAAU;EACV,MAAM;EACN,MAAM;EACN,WAAW;EACX,YAAY;EACZ,YAAY;EACb,CAE6B;AAC9B,KAAI,CAAC,WACH,QAAO;AAQT,QAAO,GAAG,eAAe,GAJN,WAAW,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI,KAE1C,MAAM,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI;;;;;AAuBtD,IAAM,qBAAN,cAAiC,UAAU;CAQzC,cAAc;AACZ,QAAM,EAAE,YAAY,OAAO,CAAC;gBARb,OAAO,MAAM,EAAE;sBACT;mBACS,EAAE;qBACS;wBAClB;oBACY,EAAE;;CAMvC,WAAW,OAAe,WAA2B,UAAsB;AAEzE,OAAK,SAAS,OAAO,OAAO,CAAC,KAAK,QAAQ,MAAM,CAAC;AAGjD,OAAK,YAAY;AAGjB,OAAK,KAAK,MAAM;AAChB,YAAU;;CAGZ,AAAQ,aAAa;EACnB,IAAI,eAAe;AAEnB,SAAO,KAAK,OAAO,SAAS,gBAAgB,GAAG;GAC7C,MAAM,OAAO,KAAK,OAAO,aAAa,aAAa;GACnD,MAAM,OAAO,KAAK,OACf,SAAS,eAAe,GAAG,eAAe,EAAE,CAC5C,SAAS,QAAQ;AAGpB,OAAI,SAAS,KAAK,OAAO,KAAK,KAAK,OAAO,SAAS,eAAe,KAChE;GAGF,MAAMA,MAAoB;IACxB;IACA,QAAQ,KAAK,eAAe;IAC5B;IACA,YAAY;IACb;AAED,OAAI,cAAc,IAAI,KAAK,aAAa,IAAI,OAAO,SAAS,IAAI,OAAO;AACvE,QAAK,WAAW,KAAK,IAAI;AACzB,QAAK,UAAU,IAAI;AAEnB,mBAAgB;;AAIlB,OAAK,gBAAgB;AACrB,OAAK,SAAS,KAAK,OAAO,SAAS,aAAa;;CAGlD,AAAQ,UAAU,KAAmB;AACnC,UAAQ,IAAI,MAAZ;GACE,KAAK;GACL,KAAK;AAEH,SAAK,iBAAiB,KAAK,IACzB,KAAK,gBACL,IAAI,SAAS,IAAI,KAClB;AACD;GAEF,KAAK;AACH,SAAK,cAAc;AACnB;GAEF,KAAK;AACH,QAAI,KAAK,aAAa;AAEpB,UAAK,UAAU,KAAK;MAClB,MAAM;MACN,QAAQ,KAAK,YAAY;MACzB,MAAM,IAAI,SAAS,IAAI,OAAO,KAAK,YAAY;MAC/C,YAAY,KAAK,YAAY;MAC7B,YAAY,IAAI;MACjB,CAAC;AACF,UAAK,cAAc;UAKnB,KACE,uCAAuC,IAAI,OAAO,+BACnD;AAEH;;;CAIN,OAAO,UAAsB;AAC3B,OAAK,YAAY;AAIjB,MAAI,KAAK,iBAAiB,EACxB,MAAK,UAAU,QAAQ;GACrB,MAAM;GACN,QAAQ;GACR,MAAM,KAAK;GACZ,CAAC;AAGJ,YAAU;;CAGZ,eAA2B;AACzB,SAAO,KAAK;;;AAKhB,SAAS,iBACP,KACA,UACA,WACQ;AACR,QAAO,KAAK,MAAO,MAAM,YAAa,SAAS,IAAI;;AAIrD,SAAS,wBACP,mBACA,WACQ;AACR,QAAQ,oBAAoB,YAAa;;AAI3C,SAAS,0BACP,sBACkC;CAClC,MAAM,YAAY,qBAAqB;CACvC,MAAM,WAAW,qBAAqB,qBAAqB,SAAS;AACpE,QAAO;EACL,QAAQ,UAAU,SAAS;EAC3B,MACE,SAAS,SAAS,SAClB,SAAS,SAAS,OAClB,UAAU,SAAS;EACtB;;AAsCH,IAAM,qBAAN,MAAyB;CAKvB,YAAY,SAAiC,eAAuB;eAJ1B,EAAE,MAAM,QAAQ;AAKxD,OAAK,UAAU;AACf,OAAK,gBAAgB;;CAIvB,eAAe,cAA4D;AACzE,MAAI,KAAK,MAAM,SAAS,eACtB,QAAO;EAIT,MAAM,qBADa,KAAK,gCAAgC,IACf,KAAK;AAI9C,MAAI,KAAK,QAAQ,eAAe,QAC9B,QAAO,sBAAsB,iBAAiB;MAE9C,QAAO;;CAKX,gBACE,cAC0B;AAC1B,MAAI,KAAK,MAAM,SAAS,eACtB,QAAO;EAGT,MAAM,aAAa,iBACjB,KAAK,MAAM,UACX,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd;EACD,MAAM,aAAa,iBACjB,KAAK,MAAM,UACX,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd;EACD,MAAM,kBAAkB,KAAK,yBAC3B,YACA,aACD;EACD,MAAM,EAAE,QAAQ,SAAS,0BAA0B,KAAK,MAAM,UAAU;AAExE,SAAO;GACL,KAAK;GACL,KAAK;GACL,UAAU;GACV;GACA;GACD;;CAIH,YAAY,UAAoB,cAAwC;AACtE,MAAI,KAAK,MAAM,SAAS,OAItB,MAAK,QAAQ;GACX,MAAM;GACN,UAJe,KAAK,YAAY,aAAa;GAK7C,UAJe,KAAK,YAAY,aAAa;GAK7C,WAAW,CAAC;IAAE;IAAU;IAAc,CAAC;GACxC;MAGD,MAAK,MAAM,UAAU,KAAK;GAAE;GAAU;GAAc,CAAC;;CAKzD,QAAc;AACZ,OAAK,QAAQ,EAAE,MAAM,QAAQ;;CAI/B,gBAAgB,UAA8C;AAC5D,OAAK,QAAQ;GACX,MAAM;GACN,UAAU,SAAS;GACnB,UAAU,SAAS;GACnB,WAAW,EAAE;GACd;;CAIH,WAAqC;AACnC,SAAO,KAAK;;CAId,iBAA0B;AACxB,SAAO,KAAK,MAAM,SAAS;;CAI7B,AAAQ,iCAAyC;AAC/C,MAAI,KAAK,MA
AM,SAAS,eACtB,QAAO;EAGT,MAAM,WAAW,KAAK,MAAM,UAAU,KAAK,MAAM,UAAU,SAAS;EACpE,MAAM,aAAa,KAAK,cAAc,SAAS,aAAa;AAW5D,SAAO,wBAVQ,iBACb,WAAW,OAAO,WAAW,YAAY,IACzC,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,GACgB,iBACf,KAAK,MAAM,UACX,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,EACiD,KAAK,QAAQ,UAAU;;CAG3E,AAAQ,yBACN,YACA,cACQ;AACR,MAAI,aAMF,QALuB,iBACrB,aAAa,KACb,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,GACuB;EAK1B,MAAM,gBAAgB,CAAC,GAAG,KAAK,QAAQ,cAAc,CAAC,MACnD,GAAG,MAAM,EAAE,MAAM,EAAE,IACrB;EACD,MAAM,aAAa,cAAc,cAAc,SAAS;AAMxD,SALkB,iBAChB,WAAW,OAAO,WAAW,YAAY,IACzC,KAAK,QAAQ,UACb,KAAK,QAAQ,UACd,GACkB;;CAGrB,AAAQ,YAAY,cAA0C;AAC5D,MAAI,KAAK,QAAQ,eAAe,QAE9B,QADiB,aAAa,aAAa,MAAM,MAAM,EAAE,WAAW,EACnD,OAAO,aAAa,aAAa,IAAI,OAAO;MAE7D,QAAO,aAAa,aAAa,IAAI,OAAO;;CAIhD,AAAQ,YAAY,cAA0C;AAC5D,MAAI,KAAK,QAAQ,eAAe,QAE9B,QADiB,aAAa,aAAa,MAAM,MAAM,EAAE,WAAW,EACnD,OAAO,aAAa,aAAa,IAAI,OAAO;MAE7D,QAAO,aAAa,aAAa,IAAI,OAAO;;CAIhD,AAAQ,cAAc,cAGpB;AACA,MAAI,KAAK,QAAQ,eAAe,SAAS;GACvC,MAAM,UAAU,aAAa;AAC7B,UAAO,QAAQ,QAAQ,SAAS;SAC3B;GACL,MAAM,UAAU,aAAa;AAC7B,UAAO,QAAQ,QAAQ,SAAS;;;;AAOtC,MAAa,wBAAwB,OACnC,aACA,mBACA,gBACA,YACgD;CAEhD,MAAM,SAAS,IAAI,oBAAoB;CAIvC,MAAM,WAAW,KADD,SAAS,UAAU,QAAQ,EAGzC,YAAY,YAAY,EAAE,CAAC,SAAS,MAAM,CAAC,MAC5C;CACD,IAAI,YAAY;CAEhB,MAAM,OAAO,IAAI,SAAS,EACxB,MAAM,OAAO,WAAW,UAAU;AAChC,eAAa,MAAM;AACnB,YAAU;IAEb,CAAC;CAEF,MAAM,kBAAkB,kBAAkB,SAAS;AAgBnD,OAAM,SAAS,aAZM,IAAI,UAAU;EACjC,UAAU,OAAO,WAAW,UAAU;AACpC,mBAAgB,MAAM,MAAM;AAC5B,QAAK,KAAK,MAAM;AAChB,aAAU;;EAEZ,MAAM,UAAU;AACd,mBAAgB,UAAU,UAAU,CAAC;;EAExC,CAAC,EAGwC,QAAQ,KAAK;CACvD,MAAM,YAAY,OAAO,cAAc;AAGvC,KAAI,cAAc,GAAG;AACnB,QAAM,OAAO,SAAS,CAAC,YAAY,GAAG;AACtC,SAAO,EAAE;;CAIX,IAAIC;AACJ,KAAI;AACF,UAAQ,MAAM,YAAY,UAAU,SAAS;UACtC,OAAO;AACd,UAAQ,KAAK,wCAAwC,MAAM;AAC3D,QAAM,OAAO,SAAS,CAAC,YAAY,GAAG;AACtC,SAAO,EAAE;WACD;AACR,QAAM,OAAO,SAAS,CAAC,YAAY,GAAG;;CAGxC,MAAM,eAAe,MAAM;CAC3B,MAAM,eAAe,MAAM;CAE3B,MAAMC,eAAmD,EAAE;CAC3D,MAAM,eAAe,UAAU,MAAM,MAAM,EAAE,SAAS,OAAO;CAC7D,MAAM,iBAAiB,UAAU,QAAQ,MAAM,EAAE,SAAS,QAAQ;CAIlE,MAAMC,qBAA2C,EAAE;AAEnD,MACE,IAAI,gBAAgB,GACpB,gBAAgB,eAAe,QAC/B,iBACA;EACA,MAAM,WAAW,eAAe;EAGhC,MAAM,gBAAgB,SAAS;EAC/B,MAAM,cAAc,SAAS,SAAS,SAAS;EAE/C,MAAM,eAAe,MAAM,QACxB,QAAQ,WAAW;AAIlB,UAHe,aAAa,MACzB,MAAM,EAAE,UAAU,OAAO,aAC3B,EAES,eAAe,WACvB,OAAO,QAAQ,UACf,OAAO,OAAO,iBACd,OAAO,MAAM;IAEf,CACD,KAAK,YAAY;GAChB,KAAK,OAAO;GACZ,KAAK,OAAO;GACZ,UAAU,OAAO;GACjB,YAAY,OAAO,OAAO,SAAS,IAAI,IAAI;GAC5C,EAAE;EAEL,MAAM,eAAe,MAAM,QACxB,QAAQ,WAAW;AAIlB,UAHe,aAAa,MACzB,MAAM,EAAE,UAAU,OAAO,aAC3B,EAES,eAAe,WACvB,OAAO,QAAQ,UACf,OAAO,OAAO,iBACd,OAAO,MAAM;IAEf,CACD,KAAK,YAAY;GAChB,KAAK,OAAO;GACZ,KAAK,OAAO;GACZ,UAAU,OAAO;GAClB,EAAE;AAEL,qBAAmB,KAAK;GACtB;GACA;GACA;GACD,CAAC;;CAIJ,MAAM,gBACJ,aACA,YACA,UACA,eACmB;EACnB,MAAMC,WAA2B,EAAE;EAkBnC,MAAM,cAAc,IAAI,mBAVgB;GACtC;GACA,WATgB,KAAK,MAAM,SAAS,MAAM,SAAS,IAAI;GAUvD;GACA;GACA,eAToB,WAAW,QAC9B,MAAM,EAAE,iBAAiB,YAC3B;GAQC;GACA;GACD,EAIC,wBACD;AAED,OAAK,IAAI,IAAI,GAAG,IAAI,mBAAmB,QAAQ,KAAK;GAClD,MAAM,eAAe,mBAAmB;GACxC,MAAM,WAAW,eAAe,aAAa;GAC7C,MAAM,UACJ,eAAe,UACX,aAAa,eACb,aAAa;AAEnB,OACE,YAAY,aAAa,cAAc,IAAI,QAAQ,OAAO,GAAG,WAAW,UACzE;AAED,OAAI,QAAQ,WAAW,GAAG;AACxB,QACE,qBAAqB,aAAa,cAAc,QAAQ,WAAW,UACpE;AACD;;AAGF,OAAI,eAAe,SAAS;IAE1B,MAAM,WAAW,aAAa,aAAa,MAAM,MAAM,EAAE,WAAW;IACpE,MAAM,cAAc,aAAa;AAGjC,QAAI,CAAC,YAAY,gBAAgB,IAAI,aAAa;AAChD,iBAAY,gBAAgB;MAC1B,KAAK,SAAS;MACd,KAAK,SAAS;MACf,CAAC;AACF,iBAAY,YAAY,UAAU,aAAa;AAC/C;;AAIF,QAAI,CAAC,YAAY,gBAAgB,CAC/B;AAIF,QAAI,aACF;SACE,YAAY,eAAe;MAAE,KAAK,SAAS;MAAK,KAAK,SAAS;MAAK,CAAC,EACpE;MAEA,MAAM,eAAe,EAAE,KAAK,SAAS,KAAK;MAC1C,MAAM,aAAa,YAAY,gBAAgB,aAAa;AAC5D,UAAI,WACF,UAAS,KAAK,WAAW;AAE3B,kBAAY,OAAO;AACnB,kBAAY,gBAAgB;O
AC1B,KAAK,SAAS;OACd,KAAK,SAAS;OACf,CAAC;;;UAGD;AAEL,QAAI,CAAC,YAAY,gBAAgB,EAAE;AACjC,iBAAY,YAAY,UAAU,aAAa;AAC/C;;AAIF,QAAI,YAAY,eAAe,KAAK,EAAE;KAEpC,MAAM,eAAe,EAAE,KAAK,aAAa,aAAa,GAAI,KAAK;KAC/D,MAAM,aAAa,YAAY,gBAAgB,aAAa;AAC5D,SAAI,WACF,UAAS,KAAK,WAAW;AAE3B,iBAAY,OAAO;;;AAKvB,eAAY,YAAY,UAAU,aAAa;;AAIjD,MAAI,YAAY,gBAAgB,EAAE;GAChC,MAAM,aAAa,YAAY,gBAAgB,KAAK;AACpD,OAAI,WACF,UAAS,KAAK,WAAW;;AAI7B,SAAO;;AAIT,MAAK,MAAM,eAAe,cAAc;EAEtC,MAAM,WAAW,MAAM;AACvB,MAAI,CAAC,UAAU;AACb,WAAQ,KAAK,qCAAqC;AAClD;;EAGF,MAAM,YAAY,KAAK,MAAM,SAAS,MAAM,SAAS,IAAI;EAGzD,MAAM,gBAAiB,MAAM,QAA0B,QACpD,MAAM,EAAE,iBAAiB,YAAY,MACvC;EACD,MAAM,gBAAgB,cAAc,QAAQ,MAC1C,EAAE,OAAO,SAAS,IAAI,CACvB,CAAC;EACF,MAAM,mBAAmB,cAAc;AAEvC,MACE,uBAAuB,cAAc,OAAO,kBAAkB,cAAc,wBAAwB,YAAY,QACjH;EAGD,IAAIC;AACJ,MAAI,cAAc,SAAS,GAAG;AAC5B,OACE,gCAAgC,cAAc,GAAI,SAAS,cAAc,cAAc,GAAI,WAC5F;GACD,MAAM,mBAAmB,cAAc,GAAI;AAC3C,OAAI,KAAK,IAAI,iBAAiB,GAAG,IAC/B,0BAAyB,mBAAmB;;AAGhD,MAAI,sBAAsB,OACxB,0BAAyB;EAI3B,MAAM,WAAW,aACf,YAAY,OACZ,SACA,UACA,MAAM,QACP;EAGD,IAAI,gBAAgB;AACpB,MAAI,cAAc,SAAS,GAAG;GAC5B,MAAM,cAAc,cAAc;GAClC,MAAM,aAAa,cAAc,cAAc,SAAS;GACxD,MAAM,WAAW,iBAAiB,YAAY,KAAK,UAAU,UAAU;GACvE,MAAM,UAAU,iBAAiB,WAAW,KAAK,UAAU,UAAU;GACrE,MAAM,eAAe,iBACnB,WAAW,YAAY,GACvB,UACA,UACD;AACD,mBAAgB,UAAU,WAAW;;EAGvC,MAAM,eACJ,iBAAiB,YAAY,UAAU,YAAY,QAAQ;AAC7D,eAAa,gBAAgB;GAC3B,OAAO;GACP,MAAM;GACN,OAAO,YAAY,eAAe,YAAY;GAC9C,QAAQ,YAAY,gBAAgB,YAAY;GACrC;GACX,cAAc;GACd,OAAO,yBACL,YAAY,kBACZ,YAAY,SACZ,YAAY,MACb;GACD,UAAU;GACV,mBAAmB;GACnB,aAAa;IACX,QAAQ;IACR,MAAM,cAAc,QAAQ;IAC7B;GACD;GACD;;AAIH,MAAK,MAAM,eAAe,cAAc;EAEtC,MAAM,WAAW,MAAM;AACvB,MAAI,CAAC,UAAU;AACb,WAAQ,KAAK,qCAAqC;AAClD;;EAGF,MAAM,YAAY,KAAK,MAAM,SAAS,MAAM,SAAS,IAAI;EAGzD,MAAM,gBAAiB,MAAM,QAA0B,QACpD,MAAM,EAAE,iBAAiB,YAAY,MACvC;EACD,MAAM,mBAAmB,cAAc;EAGvC,IAAIA;AACJ,MAAI,cAAc,SAAS,GAAG;GAC5B,MAAM,mBAAmB,cAAc,GAAI;AAC3C,OAAI,KAAK,IAAI,iBAAiB,GAAG,IAC/B,0BAAyB,mBAAmB;;AAGhD,MAAI,sBAAsB,OACxB,0BAAyB;EAI3B,MAAM,WAAW,aACf,YAAY,OACZ,SACA,UACA,MAAM,QACP;EAGD,MAAM,gBAAgB,SAAS,QAAQ,KAAK,QAAQ,MAAM,IAAI,UAAU,EAAE;EAE1E,MAAM,eACJ,iBAAiB,YAAY,UAAU,YAAY,QAAQ;AAC7D,eAAa,gBAAgB;GAC3B,OAAO;GACP,MAAM;GACN,eAAe,YAAY;GAC3B,aAAa,OAAO,YAAY,YAAY;GAC5C,aAAa,YAAY;GACzB,cAAc;GACH;GACX,OAAO,YAAY,oBAAoB,YAAY,cAAc;GACjE,UAAU;GACV,mBAAmB;GACnB,aAAa;IACX,QAAQ;IACR,MAAM,cAAc,QAAQ;IAC7B;GACD;GACD;;AAGH,QAAO"}
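The generateFragmentIndex map above embeds the segmentation rules in play here: video segments may only open on a keyframe, audio segments open on any fragment, and a segment finalizes at the next eligible boundary once at least MIN_SEGMENT_DURATION_MS (2000 ms) has accumulated. A minimal sketch of that accumulation rule, assuming simplified Frag and Segment shapes in place of the package's richer types:

// Sketch of the keyframe-gated accumulation rule from generateFragmentIndex.ts.
// Frag and Segment are simplified stand-ins; the real types also carry byte
// offsets, timebases, and per-packet timing.
interface Frag {
  ptsMs: number;        // presentation time of the fragment's first packet
  hasKeyframe: boolean; // meaningful for video; audio never requires one
}

interface Segment {
  startMs: number;
  durationMs: number;
  fragments: Frag[];
}

const MIN_SEGMENT_DURATION_MS = 2000; // mirrors the constant in the source

function segmentFragments(frags: Frag[], isVideo: boolean): Segment[] {
  const out: Segment[] = [];
  let current: Segment | null = null;

  for (const frag of frags) {
    // A video segment must open on a keyframe; audio opens on any fragment.
    if (!current) {
      if (!isVideo || frag.hasKeyframe) {
        current = { startMs: frag.ptsMs, durationMs: 0, fragments: [frag] };
      }
      continue;
    }
    // Finalize at the next eligible boundary once the minimum is reached;
    // duration runs to the start of the next segment, as in the source.
    const isBoundary = !isVideo || frag.hasKeyframe;
    const accumulated = frag.ptsMs - current.startMs;
    if (isBoundary && accumulated >= MIN_SEGMENT_DURATION_MS) {
      current.durationMs = accumulated;
      out.push(current);
      current = { startMs: frag.ptsMs, durationMs: 0, fragments: [frag] };
      continue;
    }
    current.fragments.push(frag);
  }

  // Flush whatever is still accumulating at end of stream.
  if (current) {
    const last = current.fragments[current.fragments.length - 1]!;
    current.durationMs = Math.max(last.ptsMs - current.startMs, 0);
    out.push(current);
  }
  return out;
}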
package/dist/generateSingleTrack.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"generateSingleTrack.cjs","names":["Probe","PassThrough","generateFragmentIndex","idempotentTask","progressTimeout: NodeJS.Timeout | null"],"sources":["../src/generateSingleTrack.ts"],"sourcesContent":["import { idempotentTask } from \"./idempotentTask.js\";\nimport debug from \"debug\";\nimport { PassThrough } from \"node:stream\";\nimport { basename } from \"node:path\";\nimport { Probe } from \"./Probe.js\";\nimport { generateFragmentIndex } from \"./generateFragmentIndex.js\";\n\nconst log = debug(\"ef:generateSingleTrack\");\n\nexport const generateSingleTrackFromPath = async (\n absolutePath: string,\n trackId: number,\n) => {\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // Map track ID (1-based) to stream index (0-based) - tracks use 1-based IDs, streams use 0-based indices\n const streamIndex = trackId - 1;\n\n if (streamIndex < 0 || streamIndex >= probe.streams.length) {\n throw new Error(\n `Track ${trackId} not found (valid tracks: 1-${probe.streams.length})`,\n );\n }\n\n // Get the track stream from FFmpeg (single track, fragmented MP4)\n const trackStream = probe.createTrackReadstream(streamIndex);\n\n // Create a PassThrough to tee the stream\n const outputStream = new PassThrough();\n const indexStream = new PassThrough();\n\n // Pipe data but DON'T end outputStream automatically - we'll control this\n trackStream.pipe(outputStream, { end: false });\n trackStream.pipe(indexStream);\n\n // Track when the source stream ends (but don't end output yet)\n let sourceStreamEnded = false;\n trackStream.on(\"end\", () => {\n sourceStreamEnded = true;\n });\n\n trackStream.on(\"error\", (error) => {\n outputStream.destroy(error);\n indexStream.destroy(error);\n });\n\n // Generate fragment index from the single-track stream\n // This will be a single-track index since we're processing isolated track\n // Map the single-track file's track ID 1 to the original multi-track ID\n const trackIdMapping = { 1: trackId }; // Single track 1 -> original trackId\n const fragmentIndexPromise = generateFragmentIndex(\n indexStream,\n undefined,\n trackIdMapping,\n );\n\n // End outputStream only after BOTH source ends AND fragment index completes\n fragmentIndexPromise\n .then(() => {\n if (sourceStreamEnded) {\n outputStream.end();\n } else {\n // If fragment index completes first, wait for stream to end\n trackStream.once(\"end\", () => {\n outputStream.end();\n });\n }\n })\n .catch((error) => {\n outputStream.destroy(error);\n });\n\n // Return both the stream and the index\n return {\n stream: outputStream,\n fragmentIndex: fragmentIndexPromise,\n };\n};\n\nexport const generateSingleTrackTask = idempotentTask({\n label: \"track-single\",\n filename: (absolutePath: string, trackId: number) =>\n `${basename(absolutePath)}.track-${trackId}.mp4`,\n runner: async (absolutePath: string, trackId: number) => {\n const result = await generateSingleTrackFromPath(absolutePath, trackId);\n\n // Create a PassThrough stream that processes fragment index in parallel\n const finalStream = new PassThrough();\n\n // Start fragment index processing immediately (don't wait for stream to end)\n const fragmentIndexPromise = result.fragmentIndex.catch((error) => {\n console.warn(\n `Fragment index generation failed for track ${trackId}:`,\n error,\n );\n // Don't fail the stream if fragment index fails\n });\n\n // Monitor progress and extend timeout based on actual work\n let progressTimeout: NodeJS.Timeout | null = 
null;\n\n const resetProgressTimeout = () => {\n if (progressTimeout) {\n clearTimeout(progressTimeout);\n }\n\n progressTimeout = setTimeout(() => {\n if (!finalStream.destroyed) {\n console.warn(\n `Progress timeout triggered for track ${trackId} - no activity for 10 seconds`,\n );\n finalStream.end();\n }\n }, 10000); // 10 second sliding timeout\n };\n\n // Start the initial timeout\n resetProgressTimeout();\n\n // Monitor data flow to detect active work\n result.stream.on(\"data\", () => {\n resetProgressTimeout(); // Reset timeout when we see data\n });\n\n result.stream.on(\"end\", () => {\n resetProgressTimeout(); // Reset timeout when stream ends\n });\n\n // Pipe data through but don't end until fragment index is ready\n result.stream.pipe(finalStream, { end: false });\n\n // Wait for fragment index to complete, then end the stream\n await fragmentIndexPromise;\n finalStream.end();\n\n // Clean up timeout\n if (progressTimeout) {\n clearTimeout(progressTimeout);\n }\n\n return finalStream;\n },\n});\n\nexport const generateSingleTrack = async (\n cacheRoot: string,\n absolutePath: string,\n url: string,\n) => {\n try {\n const trackId = new URL(`http://localhost${url}`).searchParams.get(\n \"trackId\",\n );\n if (trackId === null) {\n throw new Error(\n \"No trackId provided. It must be specified in the query string: ?trackId=0\",\n );\n }\n return await generateSingleTrackTask(\n cacheRoot,\n absolutePath,\n Number(trackId),\n );\n } catch (error) {\n console.error(error);\n console.trace(\"Error generating track\", error);\n throw error;\n }\n};\n
+
{"version":3,"file":"generateSingleTrack.cjs","names":["Probe","PassThrough","generateFragmentIndex","idempotentTask","progressTimeout: NodeJS.Timeout | null"],"sources":["../src/generateSingleTrack.ts"],"sourcesContent":["import { idempotentTask } from \"./idempotentTask.js\";\nimport debug from \"debug\";\nimport { PassThrough } from \"node:stream\";\nimport { basename } from \"node:path\";\nimport { Probe } from \"./Probe.js\";\nimport { generateFragmentIndex } from \"./generateFragmentIndex.js\";\n\nconst log = debug(\"ef:generateSingleTrack\");\n\nexport const generateSingleTrackFromPath = async (\n absolutePath: string,\n trackId: number,\n) => {\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // Map track ID (1-based) to stream index (0-based) - tracks use 1-based IDs, streams use 0-based indices\n const streamIndex = trackId - 1;\n\n if (streamIndex < 0 || streamIndex >= probe.streams.length) {\n throw new Error(\n `Track ${trackId} not found (valid tracks: 1-${probe.streams.length})`,\n );\n }\n\n // Get the track stream from FFmpeg (single track, fragmented MP4)\n const trackStream = probe.createTrackReadstream(streamIndex);\n\n // Create a PassThrough to tee the stream\n const outputStream = new PassThrough();\n const indexStream = new PassThrough();\n\n // Pipe data but DON'T end outputStream automatically - we'll control this\n trackStream.pipe(outputStream, { end: false });\n trackStream.pipe(indexStream);\n\n // Track when the source stream ends (but don't end output yet)\n let sourceStreamEnded = false;\n trackStream.on(\"end\", () => {\n sourceStreamEnded = true;\n });\n\n trackStream.on(\"error\", (error) => {\n outputStream.destroy(error);\n indexStream.destroy(error);\n });\n\n // Generate fragment index from the single-track stream\n // This will be a single-track index since we're processing isolated track\n // Map the single-track file's track ID 1 to the original multi-track ID\n const trackIdMapping = { 1: trackId }; // Single track 1 -> original trackId\n const fragmentIndexPromise = generateFragmentIndex(\n indexStream,\n undefined,\n trackIdMapping,\n );\n\n // End outputStream only after BOTH source ends AND fragment index completes\n fragmentIndexPromise\n .then(() => {\n if (sourceStreamEnded) {\n outputStream.end();\n } else {\n // If fragment index completes first, wait for stream to end\n trackStream.once(\"end\", () => {\n outputStream.end();\n });\n }\n })\n .catch((error) => {\n outputStream.destroy(error);\n });\n\n // Return both the stream and the index\n return {\n stream: outputStream,\n fragmentIndex: fragmentIndexPromise,\n };\n};\n\nexport const generateSingleTrackTask = idempotentTask({\n label: \"track-single\",\n filename: (absolutePath: string, trackId: number) =>\n `${basename(absolutePath)}.track-${trackId}.mp4`,\n runner: async (absolutePath: string, trackId: number) => {\n const result = await generateSingleTrackFromPath(absolutePath, trackId);\n\n // Create a PassThrough stream that processes fragment index in parallel\n const finalStream = new PassThrough();\n\n // Start fragment index processing immediately (don't wait for stream to end)\n const fragmentIndexPromise = result.fragmentIndex.catch((error) => {\n console.warn(\n `Fragment index generation failed for track ${trackId}:`,\n error,\n );\n // Don't fail the stream if fragment index fails\n });\n\n // Monitor progress and extend timeout based on actual work\n let progressTimeout: NodeJS.Timeout | null = 
null;\n\n const resetProgressTimeout = () => {\n if (progressTimeout) {\n clearTimeout(progressTimeout);\n }\n\n progressTimeout = setTimeout(() => {\n if (!finalStream.destroyed) {\n console.warn(\n `Progress timeout triggered for track ${trackId} - no activity for 10 seconds`,\n );\n finalStream.end();\n }\n }, 10000); // 10 second sliding timeout\n };\n\n // Start the initial timeout\n resetProgressTimeout();\n\n // Monitor data flow to detect active work\n result.stream.on(\"data\", () => {\n resetProgressTimeout(); // Reset timeout when we see data\n });\n\n result.stream.on(\"end\", () => {\n resetProgressTimeout(); // Reset timeout when stream ends\n });\n\n // Pipe data through but don't end until fragment index is ready\n result.stream.pipe(finalStream, { end: false });\n\n // Wait for fragment index to complete, then end the stream\n await fragmentIndexPromise;\n finalStream.end();\n\n // Clean up timeout\n if (progressTimeout) {\n clearTimeout(progressTimeout);\n }\n\n return finalStream;\n },\n});\n\nexport const generateSingleTrack = async (\n cacheRoot: string,\n absolutePath: string,\n url: string,\n) => {\n try {\n const trackId = new URL(`http://localhost${url}`).searchParams.get(\n \"trackId\",\n );\n if (trackId === null) {\n throw new Error(\n \"No trackId provided. It must be specified in the query string: ?trackId=0\",\n );\n }\n return await generateSingleTrackTask(\n cacheRoot,\n absolutePath,\n Number(trackId),\n );\n } catch (error) {\n console.error(error);\n console.trace(\"Error generating track\", error);\n throw error;\n }\n};\n"],"mappings":";;;;;;;;;;;;AAOA,MAAM,yBAAY,yBAAyB;AAE3C,MAAa,8BAA8B,OACzC,cACA,YACG;AACH,KAAI,oBAAoB,QAAQ,OAAO,eAAe;CAEtD,MAAM,QAAQ,MAAMA,oBAAM,UAAU,aAAa;CAGjD,MAAM,cAAc,UAAU;AAE9B,KAAI,cAAc,KAAK,eAAe,MAAM,QAAQ,OAClD,OAAM,IAAI,MACR,SAAS,QAAQ,8BAA8B,MAAM,QAAQ,OAAO,GACrE;CAIH,MAAM,cAAc,MAAM,sBAAsB,YAAY;CAG5D,MAAM,eAAe,IAAIC,yBAAa;CACtC,MAAM,cAAc,IAAIA,yBAAa;AAGrC,aAAY,KAAK,cAAc,EAAE,KAAK,OAAO,CAAC;AAC9C,aAAY,KAAK,YAAY;CAG7B,IAAI,oBAAoB;AACxB,aAAY,GAAG,aAAa;AAC1B,sBAAoB;GACpB;AAEF,aAAY,GAAG,UAAU,UAAU;AACjC,eAAa,QAAQ,MAAM;AAC3B,cAAY,QAAQ,MAAM;GAC1B;CAMF,MAAM,uBAAuBC,oDAC3B,aACA,QAHqB,EAAE,GAAG,SAAS,CAKpC;AAGD,sBACG,WAAW;AACV,MAAI,kBACF,cAAa,KAAK;MAGlB,aAAY,KAAK,aAAa;AAC5B,gBAAa,KAAK;IAClB;GAEJ,CACD,OAAO,UAAU;AAChB,eAAa,QAAQ,MAAM;GAC3B;AAGJ,QAAO;EACL,QAAQ;EACR,eAAe;EAChB;;AAGH,MAAa,0BAA0BC,sCAAe;CACpD,OAAO;CACP,WAAW,cAAsB,YAC/B,2BAAY,aAAa,CAAC,SAAS,QAAQ;CAC7C,QAAQ,OAAO,cAAsB,YAAoB;EACvD,MAAM,SAAS,MAAM,4BAA4B,cAAc,QAAQ;EAGvE,MAAM,cAAc,IAAIF,yBAAa;EAGrC,MAAM,uBAAuB,OAAO,cAAc,OAAO,UAAU;AACjE,WAAQ,KACN,8CAA8C,QAAQ,IACtD,MACD;IAED;EAGF,IAAIG,kBAAyC;EAE7C,MAAM,6BAA6B;AACjC,OAAI,gBACF,cAAa,gBAAgB;AAG/B,qBAAkB,iBAAiB;AACjC,QAAI,CAAC,YAAY,WAAW;AAC1B,aAAQ,KACN,wCAAwC,QAAQ,+BACjD;AACD,iBAAY,KAAK;;MAElB,IAAM;;AAIX,wBAAsB;AAGtB,SAAO,OAAO,GAAG,cAAc;AAC7B,yBAAsB;IACtB;AAEF,SAAO,OAAO,GAAG,aAAa;AAC5B,yBAAsB;IACtB;AAGF,SAAO,OAAO,KAAK,aAAa,EAAE,KAAK,OAAO,CAAC;AAG/C,QAAM;AACN,cAAY,KAAK;AAGjB,MAAI,gBACF,cAAa,gBAAgB;AAG/B,SAAO;;CAEV,CAAC"}
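The generateSingleTrack.cjs map above compiles generateSingleTrackFromPath, which tees the FFmpeg track stream into a consumer-facing output and an index stream, ending the output only after both the source has ended and the fragment index has settled. A self-contained sketch of that wiring, with teeUntil as a hypothetical helper name:

import { PassThrough, type Readable } from "node:stream";

// Tee pattern from generateSingleTrackFromPath: the source feeds both the
// consumer-facing output and an index copy, and the output ends only once
// BOTH the source has ended and the index promise has settled.
function teeUntil<T>(
  source: Readable,
  index: (copy: Readable) => Promise<T>,
): { stream: PassThrough; result: Promise<T> } {
  const out = new PassThrough();
  const copy = new PassThrough();

  source.pipe(out, { end: false }); // ending is deferred below
  source.pipe(copy);

  let sourceEnded = false;
  source.once("end", () => {
    sourceEnded = true;
  });
  source.once("error", (err) => {
    out.destroy(err);
    copy.destroy(err);
  });

  const result = index(copy);
  result
    .then(() => {
      // If the index finished first, wait for the source before ending.
      if (sourceEnded) out.end();
      else source.once("end", () => out.end());
    })
    .catch((err) => out.destroy(err));

  return { stream: out, result };
}

Piping with { end: false } is what lets the promise, rather than the source stream, decide when the output closes.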
package/dist/generateSingleTrack.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"generateSingleTrack.js","names":["progressTimeout: NodeJS.Timeout | null"],"sources":["../src/generateSingleTrack.ts"],"sourcesContent":["import { idempotentTask } from \"./idempotentTask.js\";\nimport debug from \"debug\";\nimport { PassThrough } from \"node:stream\";\nimport { basename } from \"node:path\";\nimport { Probe } from \"./Probe.js\";\nimport { generateFragmentIndex } from \"./generateFragmentIndex.js\";\n\nconst log = debug(\"ef:generateSingleTrack\");\n\nexport const generateSingleTrackFromPath = async (\n absolutePath: string,\n trackId: number,\n) => {\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // Map track ID (1-based) to stream index (0-based) - tracks use 1-based IDs, streams use 0-based indices\n const streamIndex = trackId - 1;\n\n if (streamIndex < 0 || streamIndex >= probe.streams.length) {\n throw new Error(\n `Track ${trackId} not found (valid tracks: 1-${probe.streams.length})`,\n );\n }\n\n // Get the track stream from FFmpeg (single track, fragmented MP4)\n const trackStream = probe.createTrackReadstream(streamIndex);\n\n // Create a PassThrough to tee the stream\n const outputStream = new PassThrough();\n const indexStream = new PassThrough();\n\n // Pipe data but DON'T end outputStream automatically - we'll control this\n trackStream.pipe(outputStream, { end: false });\n trackStream.pipe(indexStream);\n\n // Track when the source stream ends (but don't end output yet)\n let sourceStreamEnded = false;\n trackStream.on(\"end\", () => {\n sourceStreamEnded = true;\n });\n\n trackStream.on(\"error\", (error) => {\n outputStream.destroy(error);\n indexStream.destroy(error);\n });\n\n // Generate fragment index from the single-track stream\n // This will be a single-track index since we're processing isolated track\n // Map the single-track file's track ID 1 to the original multi-track ID\n const trackIdMapping = { 1: trackId }; // Single track 1 -> original trackId\n const fragmentIndexPromise = generateFragmentIndex(\n indexStream,\n undefined,\n trackIdMapping,\n );\n\n // End outputStream only after BOTH source ends AND fragment index completes\n fragmentIndexPromise\n .then(() => {\n if (sourceStreamEnded) {\n outputStream.end();\n } else {\n // If fragment index completes first, wait for stream to end\n trackStream.once(\"end\", () => {\n outputStream.end();\n });\n }\n })\n .catch((error) => {\n outputStream.destroy(error);\n });\n\n // Return both the stream and the index\n return {\n stream: outputStream,\n fragmentIndex: fragmentIndexPromise,\n };\n};\n\nexport const generateSingleTrackTask = idempotentTask({\n label: \"track-single\",\n filename: (absolutePath: string, trackId: number) =>\n `${basename(absolutePath)}.track-${trackId}.mp4`,\n runner: async (absolutePath: string, trackId: number) => {\n const result = await generateSingleTrackFromPath(absolutePath, trackId);\n\n // Create a PassThrough stream that processes fragment index in parallel\n const finalStream = new PassThrough();\n\n // Start fragment index processing immediately (don't wait for stream to end)\n const fragmentIndexPromise = result.fragmentIndex.catch((error) => {\n console.warn(\n `Fragment index generation failed for track ${trackId}:`,\n error,\n );\n // Don't fail the stream if fragment index fails\n });\n\n // Monitor progress and extend timeout based on actual work\n let progressTimeout: NodeJS.Timeout | null = null;\n\n const resetProgressTimeout = () => {\n if (progressTimeout) 
{\n clearTimeout(progressTimeout);\n }\n\n progressTimeout = setTimeout(() => {\n if (!finalStream.destroyed) {\n console.warn(\n `Progress timeout triggered for track ${trackId} - no activity for 10 seconds`,\n );\n finalStream.end();\n }\n }, 10000); // 10 second sliding timeout\n };\n\n // Start the initial timeout\n resetProgressTimeout();\n\n // Monitor data flow to detect active work\n result.stream.on(\"data\", () => {\n resetProgressTimeout(); // Reset timeout when we see data\n });\n\n result.stream.on(\"end\", () => {\n resetProgressTimeout(); // Reset timeout when stream ends\n });\n\n // Pipe data through but don't end until fragment index is ready\n result.stream.pipe(finalStream, { end: false });\n\n // Wait for fragment index to complete, then end the stream\n await fragmentIndexPromise;\n finalStream.end();\n\n // Clean up timeout\n if (progressTimeout) {\n clearTimeout(progressTimeout);\n }\n\n return finalStream;\n },\n});\n\nexport const generateSingleTrack = async (\n cacheRoot: string,\n absolutePath: string,\n url: string,\n) => {\n try {\n const trackId = new URL(`http://localhost${url}`).searchParams.get(\n \"trackId\",\n );\n if (trackId === null) {\n throw new Error(\n \"No trackId provided. It must be specified in the query string: ?trackId=0\",\n );\n }\n return await generateSingleTrackTask(\n cacheRoot,\n absolutePath,\n Number(trackId),\n );\n } catch (error) {\n console.error(error);\n console.trace(\"Error generating track\", error);\n throw error;\n }\n};\n
+
{"version":3,"file":"generateSingleTrack.js","names":["progressTimeout: NodeJS.Timeout | null"],"sources":["../src/generateSingleTrack.ts"],"sourcesContent":["import { idempotentTask } from \"./idempotentTask.js\";\nimport debug from \"debug\";\nimport { PassThrough } from \"node:stream\";\nimport { basename } from \"node:path\";\nimport { Probe } from \"./Probe.js\";\nimport { generateFragmentIndex } from \"./generateFragmentIndex.js\";\n\nconst log = debug(\"ef:generateSingleTrack\");\n\nexport const generateSingleTrackFromPath = async (\n absolutePath: string,\n trackId: number,\n) => {\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // Map track ID (1-based) to stream index (0-based) - tracks use 1-based IDs, streams use 0-based indices\n const streamIndex = trackId - 1;\n\n if (streamIndex < 0 || streamIndex >= probe.streams.length) {\n throw new Error(\n `Track ${trackId} not found (valid tracks: 1-${probe.streams.length})`,\n );\n }\n\n // Get the track stream from FFmpeg (single track, fragmented MP4)\n const trackStream = probe.createTrackReadstream(streamIndex);\n\n // Create a PassThrough to tee the stream\n const outputStream = new PassThrough();\n const indexStream = new PassThrough();\n\n // Pipe data but DON'T end outputStream automatically - we'll control this\n trackStream.pipe(outputStream, { end: false });\n trackStream.pipe(indexStream);\n\n // Track when the source stream ends (but don't end output yet)\n let sourceStreamEnded = false;\n trackStream.on(\"end\", () => {\n sourceStreamEnded = true;\n });\n\n trackStream.on(\"error\", (error) => {\n outputStream.destroy(error);\n indexStream.destroy(error);\n });\n\n // Generate fragment index from the single-track stream\n // This will be a single-track index since we're processing isolated track\n // Map the single-track file's track ID 1 to the original multi-track ID\n const trackIdMapping = { 1: trackId }; // Single track 1 -> original trackId\n const fragmentIndexPromise = generateFragmentIndex(\n indexStream,\n undefined,\n trackIdMapping,\n );\n\n // End outputStream only after BOTH source ends AND fragment index completes\n fragmentIndexPromise\n .then(() => {\n if (sourceStreamEnded) {\n outputStream.end();\n } else {\n // If fragment index completes first, wait for stream to end\n trackStream.once(\"end\", () => {\n outputStream.end();\n });\n }\n })\n .catch((error) => {\n outputStream.destroy(error);\n });\n\n // Return both the stream and the index\n return {\n stream: outputStream,\n fragmentIndex: fragmentIndexPromise,\n };\n};\n\nexport const generateSingleTrackTask = idempotentTask({\n label: \"track-single\",\n filename: (absolutePath: string, trackId: number) =>\n `${basename(absolutePath)}.track-${trackId}.mp4`,\n runner: async (absolutePath: string, trackId: number) => {\n const result = await generateSingleTrackFromPath(absolutePath, trackId);\n\n // Create a PassThrough stream that processes fragment index in parallel\n const finalStream = new PassThrough();\n\n // Start fragment index processing immediately (don't wait for stream to end)\n const fragmentIndexPromise = result.fragmentIndex.catch((error) => {\n console.warn(\n `Fragment index generation failed for track ${trackId}:`,\n error,\n );\n // Don't fail the stream if fragment index fails\n });\n\n // Monitor progress and extend timeout based on actual work\n let progressTimeout: NodeJS.Timeout | null = null;\n\n const resetProgressTimeout = () => {\n if (progressTimeout) 
{\n clearTimeout(progressTimeout);\n }\n\n progressTimeout = setTimeout(() => {\n if (!finalStream.destroyed) {\n console.warn(\n `Progress timeout triggered for track ${trackId} - no activity for 10 seconds`,\n );\n finalStream.end();\n }\n }, 10000); // 10 second sliding timeout\n };\n\n // Start the initial timeout\n resetProgressTimeout();\n\n // Monitor data flow to detect active work\n result.stream.on(\"data\", () => {\n resetProgressTimeout(); // Reset timeout when we see data\n });\n\n result.stream.on(\"end\", () => {\n resetProgressTimeout(); // Reset timeout when stream ends\n });\n\n // Pipe data through but don't end until fragment index is ready\n result.stream.pipe(finalStream, { end: false });\n\n // Wait for fragment index to complete, then end the stream\n await fragmentIndexPromise;\n finalStream.end();\n\n // Clean up timeout\n if (progressTimeout) {\n clearTimeout(progressTimeout);\n }\n\n return finalStream;\n },\n});\n\nexport const generateSingleTrack = async (\n cacheRoot: string,\n absolutePath: string,\n url: string,\n) => {\n try {\n const trackId = new URL(`http://localhost${url}`).searchParams.get(\n \"trackId\",\n );\n if (trackId === null) {\n throw new Error(\n \"No trackId provided. It must be specified in the query string: ?trackId=0\",\n );\n }\n return await generateSingleTrackTask(\n cacheRoot,\n absolutePath,\n Number(trackId),\n );\n } catch (error) {\n console.error(error);\n console.trace(\"Error generating track\", error);\n throw error;\n }\n};\n"],"mappings":";;;;;;;;AAOA,MAAM,MAAM,MAAM,yBAAyB;AAE3C,MAAa,8BAA8B,OACzC,cACA,YACG;AACH,KAAI,oBAAoB,QAAQ,OAAO,eAAe;CAEtD,MAAM,QAAQ,MAAM,MAAM,UAAU,aAAa;CAGjD,MAAM,cAAc,UAAU;AAE9B,KAAI,cAAc,KAAK,eAAe,MAAM,QAAQ,OAClD,OAAM,IAAI,MACR,SAAS,QAAQ,8BAA8B,MAAM,QAAQ,OAAO,GACrE;CAIH,MAAM,cAAc,MAAM,sBAAsB,YAAY;CAG5D,MAAM,eAAe,IAAI,aAAa;CACtC,MAAM,cAAc,IAAI,aAAa;AAGrC,aAAY,KAAK,cAAc,EAAE,KAAK,OAAO,CAAC;AAC9C,aAAY,KAAK,YAAY;CAG7B,IAAI,oBAAoB;AACxB,aAAY,GAAG,aAAa;AAC1B,sBAAoB;GACpB;AAEF,aAAY,GAAG,UAAU,UAAU;AACjC,eAAa,QAAQ,MAAM;AAC3B,cAAY,QAAQ,MAAM;GAC1B;CAMF,MAAM,uBAAuB,sBAC3B,aACA,QAHqB,EAAE,GAAG,SAAS,CAKpC;AAGD,sBACG,WAAW;AACV,MAAI,kBACF,cAAa,KAAK;MAGlB,aAAY,KAAK,aAAa;AAC5B,gBAAa,KAAK;IAClB;GAEJ,CACD,OAAO,UAAU;AAChB,eAAa,QAAQ,MAAM;GAC3B;AAGJ,QAAO;EACL,QAAQ;EACR,eAAe;EAChB;;AAGH,MAAa,0BAA0B,eAAe;CACpD,OAAO;CACP,WAAW,cAAsB,YAC/B,GAAG,SAAS,aAAa,CAAC,SAAS,QAAQ;CAC7C,QAAQ,OAAO,cAAsB,YAAoB;EACvD,MAAM,SAAS,MAAM,4BAA4B,cAAc,QAAQ;EAGvE,MAAM,cAAc,IAAI,aAAa;EAGrC,MAAM,uBAAuB,OAAO,cAAc,OAAO,UAAU;AACjE,WAAQ,KACN,8CAA8C,QAAQ,IACtD,MACD;IAED;EAGF,IAAIA,kBAAyC;EAE7C,MAAM,6BAA6B;AACjC,OAAI,gBACF,cAAa,gBAAgB;AAG/B,qBAAkB,iBAAiB;AACjC,QAAI,CAAC,YAAY,WAAW;AAC1B,aAAQ,KACN,wCAAwC,QAAQ,+BACjD;AACD,iBAAY,KAAK;;MAElB,IAAM;;AAIX,wBAAsB;AAGtB,SAAO,OAAO,GAAG,cAAc;AAC7B,yBAAsB;IACtB;AAEF,SAAO,OAAO,GAAG,aAAa;AAC5B,yBAAsB;IACtB;AAGF,SAAO,OAAO,KAAK,aAAa,EAAE,KAAK,OAAO,CAAC;AAG/C,QAAM;AACN,cAAY,KAAK;AAGjB,MAAI,gBACF,cAAa,gBAAgB;AAG/B,SAAO;;CAEV,CAAC"}
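The matching ESM build above also carries the runner's sliding inactivity timeout: every data event pushes a 10-second deadline out, and prolonged silence ends the stream instead of hanging the task. The same idiom in isolation, assuming any Node Readable as the source (withProgressTimeout is a hypothetical name):

import { PassThrough, type Readable } from "node:stream";

// Sliding inactivity timeout, as used by the track runner: every "data"
// event resets the deadline; silence for `ms` ends the stream so a stalled
// pipeline cannot hang the task forever.
function withProgressTimeout(source: Readable, ms = 10_000): PassThrough {
  const out = new PassThrough();
  let timer: NodeJS.Timeout | null = null;

  const reset = () => {
    if (timer) clearTimeout(timer);
    timer = setTimeout(() => {
      if (!out.destroyed) out.end(); // no activity for `ms`, end gracefully
    }, ms);
  };

  reset();
  source.on("data", reset);
  source.once("end", () => {
    if (timer) clearTimeout(timer);
    out.end();
  });
  source.pipe(out, { end: false }); // ending is controlled by the logic above

  return out;
}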
package/dist/idempotentTask.cjs
CHANGED
@@ -1,6 +1,5 @@
 const require_rolldown_runtime = require('./_virtual/rolldown_runtime.cjs');
 const require_md5 = require('./md5.cjs');
-const require_package = require('./package.cjs');
 let node_fs = require("node:fs");
 node_fs = require_rolldown_runtime.__toESM(node_fs);
 let debug = require("debug");
@@ -11,9 +10,12 @@ let node_fs_promises = require("node:fs/promises");
 node_fs_promises = require_rolldown_runtime.__toESM(node_fs_promises);
 let node_path = require("node:path");
 node_path = require_rolldown_runtime.__toESM(node_path);
+let node_url = require("node:url");
+node_url = require_rolldown_runtime.__toESM(node_url);
 
 //#region src/idempotentTask.ts
-const
+const _pkgDir = node_path.default.dirname((0, node_url.fileURLToPath)(require("url").pathToFileURL(__filename).href));
+const CACHE_VERSION = JSON.parse((0, node_fs.readFileSync)(node_path.default.join(_pkgDir, "../package.json"), "utf-8")).version;
 const rootValidationPromises = /* @__PURE__ */ new Map();
 async function ensureCacheVersion(cacheDirRoot) {
 const existing = rootValidationPromises.get(cacheDirRoot);
@@ -37,6 +39,26 @@ async function ensureCacheVersion(cacheDirRoot) {
 rootValidationPromises.set(cacheDirRoot, promise);
 return promise;
 }
+const MAX_CONCURRENT_RUNNERS = 4;
+let activeRunners = 0;
+const runnerQueue = [];
+function acquireRunnerSlot() {
+if (activeRunners < MAX_CONCURRENT_RUNNERS) {
+activeRunners++;
+return Promise.resolve();
+}
+return new Promise((resolve) => {
+runnerQueue.push(() => {
+activeRunners++;
+resolve();
+});
+});
+}
+function releaseRunnerSlot() {
+activeRunners--;
+const next = runnerQueue.shift();
+if (next) next();
+}
 const idempotentTask = ({ label, filename, runner }) => {
 const tasks = {};
 const downloadTasks = {};
@@ -139,7 +161,13 @@ const idempotentTask = ({ label, filename, runner }) => {
 };
 }
 log(`Running ef:${label} runner for ${resolvedCachePath}`);
-const result = await runner(absolutePath, ...args);
+await acquireRunnerSlot();
+let result;
+try {
+result = await runner(absolutePath, ...args);
+} finally {
+releaseRunnerSlot();
+}
 if (result instanceof node_stream.Readable) {
 log(`Piping task for ${resolvedCachePath} to cache`);
 const tempPath = `${resolvedCachePath}.tmp`;
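The hunk above is the substantive change in this release: the package.cjs shim is dropped in favor of reading the version out of package.json at runtime, and runner invocations are now funneled through a four-slot queue. The acquireRunnerSlot/releaseRunnerSlot pair behaves like a small counting semaphore; the same pattern as a reusable wrapper (limit is a hypothetical name, not part of the package's API):

// Counting-semaphore wrapper over an async function, mirroring the
// acquire/release pair the diff adds (MAX_CONCURRENT_RUNNERS = 4 there).
function limit<A extends unknown[], R>(
  max: number,
  fn: (...args: A) => Promise<R>,
): (...args: A) => Promise<R> {
  let active = 0;
  const queue: Array<() => void> = [];

  const acquire = (): Promise<void> => {
    if (active < max) {
      active++;
      return Promise.resolve();
    }
    // At capacity: park the caller until a slot is released.
    return new Promise((resolve) =>
      queue.push(() => {
        active++;
        resolve();
      }),
    );
  };

  const release = (): void => {
    active--;
    queue.shift()?.(); // wake the oldest waiter, if any
  };

  return async (...args: A) => {
    await acquire();
    try {
      return await fn(...args);
    } finally {
      release(); // mirrors the shipped try/finally: no slot leaks on rejection
    }
  };
}

Releasing in finally matches the diff's try/finally, so a rejected runner cannot leak its slot and starve the queue.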
package/dist/idempotentTask.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"idempotentTask.cjs","names":["storedVersion: string | null","tasks: Record<string, Promise<TaskResult>>","downloadTasks: Record<string, Promise<string>>","path","Readable","cachePath: string | null","md5: string | null","md5FilePath"],"sources":["../src/idempotentTask.ts"],"sourcesContent":["import { createWriteStream, existsSync } from \"node:fs\";\nimport path, { join } from \"node:path\";\nimport { md5FilePath } from \"./md5.js\";\nimport debug from \"debug\";\nimport { mkdir, writeFile, stat, rename, readdir, readFile, rm } from \"node:fs/promises\";\nimport { Readable } from \"node:stream\";\nimport packageJson from \"../package.json\" with { type: \"json\" };\n\nconst CACHE_VERSION = packageJson.version;\n\n// Per-root validation promises — serializes the version check within a process\n// and memoizes it so subsequent calls in the same process are free.\nconst rootValidationPromises = new Map<string, Promise<void>>();\n\nasync function ensureCacheVersion(cacheDirRoot: string): Promise<void> {\n const existing = rootValidationPromises.get(cacheDirRoot);\n if (existing) return existing;\n\n const promise = (async () => {\n const versionFile = join(cacheDirRoot, \".version\");\n let storedVersion: string | null = null;\n try {\n storedVersion = (await readFile(versionFile, \"utf-8\")).trim();\n } catch {}\n\n if (storedVersion === CACHE_VERSION) return;\n\n const log = debug(\"ef:idempotentTask\");\n log(\n `Cache version mismatch (stored: ${storedVersion ?? \"none\"}, current: ${CACHE_VERSION}) — busting computed caches in ${cacheDirRoot}`,\n );\n\n // Delete computed output directories; preserve downloaded .file entries\n const entries = await readdir(cacheDirRoot, { withFileTypes: true }).catch(\n () => [],\n );\n await Promise.all(\n entries\n .filter((e) => e.isDirectory())\n .map((e) =>\n rm(join(cacheDirRoot, e.name), {\n recursive: true,\n force: true,\n }).catch(() => {}),\n ),\n );\n\n await mkdir(cacheDirRoot, { recursive: true });\n await writeFile(versionFile, CACHE_VERSION);\n })();\n\n rootValidationPromises.set(cacheDirRoot, promise);\n return promise;\n}\n\nconst MAX_CONCURRENT_RUNNERS = 4;\nlet activeRunners = 0;\nconst runnerQueue: Array<() => void> = [];\n\nfunction acquireRunnerSlot(): Promise<void> {\n if (activeRunners < MAX_CONCURRENT_RUNNERS) {\n activeRunners++;\n return Promise.resolve();\n }\n return new Promise((resolve) => {\n runnerQueue.push(() => {\n activeRunners++;\n resolve();\n });\n });\n}\n\nfunction releaseRunnerSlot(): void {\n activeRunners--;\n const next = runnerQueue.shift();\n if (next) next();\n}\n\ninterface TaskOptions<T extends unknown[]> {\n label: string;\n filename: (absolutePath: string, ...args: T) => string;\n runner: (absolutePath: string, ...args: T) => Promise<string | Readable>;\n}\n\nexport interface TaskResult {\n md5Sum: string;\n cachePath: string;\n}\n\nexport const idempotentTask = <T extends unknown[]>({\n label,\n filename,\n runner,\n}: TaskOptions<T>) => {\n const tasks: Record<string, Promise<TaskResult>> = {};\n const downloadTasks: Record<string, Promise<string>> = {};\n\n // Helper function to validate cache file completeness\n const isValidCacheFile = async (\n filePath: string,\n allowEmpty = false,\n ): Promise<boolean> => {\n try {\n const stats = await stat(filePath);\n // File must exist and either have content or be explicitly allowed to be empty\n return allowEmpty || stats.size > 0;\n } catch {\n return false;\n }\n };\n\n return async (\n rootDir: string,\n absolutePath: string,\n 
...args: T\n ): Promise<TaskResult> => {\n const log = debug(`ef:${label}`);\n const cacheDirRoot = path.join(rootDir, \".cache\");\n await mkdir(cacheDirRoot, { recursive: true });\n await ensureCacheVersion(cacheDirRoot);\n\n log(`Running ef:${label} task for ${absolutePath} in ${rootDir}`);\n\n // Handle HTTP downloads with proper race condition protection\n if (absolutePath.startsWith(\"http://\") || absolutePath.startsWith(\"https://\")) {\n const safePath = absolutePath.replace(/[^a-zA-Z0-9]/g, \"_\");\n const downloadCachePath = path.join(\n rootDir,\n \".cache\",\n `${safePath}.file`,\n );\n\n // Check if already downloaded and valid (allow empty downloads)\n if (\n existsSync(downloadCachePath) &&\n (await isValidCacheFile(downloadCachePath, true))\n ) {\n log(`Already cached ${absolutePath}`);\n absolutePath = downloadCachePath;\n } else {\n // Use download task deduplication to prevent concurrent downloads\n const downloadKey = absolutePath;\n if (!downloadTasks[downloadKey]) {\n log(`Starting download for ${absolutePath}`);\n downloadTasks[downloadKey] = (async () => {\n try {\n const response = await fetch(absolutePath);\n if (!response.ok) {\n throw new Error(\n `Failed to fetch file from URL ${absolutePath}: ${response.status} ${response.statusText}`,\n );\n }\n\n const stream = response.body;\n if (!stream) {\n throw new Error(`No response body for URL ${absolutePath}`);\n }\n\n // Use temporary file to prevent reading incomplete downloads\n const tempPath = `${downloadCachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n\n // @ts-ignore node web stream support in typescript is incorrect about this.\n const readable = Readable.fromWeb(stream);\n readable.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n readable.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n // Atomically move completed file to final location\n await rename(tempPath, downloadCachePath);\n\n log(`Download completed for ${absolutePath}`);\n return downloadCachePath;\n } catch (error) {\n log(`Download failed for ${absolutePath}: ${error}`);\n // Clean up task reference on failure\n delete downloadTasks[downloadKey];\n throw error;\n }\n })();\n }\n\n absolutePath = await downloadTasks[downloadKey];\n // Clean up completed task\n delete downloadTasks[downloadKey];\n }\n }\n\n // Deduplicate concurrent callers by input parameters before any async work.\n // Using a synchronous key prevents the TOCTOU race where two concurrent\n // callers both pass the tasks[] check before either registers a task.\n const inputKey = JSON.stringify([absolutePath, ...args]);\n if (tasks[inputKey]) {\n log(`Returning existing ef:${label} task for ${absolutePath}`);\n return await tasks[inputKey];\n }\n\n const fullTask = (async (): Promise<TaskResult> => {\n try {\n // Try to find existing cache by scanning cache directories.\n // This avoids expensive MD5 computation when cache already exists.\n const expectedFilename = filename(absolutePath, ...args);\n let cachePath: string | null = null;\n let md5: string | null = null;\n\n const scanStartTime = Date.now();\n try {\n const cacheDirs = await readdir(cacheDirRoot, {\n withFileTypes: true,\n });\n log(\n `Scanning ${cacheDirs.length} cache directories for ${expectedFilename}`,\n );\n for (const dir of cacheDirs) {\n if (dir.isDirectory()) {\n const candidatePath = path.join(\n cacheDirRoot,\n dir.name,\n expectedFilename,\n );\n if (\n existsSync(candidatePath) &&\n (await 
isValidCacheFile(candidatePath))\n ) {\n cachePath = candidatePath;\n md5 = dir.name; // Directory name is the MD5\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Found existing cache in ${scanElapsed}ms: ${candidatePath} (skipped MD5)`,\n );\n break;\n }\n }\n }\n if (!cachePath) {\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Cache scan completed in ${scanElapsed}ms, no cache found - will compute MD5`,\n );\n }\n } catch (error) {\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Cache scan failed after ${scanElapsed}ms, will compute MD5: ${error}`,\n );\n }\n\n const resolvedMd5 =\n md5 ??\n (await (async () => {\n const md5StartTime = Date.now();\n log(`Computing MD5 for ${absolutePath}...`);\n const computed = await md5FilePath(absolutePath);\n const md5Elapsed = Date.now() - md5StartTime;\n log(`MD5 computed in ${md5Elapsed}ms: ${computed}`);\n return computed;\n })());\n\n const cacheDir = path.join(cacheDirRoot, resolvedMd5);\n log(`Cache dir: ${cacheDir}`);\n await mkdir(cacheDir, { recursive: true });\n\n const resolvedCachePath =\n cachePath ?? path.join(cacheDir, expectedFilename);\n\n // Check if cache exists and is valid (not zero-byte)\n if (\n existsSync(resolvedCachePath) &&\n (await isValidCacheFile(resolvedCachePath))\n ) {\n log(`Returning cached ef:${label} task for ${resolvedCachePath}`);\n return { cachePath: resolvedCachePath, md5Sum: resolvedMd5 };\n }\n\n log(`Running ef:${label} runner for ${resolvedCachePath}`);\n const result = await runner(absolutePath, ...args);\n\n if (result instanceof Readable) {\n log(`Piping task for ${resolvedCachePath} to cache`);\n const tempPath = `${resolvedCachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n result.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n result.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n await rename(tempPath, resolvedCachePath);\n } else {\n log(`Writing to ${resolvedCachePath}`);\n await writeFile(resolvedCachePath, result);\n }\n\n return {\n md5Sum: resolvedMd5,\n cachePath: resolvedCachePath,\n };\n } finally {\n delete tasks[inputKey];\n }\n })();\n\n tasks[inputKey] = fullTask;\n return await fullTask;\n 
};\n};\n"],"mappings":";;;;;;;;;;;;;;;AAQA,MAAM;AAIN,MAAM,yCAAyB,IAAI,KAA4B;AAE/D,eAAe,mBAAmB,cAAqC;CACrE,MAAM,WAAW,uBAAuB,IAAI,aAAa;AACzD,KAAI,SAAU,QAAO;CAErB,MAAM,WAAW,YAAY;EAC3B,MAAM,kCAAmB,cAAc,WAAW;EAClD,IAAIA,gBAA+B;AACnC,MAAI;AACF,oBAAiB,qCAAe,aAAa,QAAQ,EAAE,MAAM;UACvD;AAER,MAAI,kBAAkB,cAAe;AAGrC,qBADkB,oBAAoB,CAEpC,mCAAmC,iBAAiB,OAAO,aAAa,cAAc,iCAAiC,eACxH;EAGD,MAAM,UAAU,oCAAc,cAAc,EAAE,eAAe,MAAM,CAAC,CAAC,YAC7D,EAAE,CACT;AACD,QAAM,QAAQ,IACZ,QACG,QAAQ,MAAM,EAAE,aAAa,CAAC,CAC9B,KAAK,mDACI,cAAc,EAAE,KAAK,EAAE;GAC7B,WAAW;GACX,OAAO;GACR,CAAC,CAAC,YAAY,GAAG,CACnB,CACJ;AAED,oCAAY,cAAc,EAAE,WAAW,MAAM,CAAC;AAC9C,wCAAgB,aAAa,cAAc;KACzC;AAEJ,wBAAuB,IAAI,cAAc,QAAQ;AACjD,QAAO;;AAqCT,MAAa,kBAAuC,EAClD,OACA,UACA,aACoB;CACpB,MAAMC,QAA6C,EAAE;CACrD,MAAMC,gBAAiD,EAAE;CAGzD,MAAM,mBAAmB,OACvB,UACA,aAAa,UACQ;AACrB,MAAI;GACF,MAAM,QAAQ,iCAAW,SAAS;AAElC,UAAO,cAAc,MAAM,OAAO;UAC5B;AACN,UAAO;;;AAIX,QAAO,OACL,SACA,cACA,GAAG,SACqB;EACxB,MAAM,yBAAY,MAAM,QAAQ;EAChC,MAAM,eAAeC,kBAAK,KAAK,SAAS,SAAS;AACjD,oCAAY,cAAc,EAAE,WAAW,MAAM,CAAC;AAC9C,QAAM,mBAAmB,aAAa;AAEtC,MAAI,cAAc,MAAM,YAAY,aAAa,MAAM,UAAU;AAGjE,MAAI,aAAa,WAAW,UAAU,IAAI,aAAa,WAAW,WAAW,EAAE;GAC7E,MAAM,WAAW,aAAa,QAAQ,iBAAiB,IAAI;GAC3D,MAAM,oBAAoBA,kBAAK,KAC7B,SACA,UACA,GAAG,SAAS,OACb;AAGD,+BACa,kBAAkB,IAC5B,MAAM,iBAAiB,mBAAmB,KAAK,EAChD;AACA,QAAI,kBAAkB,eAAe;AACrC,mBAAe;UACV;IAEL,MAAM,cAAc;AACpB,QAAI,CAAC,cAAc,cAAc;AAC/B,SAAI,yBAAyB,eAAe;AAC5C,mBAAc,gBAAgB,YAAY;AACxC,UAAI;OACF,MAAM,WAAW,MAAM,MAAM,aAAa;AAC1C,WAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MACR,iCAAiC,aAAa,IAAI,SAAS,OAAO,GAAG,SAAS,aAC/E;OAGH,MAAM,SAAS,SAAS;AACxB,WAAI,CAAC,OACH,OAAM,IAAI,MAAM,4BAA4B,eAAe;OAI7D,MAAM,WAAW,GAAG,kBAAkB;OACtC,MAAM,6CAAgC,SAAS;OAG/C,MAAM,WAAWC,qBAAS,QAAQ,OAAO;AACzC,gBAAS,KAAK,YAAY;AAE1B,aAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,iBAAS,GAAG,SAAS,OAAO;AAC5B,oBAAY,GAAG,SAAS,OAAO;AAC/B,oBAAY,GAAG,gBAAgB,SAAS,CAAC;SACzC;AAGF,0CAAa,UAAU,kBAAkB;AAEzC,WAAI,0BAA0B,eAAe;AAC7C,cAAO;eACA,OAAO;AACd,WAAI,uBAAuB,aAAa,IAAI,QAAQ;AAEpD,cAAO,cAAc;AACrB,aAAM;;SAEN;;AAGN,mBAAe,MAAM,cAAc;AAEnC,WAAO,cAAc;;;EAOzB,MAAM,WAAW,KAAK,UAAU,CAAC,cAAc,GAAG,KAAK,CAAC;AACxD,MAAI,MAAM,WAAW;AACnB,OAAI,yBAAyB,MAAM,YAAY,eAAe;AAC9D,UAAO,MAAM,MAAM;;EAGrB,MAAM,YAAY,YAAiC;AACjD,OAAI;IAGF,MAAM,mBAAmB,SAAS,cAAc,GAAG,KAAK;IACxD,IAAIC,YAA2B;IAC/B,IAAIC,MAAqB;IAEzB,MAAM,gBAAgB,KAAK,KAAK;AAChC,QAAI;KACF,MAAM,YAAY,oCAAc,cAAc,EAC5C,eAAe,MAChB,CAAC;AACF,SACE,YAAY,UAAU,OAAO,yBAAyB,mBACvD;AACD,UAAK,MAAM,OAAO,UAChB,KAAI,IAAI,aAAa,EAAE;MACrB,MAAM,gBAAgBH,kBAAK,KACzB,cACA,IAAI,MACJ,iBACD;AACD,kCACa,cAAc,IACxB,MAAM,iBAAiB,cAAc,EACtC;AACA,mBAAY;AACZ,aAAM,IAAI;AAEV,WACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,MAAM,cAAc,gBAC5D;AACD;;;AAIN,SAAI,CAAC,UAEH,KACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,uCACxC;aAEI,OAAO;AAEd,SACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,wBAAwB,QAChE;;IAGH,MAAM,cACJ,OACC,OAAO,YAAY;KAClB,MAAM,eAAe,KAAK,KAAK;AAC/B,SAAI,qBAAqB,aAAa,KAAK;KAC3C,MAAM,WAAW,MAAMI,wBAAY,aAAa;AAEhD,SAAI,mBADe,KAAK,KAAK,GAAG,aACE,MAAM,WAAW;AACnD,YAAO;QACL;IAEN,MAAM,WAAWJ,kBAAK,KAAK,cAAc,YAAY;AACrD,QAAI,cAAc,WAAW;AAC7B,sCAAY,UAAU,EAAE,WAAW,MAAM,CAAC;IAE1C,MAAM,oBACJ,aAAaA,kBAAK,KAAK,UAAU,iBAAiB;AAGpD,gCACa,kBAAkB,IAC5B,MAAM,iBAAiB,kBAAkB,EAC1C;AACA,SAAI,uBAAuB,MAAM,YAAY,oBAAoB;AACjE,YAAO;MAAE,WAAW;MAAmB,QAAQ;MAAa;;AAG9D,QAAI,cAAc,MAAM,cAAc,oBAAoB;IAC1D,MAAM,SAAS,MAAM,OAAO,cAAc,GAAG,KAAK;AAElD,QAAI,kBAAkBC,sBAAU;AAC9B,SAAI,mBAAmB,kBAAkB,WAAW;KACpD,MAAM,WAAW,GAAG,kBAAkB;KACtC,MAAM,6CAAgC,SAAS;AAC/C,YAAO,KAAK,YAAY;AAExB,WAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,aAAO,GAAG,SAAS,OAAO;AAC1B,kBAAY,GAAG,SAAS,OAAO;AAC/B,kBAAY,GAAG,gBAAgB,SAAS,CAAC;OACzC;AAEF,wCAAa,UAAU,kBAAkB;WACpC;AACL,SAAI,cAAc,oBAAoB;AACtC,2CAAgB,m
BAAmB,OAAO;;AAG5C,WAAO;KACL,QAAQ;KACR,WAAW;KACZ;aACO;AACR,WAAO,MAAM;;MAEb;AAEJ,QAAM,YAAY;AAClB,SAAO,MAAM"}
+
{"version":3,"file":"idempotentTask.cjs","names":["path","CACHE_VERSION: string","storedVersion: string | null","runnerQueue: Array<() => void>","tasks: Record<string, Promise<TaskResult>>","downloadTasks: Record<string, Promise<string>>","Readable","cachePath: string | null","md5: string | null","md5FilePath","result: string | Readable"],"sources":["../src/idempotentTask.ts"],"sourcesContent":["import { createWriteStream, existsSync, readFileSync } from \"node:fs\";\nimport path, { join } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport { md5FilePath } from \"./md5.js\";\nimport debug from \"debug\";\nimport {\n mkdir,\n writeFile,\n stat,\n rename,\n readdir,\n readFile,\n rm,\n} from \"node:fs/promises\";\nimport { Readable } from \"node:stream\";\n\n// @ts-ignore - import.meta.url is available at runtime; tsconfig uses CommonJS module for type-checking only\nconst _pkgDir = path.dirname(fileURLToPath(import.meta.url));\nconst CACHE_VERSION: string = (\n JSON.parse(readFileSync(path.join(_pkgDir, \"../package.json\"), \"utf-8\")) as {\n version: string;\n }\n).version;\n\n// Per-root validation promises — serializes the version check within a process\n// and memoizes it so subsequent calls in the same process are free.\nconst rootValidationPromises = new Map<string, Promise<void>>();\n\nasync function ensureCacheVersion(cacheDirRoot: string): Promise<void> {\n const existing = rootValidationPromises.get(cacheDirRoot);\n if (existing) return existing;\n\n const promise = (async () => {\n const versionFile = join(cacheDirRoot, \".version\");\n let storedVersion: string | null = null;\n try {\n storedVersion = (await readFile(versionFile, \"utf-8\")).trim();\n } catch {}\n\n if (storedVersion === CACHE_VERSION) return;\n\n const log = debug(\"ef:idempotentTask\");\n log(\n `Cache version mismatch (stored: ${storedVersion ?? 
\"none\"}, current: ${CACHE_VERSION}) — busting computed caches in ${cacheDirRoot}`,\n );\n\n // Delete computed output directories; preserve downloaded .file entries\n const entries = await readdir(cacheDirRoot, { withFileTypes: true }).catch(\n () => [],\n );\n await Promise.all(\n entries\n .filter((e) => e.isDirectory())\n .map((e) =>\n rm(join(cacheDirRoot, e.name), {\n recursive: true,\n force: true,\n }).catch(() => {}),\n ),\n );\n\n await mkdir(cacheDirRoot, { recursive: true });\n await writeFile(versionFile, CACHE_VERSION);\n })();\n\n rootValidationPromises.set(cacheDirRoot, promise);\n return promise;\n}\n\nconst MAX_CONCURRENT_RUNNERS = 4;\nlet activeRunners = 0;\nconst runnerQueue: Array<() => void> = [];\n\nfunction acquireRunnerSlot(): Promise<void> {\n if (activeRunners < MAX_CONCURRENT_RUNNERS) {\n activeRunners++;\n return Promise.resolve();\n }\n return new Promise((resolve) => {\n runnerQueue.push(() => {\n activeRunners++;\n resolve();\n });\n });\n}\n\nfunction releaseRunnerSlot(): void {\n activeRunners--;\n const next = runnerQueue.shift();\n if (next) next();\n}\n\ninterface TaskOptions<T extends unknown[]> {\n label: string;\n filename: (absolutePath: string, ...args: T) => string;\n runner: (absolutePath: string, ...args: T) => Promise<string | Readable>;\n}\n\nexport interface TaskResult {\n md5Sum: string;\n cachePath: string;\n}\n\nexport const idempotentTask = <T extends unknown[]>({\n label,\n filename,\n runner,\n}: TaskOptions<T>) => {\n const tasks: Record<string, Promise<TaskResult>> = {};\n const downloadTasks: Record<string, Promise<string>> = {};\n\n // Helper function to validate cache file completeness\n const isValidCacheFile = async (\n filePath: string,\n allowEmpty = false,\n ): Promise<boolean> => {\n try {\n const stats = await stat(filePath);\n // File must exist and either have content or be explicitly allowed to be empty\n return allowEmpty || stats.size > 0;\n } catch {\n return false;\n }\n };\n\n return async (\n rootDir: string,\n absolutePath: string,\n ...args: T\n ): Promise<TaskResult> => {\n const log = debug(`ef:${label}`);\n const cacheDirRoot = path.join(rootDir, \".cache\");\n await mkdir(cacheDirRoot, { recursive: true });\n await ensureCacheVersion(cacheDirRoot);\n\n log(`Running ef:${label} task for ${absolutePath} in ${rootDir}`);\n\n // Handle HTTP downloads with proper race condition protection\n if (\n absolutePath.startsWith(\"http://\") ||\n absolutePath.startsWith(\"https://\")\n ) {\n const safePath = absolutePath.replace(/[^a-zA-Z0-9]/g, \"_\");\n const downloadCachePath = path.join(\n rootDir,\n \".cache\",\n `${safePath}.file`,\n );\n\n // Check if already downloaded and valid (allow empty downloads)\n if (\n existsSync(downloadCachePath) &&\n (await isValidCacheFile(downloadCachePath, true))\n ) {\n log(`Already cached ${absolutePath}`);\n absolutePath = downloadCachePath;\n } else {\n // Use download task deduplication to prevent concurrent downloads\n const downloadKey = absolutePath;\n if (!downloadTasks[downloadKey]) {\n log(`Starting download for ${absolutePath}`);\n downloadTasks[downloadKey] = (async () => {\n try {\n const response = await fetch(absolutePath);\n if (!response.ok) {\n throw new Error(\n `Failed to fetch file from URL ${absolutePath}: ${response.status} ${response.statusText}`,\n );\n }\n\n const stream = response.body;\n if (!stream) {\n throw new Error(`No response body for URL ${absolutePath}`);\n }\n\n // Use temporary file to prevent reading incomplete downloads\n const tempPath = 
`${downloadCachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n\n // @ts-ignore node web stream support in typescript is incorrect about this.\n const readable = Readable.fromWeb(stream);\n readable.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n readable.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n // Atomically move completed file to final location\n await rename(tempPath, downloadCachePath);\n\n log(`Download completed for ${absolutePath}`);\n return downloadCachePath;\n } catch (error) {\n log(`Download failed for ${absolutePath}: ${error}`);\n // Clean up task reference on failure\n delete downloadTasks[downloadKey];\n throw error;\n }\n })();\n }\n\n absolutePath = await downloadTasks[downloadKey];\n // Clean up completed task\n delete downloadTasks[downloadKey];\n }\n }\n\n // Deduplicate concurrent callers by input parameters before any async work.\n // Using a synchronous key prevents the TOCTOU race where two concurrent\n // callers both pass the tasks[] check before either registers a task.\n const inputKey = JSON.stringify([absolutePath, ...args]);\n if (tasks[inputKey]) {\n log(`Returning existing ef:${label} task for ${absolutePath}`);\n return await tasks[inputKey];\n }\n\n const fullTask = (async (): Promise<TaskResult> => {\n try {\n // Try to find existing cache by scanning cache directories.\n // This avoids expensive MD5 computation when cache already exists.\n const expectedFilename = filename(absolutePath, ...args);\n let cachePath: string | null = null;\n let md5: string | null = null;\n\n const scanStartTime = Date.now();\n try {\n const cacheDirs = await readdir(cacheDirRoot, {\n withFileTypes: true,\n });\n log(\n `Scanning ${cacheDirs.length} cache directories for ${expectedFilename}`,\n );\n for (const dir of cacheDirs) {\n if (dir.isDirectory()) {\n const candidatePath = path.join(\n cacheDirRoot,\n dir.name,\n expectedFilename,\n );\n if (\n existsSync(candidatePath) &&\n (await isValidCacheFile(candidatePath))\n ) {\n cachePath = candidatePath;\n md5 = dir.name; // Directory name is the MD5\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Found existing cache in ${scanElapsed}ms: ${candidatePath} (skipped MD5)`,\n );\n break;\n }\n }\n }\n if (!cachePath) {\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Cache scan completed in ${scanElapsed}ms, no cache found - will compute MD5`,\n );\n }\n } catch (error) {\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Cache scan failed after ${scanElapsed}ms, will compute MD5: ${error}`,\n );\n }\n\n const resolvedMd5 =\n md5 ??\n (await (async () => {\n const md5StartTime = Date.now();\n log(`Computing MD5 for ${absolutePath}...`);\n const computed = await md5FilePath(absolutePath);\n const md5Elapsed = Date.now() - md5StartTime;\n log(`MD5 computed in ${md5Elapsed}ms: ${computed}`);\n return computed;\n })());\n\n const cacheDir = path.join(cacheDirRoot, resolvedMd5);\n log(`Cache dir: ${cacheDir}`);\n await mkdir(cacheDir, { recursive: true });\n\n const resolvedCachePath =\n cachePath ?? 
path.join(cacheDir, expectedFilename);\n\n // Check if cache exists and is valid (not zero-byte)\n if (\n existsSync(resolvedCachePath) &&\n (await isValidCacheFile(resolvedCachePath))\n ) {\n log(`Returning cached ef:${label} task for ${resolvedCachePath}`);\n return { cachePath: resolvedCachePath, md5Sum: resolvedMd5 };\n }\n\n log(`Running ef:${label} runner for ${resolvedCachePath}`);\n await acquireRunnerSlot();\n let result: string | Readable;\n try {\n result = await runner(absolutePath, ...args);\n } finally {\n releaseRunnerSlot();\n }\n\n if (result instanceof Readable) {\n log(`Piping task for ${resolvedCachePath} to cache`);\n const tempPath = `${resolvedCachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n result.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n result.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n await rename(tempPath, resolvedCachePath);\n } else {\n log(`Writing to ${resolvedCachePath}`);\n await writeFile(resolvedCachePath, result);\n }\n\n return {\n md5Sum: resolvedMd5,\n cachePath: resolvedCachePath,\n };\n } finally {\n delete tasks[inputKey];\n }\n })();\n\n tasks[inputKey] = fullTask;\n return await fullTask;\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;AAiBA,MAAM,UAAUA,kBAAK,kFAAsC,CAAC;AAC5D,MAAMC,gBACJ,KAAK,gCAAmBD,kBAAK,KAAK,SAAS,kBAAkB,EAAE,QAAQ,CAAC,CAGxE;AAIF,MAAM,yCAAyB,IAAI,KAA4B;AAE/D,eAAe,mBAAmB,cAAqC;CACrE,MAAM,WAAW,uBAAuB,IAAI,aAAa;AACzD,KAAI,SAAU,QAAO;CAErB,MAAM,WAAW,YAAY;EAC3B,MAAM,kCAAmB,cAAc,WAAW;EAClD,IAAIE,gBAA+B;AACnC,MAAI;AACF,oBAAiB,qCAAe,aAAa,QAAQ,EAAE,MAAM;UACvD;AAER,MAAI,kBAAkB,cAAe;AAGrC,qBADkB,oBAAoB,CAEpC,mCAAmC,iBAAiB,OAAO,aAAa,cAAc,iCAAiC,eACxH;EAGD,MAAM,UAAU,oCAAc,cAAc,EAAE,eAAe,MAAM,CAAC,CAAC,YAC7D,EAAE,CACT;AACD,QAAM,QAAQ,IACZ,QACG,QAAQ,MAAM,EAAE,aAAa,CAAC,CAC9B,KAAK,mDACI,cAAc,EAAE,KAAK,EAAE;GAC7B,WAAW;GACX,OAAO;GACR,CAAC,CAAC,YAAY,GAAG,CACnB,CACJ;AAED,oCAAY,cAAc,EAAE,WAAW,MAAM,CAAC;AAC9C,wCAAgB,aAAa,cAAc;KACzC;AAEJ,wBAAuB,IAAI,cAAc,QAAQ;AACjD,QAAO;;AAGT,MAAM,yBAAyB;AAC/B,IAAI,gBAAgB;AACpB,MAAMC,cAAiC,EAAE;AAEzC,SAAS,oBAAmC;AAC1C,KAAI,gBAAgB,wBAAwB;AAC1C;AACA,SAAO,QAAQ,SAAS;;AAE1B,QAAO,IAAI,SAAS,YAAY;AAC9B,cAAY,WAAW;AACrB;AACA,YAAS;IACT;GACF;;AAGJ,SAAS,oBAA0B;AACjC;CACA,MAAM,OAAO,YAAY,OAAO;AAChC,KAAI,KAAM,OAAM;;AAclB,MAAa,kBAAuC,EAClD,OACA,UACA,aACoB;CACpB,MAAMC,QAA6C,EAAE;CACrD,MAAMC,gBAAiD,EAAE;CAGzD,MAAM,mBAAmB,OACvB,UACA,aAAa,UACQ;AACrB,MAAI;GACF,MAAM,QAAQ,iCAAW,SAAS;AAElC,UAAO,cAAc,MAAM,OAAO;UAC5B;AACN,UAAO;;;AAIX,QAAO,OACL,SACA,cACA,GAAG,SACqB;EACxB,MAAM,yBAAY,MAAM,QAAQ;EAChC,MAAM,eAAeL,kBAAK,KAAK,SAAS,SAAS;AACjD,oCAAY,cAAc,EAAE,WAAW,MAAM,CAAC;AAC9C,QAAM,mBAAmB,aAAa;AAEtC,MAAI,cAAc,MAAM,YAAY,aAAa,MAAM,UAAU;AAGjE,MACE,aAAa,WAAW,UAAU,IAClC,aAAa,WAAW,WAAW,EACnC;GACA,MAAM,WAAW,aAAa,QAAQ,iBAAiB,IAAI;GAC3D,MAAM,oBAAoBA,kBAAK,KAC7B,SACA,UACA,GAAG,SAAS,OACb;AAGD,+BACa,kBAAkB,IAC5B,MAAM,iBAAiB,mBAAmB,KAAK,EAChD;AACA,QAAI,kBAAkB,eAAe;AACrC,mBAAe;UACV;IAEL,MAAM,cAAc;AACpB,QAAI,CAAC,cAAc,cAAc;AAC/B,SAAI,yBAAyB,eAAe;AAC5C,mBAAc,gBAAgB,YAAY;AACxC,UAAI;OACF,MAAM,WAAW,MAAM,MAAM,aAAa;AAC1C,WAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MACR,iCAAiC,aAAa,IAAI,SAAS,OAAO,GAAG,SAAS,aAC/E;OAGH,MAAM,SAAS,SAAS;AACxB,WAAI,CAAC,OACH,OAAM,IAAI,MAAM,4BAA4B,eAAe;OAI7D,MAAM,WAAW,GAAG,kBAAkB;OACtC,MAAM,6CAAgC,SAAS;OAG/C,MAAM,WAAWM,qBAAS,QAAQ,OAAO;AACzC,gBAAS,KAAK,YAAY;AAE1B,aAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,iBAAS,GAAG,SAAS,OAAO;AAC5B,oBAAY,GAAG,SAAS,OAAO;AAC/B,oBAAY,GAAG,gBAAgB,SAAS,CAAC;SACzC;AAGF,0CAAa,UAAU,kBAAkB;AAEzC,WAAI,0BAA0B,eAAe;AAC7C,cAAO;eACA,OAAO;
AACd,WAAI,uBAAuB,aAAa,IAAI,QAAQ;AAEpD,cAAO,cAAc;AACrB,aAAM;;SAEN;;AAGN,mBAAe,MAAM,cAAc;AAEnC,WAAO,cAAc;;;EAOzB,MAAM,WAAW,KAAK,UAAU,CAAC,cAAc,GAAG,KAAK,CAAC;AACxD,MAAI,MAAM,WAAW;AACnB,OAAI,yBAAyB,MAAM,YAAY,eAAe;AAC9D,UAAO,MAAM,MAAM;;EAGrB,MAAM,YAAY,YAAiC;AACjD,OAAI;IAGF,MAAM,mBAAmB,SAAS,cAAc,GAAG,KAAK;IACxD,IAAIC,YAA2B;IAC/B,IAAIC,MAAqB;IAEzB,MAAM,gBAAgB,KAAK,KAAK;AAChC,QAAI;KACF,MAAM,YAAY,oCAAc,cAAc,EAC5C,eAAe,MAChB,CAAC;AACF,SACE,YAAY,UAAU,OAAO,yBAAyB,mBACvD;AACD,UAAK,MAAM,OAAO,UAChB,KAAI,IAAI,aAAa,EAAE;MACrB,MAAM,gBAAgBR,kBAAK,KACzB,cACA,IAAI,MACJ,iBACD;AACD,kCACa,cAAc,IACxB,MAAM,iBAAiB,cAAc,EACtC;AACA,mBAAY;AACZ,aAAM,IAAI;AAEV,WACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,MAAM,cAAc,gBAC5D;AACD;;;AAIN,SAAI,CAAC,UAEH,KACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,uCACxC;aAEI,OAAO;AAEd,SACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,wBAAwB,QAChE;;IAGH,MAAM,cACJ,OACC,OAAO,YAAY;KAClB,MAAM,eAAe,KAAK,KAAK;AAC/B,SAAI,qBAAqB,aAAa,KAAK;KAC3C,MAAM,WAAW,MAAMS,wBAAY,aAAa;AAEhD,SAAI,mBADe,KAAK,KAAK,GAAG,aACE,MAAM,WAAW;AACnD,YAAO;QACL;IAEN,MAAM,WAAWT,kBAAK,KAAK,cAAc,YAAY;AACrD,QAAI,cAAc,WAAW;AAC7B,sCAAY,UAAU,EAAE,WAAW,MAAM,CAAC;IAE1C,MAAM,oBACJ,aAAaA,kBAAK,KAAK,UAAU,iBAAiB;AAGpD,gCACa,kBAAkB,IAC5B,MAAM,iBAAiB,kBAAkB,EAC1C;AACA,SAAI,uBAAuB,MAAM,YAAY,oBAAoB;AACjE,YAAO;MAAE,WAAW;MAAmB,QAAQ;MAAa;;AAG9D,QAAI,cAAc,MAAM,cAAc,oBAAoB;AAC1D,UAAM,mBAAmB;IACzB,IAAIU;AACJ,QAAI;AACF,cAAS,MAAM,OAAO,cAAc,GAAG,KAAK;cACpC;AACR,wBAAmB;;AAGrB,QAAI,kBAAkBJ,sBAAU;AAC9B,SAAI,mBAAmB,kBAAkB,WAAW;KACpD,MAAM,WAAW,GAAG,kBAAkB;KACtC,MAAM,6CAAgC,SAAS;AAC/C,YAAO,KAAK,YAAY;AAExB,WAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,aAAO,GAAG,SAAS,OAAO;AAC1B,kBAAY,GAAG,SAAS,OAAO;AAC/B,kBAAY,GAAG,gBAAgB,SAAS,CAAC;OACzC;AAEF,wCAAa,UAAU,kBAAkB;WACpC;AACL,SAAI,cAAc,oBAAoB;AACtC,2CAAgB,mBAAmB,OAAO;;AAG5C,WAAO;KACL,QAAQ;KACR,WAAW;KACZ;aACO;AACR,WAAO,MAAM;;MAEb;AAEJ,QAAM,YAAY;AAClB,SAAO,MAAM"}
package/dist/idempotentTask.js
CHANGED
@@ -1,13 +1,14 @@
 import { md5FilePath } from "./md5.js";
-import {
-import { createWriteStream, existsSync } from "node:fs";
+import { createWriteStream, existsSync, readFileSync } from "node:fs";
 import debug from "debug";
 import { Readable } from "node:stream";
 import { mkdir, readFile, readdir, rename, rm, stat, writeFile } from "node:fs/promises";
 import path, { join } from "node:path";
+import { fileURLToPath } from "node:url";
 
 //#region src/idempotentTask.ts
-const
+const _pkgDir = path.dirname(fileURLToPath(import.meta.url));
+const CACHE_VERSION = JSON.parse(readFileSync(path.join(_pkgDir, "../package.json"), "utf-8")).version;
 const rootValidationPromises = /* @__PURE__ */ new Map();
 async function ensureCacheVersion(cacheDirRoot) {
 const existing = rootValidationPromises.get(cacheDirRoot);
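Readable summary of the hunk above: the version constant is no longer baked in through a JSON-module import; instead, package.json is located relative to the compiled module and parsed once at load time. The corresponding TypeScript, reproduced from the sourcesContent embedded in the updated idempotentTask.js.map further down:

import { readFileSync } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";

// Resolve package.json relative to the compiled module (one level above dist/)
// and read the version field once at module load.
const _pkgDir = path.dirname(fileURLToPath(import.meta.url));
const CACHE_VERSION: string = (
  JSON.parse(readFileSync(path.join(_pkgDir, "../package.json"), "utf-8")) as {
    version: string;
  }
).version;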
@@ -31,6 +32,26 @@ async function ensureCacheVersion(cacheDirRoot) {
 rootValidationPromises.set(cacheDirRoot, promise);
 return promise;
 }
+const MAX_CONCURRENT_RUNNERS = 4;
+let activeRunners = 0;
+const runnerQueue = [];
+function acquireRunnerSlot() {
+if (activeRunners < MAX_CONCURRENT_RUNNERS) {
+activeRunners++;
+return Promise.resolve();
+}
+return new Promise((resolve) => {
+runnerQueue.push(() => {
+activeRunners++;
+resolve();
+});
+});
+}
+function releaseRunnerSlot() {
+activeRunners--;
+const next = runnerQueue.shift();
+if (next) next();
+}
 const idempotentTask = ({ label, filename, runner }) => {
 const tasks = {};
 const downloadTasks = {};
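The added acquireRunnerSlot/releaseRunnerSlot pair is a small FIFO semaphore: up to MAX_CONCURRENT_RUNNERS callers proceed immediately, and later callers park a resolver in runnerQueue until a slot frees. A minimal usage sketch, assuming access to the two functions above (withRunnerSlot is illustrative and not part of the package):

// Illustrative only: acquire a slot, run the job, and always release in
// `finally` so a throwing job cannot leak its slot and starve the queue.
async function withRunnerSlot<T>(job: () => Promise<T>): Promise<T> {
  await acquireRunnerSlot();
  try {
    return await job();
  } finally {
    releaseRunnerSlot();
  }
}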
@@ -133,7 +154,13 @@ const idempotentTask = ({ label, filename, runner }) => {
 };
 }
 log(`Running ef:${label} runner for ${resolvedCachePath}`);
-const result = await runner(absolutePath, ...args);
+await acquireRunnerSlot();
+let result;
+try {
+result = await runner(absolutePath, ...args);
+} finally {
+releaseRunnerSlot();
+}
 if (result instanceof Readable) {
 log(`Piping task for ${resolvedCachePath} to cache`);
 const tempPath = `${resolvedCachePath}.tmp`;
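The trailing context shows the cache-write path: runner output that is a stream is piped to a `${resolvedCachePath}.tmp` sibling and only renamed into place once the write finishes. A minimal sketch of that pattern (writeCacheAtomically is a hypothetical helper, not part of the package):

import { createWriteStream } from "node:fs";
import { rename } from "node:fs/promises";
import { Readable } from "node:stream";
import { pipeline } from "node:stream/promises";

// Hypothetical helper: a failed pipeline leaves at most a ".tmp" file behind,
// so readers never observe a truncated final cache entry.
async function writeCacheAtomically(
  source: Readable,
  finalPath: string,
): Promise<void> {
  const tempPath = `${finalPath}.tmp`;
  await pipeline(source, createWriteStream(tempPath));
  await rename(tempPath, finalPath); // atomic replacement on POSIX filesystems
}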
package/dist/idempotentTask.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"idempotentTask.js","names":["packageJson.version","storedVersion: string | null","tasks: Record<string, Promise<TaskResult>>","downloadTasks: Record<string, Promise<string>>","cachePath: string | null","md5: string | null"],"sources":["../src/idempotentTask.ts"],"sourcesContent":["import { createWriteStream, existsSync } from \"node:fs\";\nimport path, { join } from \"node:path\";\nimport { md5FilePath } from \"./md5.js\";\nimport debug from \"debug\";\nimport { mkdir, writeFile, stat, rename, readdir, readFile, rm } from \"node:fs/promises\";\nimport { Readable } from \"node:stream\";\nimport packageJson from \"../package.json\" with { type: \"json\" };\n\nconst CACHE_VERSION = packageJson.version;\n\n// Per-root validation promises — serializes the version check within a process\n// and memoizes it so subsequent calls in the same process are free.\nconst rootValidationPromises = new Map<string, Promise<void>>();\n\nasync function ensureCacheVersion(cacheDirRoot: string): Promise<void> {\n const existing = rootValidationPromises.get(cacheDirRoot);\n if (existing) return existing;\n\n const promise = (async () => {\n const versionFile = join(cacheDirRoot, \".version\");\n let storedVersion: string | null = null;\n try {\n storedVersion = (await readFile(versionFile, \"utf-8\")).trim();\n } catch {}\n\n if (storedVersion === CACHE_VERSION) return;\n\n const log = debug(\"ef:idempotentTask\");\n log(\n `Cache version mismatch (stored: ${storedVersion ?? \"none\"}, current: ${CACHE_VERSION}) — busting computed caches in ${cacheDirRoot}`,\n );\n\n // Delete computed output directories; preserve downloaded .file entries\n const entries = await readdir(cacheDirRoot, { withFileTypes: true }).catch(\n () => [],\n );\n await Promise.all(\n entries\n .filter((e) => e.isDirectory())\n .map((e) =>\n rm(join(cacheDirRoot, e.name), {\n recursive: true,\n force: true,\n }).catch(() => {}),\n ),\n );\n\n await mkdir(cacheDirRoot, { recursive: true });\n await writeFile(versionFile, CACHE_VERSION);\n })();\n\n rootValidationPromises.set(cacheDirRoot, promise);\n return promise;\n}\n\nconst MAX_CONCURRENT_RUNNERS = 4;\nlet activeRunners = 0;\nconst runnerQueue: Array<() => void> = [];\n\nfunction acquireRunnerSlot(): Promise<void> {\n if (activeRunners < MAX_CONCURRENT_RUNNERS) {\n activeRunners++;\n return Promise.resolve();\n }\n return new Promise((resolve) => {\n runnerQueue.push(() => {\n activeRunners++;\n resolve();\n });\n });\n}\n\nfunction releaseRunnerSlot(): void {\n activeRunners--;\n const next = runnerQueue.shift();\n if (next) next();\n}\n\ninterface TaskOptions<T extends unknown[]> {\n label: string;\n filename: (absolutePath: string, ...args: T) => string;\n runner: (absolutePath: string, ...args: T) => Promise<string | Readable>;\n}\n\nexport interface TaskResult {\n md5Sum: string;\n cachePath: string;\n}\n\nexport const idempotentTask = <T extends unknown[]>({\n label,\n filename,\n runner,\n}: TaskOptions<T>) => {\n const tasks: Record<string, Promise<TaskResult>> = {};\n const downloadTasks: Record<string, Promise<string>> = {};\n\n // Helper function to validate cache file completeness\n const isValidCacheFile = async (\n filePath: string,\n allowEmpty = false,\n ): Promise<boolean> => {\n try {\n const stats = await stat(filePath);\n // File must exist and either have content or be explicitly allowed to be empty\n return allowEmpty || stats.size > 0;\n } catch {\n return false;\n }\n };\n\n return async (\n rootDir: string,\n absolutePath: string,\n ...args: T\n 
): Promise<TaskResult> => {\n const log = debug(`ef:${label}`);\n const cacheDirRoot = path.join(rootDir, \".cache\");\n await mkdir(cacheDirRoot, { recursive: true });\n await ensureCacheVersion(cacheDirRoot);\n\n log(`Running ef:${label} task for ${absolutePath} in ${rootDir}`);\n\n // Handle HTTP downloads with proper race condition protection\n if (absolutePath.startsWith(\"http://\") || absolutePath.startsWith(\"https://\")) {\n const safePath = absolutePath.replace(/[^a-zA-Z0-9]/g, \"_\");\n const downloadCachePath = path.join(\n rootDir,\n \".cache\",\n `${safePath}.file`,\n );\n\n // Check if already downloaded and valid (allow empty downloads)\n if (\n existsSync(downloadCachePath) &&\n (await isValidCacheFile(downloadCachePath, true))\n ) {\n log(`Already cached ${absolutePath}`);\n absolutePath = downloadCachePath;\n } else {\n // Use download task deduplication to prevent concurrent downloads\n const downloadKey = absolutePath;\n if (!downloadTasks[downloadKey]) {\n log(`Starting download for ${absolutePath}`);\n downloadTasks[downloadKey] = (async () => {\n try {\n const response = await fetch(absolutePath);\n if (!response.ok) {\n throw new Error(\n `Failed to fetch file from URL ${absolutePath}: ${response.status} ${response.statusText}`,\n );\n }\n\n const stream = response.body;\n if (!stream) {\n throw new Error(`No response body for URL ${absolutePath}`);\n }\n\n // Use temporary file to prevent reading incomplete downloads\n const tempPath = `${downloadCachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n\n // @ts-ignore node web stream support in typescript is incorrect about this.\n const readable = Readable.fromWeb(stream);\n readable.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n readable.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n // Atomically move completed file to final location\n await rename(tempPath, downloadCachePath);\n\n log(`Download completed for ${absolutePath}`);\n return downloadCachePath;\n } catch (error) {\n log(`Download failed for ${absolutePath}: ${error}`);\n // Clean up task reference on failure\n delete downloadTasks[downloadKey];\n throw error;\n }\n })();\n }\n\n absolutePath = await downloadTasks[downloadKey];\n // Clean up completed task\n delete downloadTasks[downloadKey];\n }\n }\n\n // Deduplicate concurrent callers by input parameters before any async work.\n // Using a synchronous key prevents the TOCTOU race where two concurrent\n // callers both pass the tasks[] check before either registers a task.\n const inputKey = JSON.stringify([absolutePath, ...args]);\n if (tasks[inputKey]) {\n log(`Returning existing ef:${label} task for ${absolutePath}`);\n return await tasks[inputKey];\n }\n\n const fullTask = (async (): Promise<TaskResult> => {\n try {\n // Try to find existing cache by scanning cache directories.\n // This avoids expensive MD5 computation when cache already exists.\n const expectedFilename = filename(absolutePath, ...args);\n let cachePath: string | null = null;\n let md5: string | null = null;\n\n const scanStartTime = Date.now();\n try {\n const cacheDirs = await readdir(cacheDirRoot, {\n withFileTypes: true,\n });\n log(\n `Scanning ${cacheDirs.length} cache directories for ${expectedFilename}`,\n );\n for (const dir of cacheDirs) {\n if (dir.isDirectory()) {\n const candidatePath = path.join(\n cacheDirRoot,\n dir.name,\n expectedFilename,\n );\n if (\n existsSync(candidatePath) &&\n (await 
isValidCacheFile(candidatePath))\n ) {\n cachePath = candidatePath;\n md5 = dir.name; // Directory name is the MD5\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Found existing cache in ${scanElapsed}ms: ${candidatePath} (skipped MD5)`,\n );\n break;\n }\n }\n }\n if (!cachePath) {\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Cache scan completed in ${scanElapsed}ms, no cache found - will compute MD5`,\n );\n }\n } catch (error) {\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Cache scan failed after ${scanElapsed}ms, will compute MD5: ${error}`,\n );\n }\n\n const resolvedMd5 =\n md5 ??\n (await (async () => {\n const md5StartTime = Date.now();\n log(`Computing MD5 for ${absolutePath}...`);\n const computed = await md5FilePath(absolutePath);\n const md5Elapsed = Date.now() - md5StartTime;\n log(`MD5 computed in ${md5Elapsed}ms: ${computed}`);\n return computed;\n })());\n\n const cacheDir = path.join(cacheDirRoot, resolvedMd5);\n log(`Cache dir: ${cacheDir}`);\n await mkdir(cacheDir, { recursive: true });\n\n const resolvedCachePath =\n cachePath ?? path.join(cacheDir, expectedFilename);\n\n // Check if cache exists and is valid (not zero-byte)\n if (\n existsSync(resolvedCachePath) &&\n (await isValidCacheFile(resolvedCachePath))\n ) {\n log(`Returning cached ef:${label} task for ${resolvedCachePath}`);\n return { cachePath: resolvedCachePath, md5Sum: resolvedMd5 };\n }\n\n log(`Running ef:${label} runner for ${resolvedCachePath}`);\n const result = await runner(absolutePath, ...args);\n\n if (result instanceof Readable) {\n log(`Piping task for ${resolvedCachePath} to cache`);\n const tempPath = `${resolvedCachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n result.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n result.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n await rename(tempPath, resolvedCachePath);\n } else {\n log(`Writing to ${resolvedCachePath}`);\n await writeFile(resolvedCachePath, result);\n }\n\n return {\n md5Sum: resolvedMd5,\n cachePath: resolvedCachePath,\n };\n } finally {\n delete tasks[inputKey];\n }\n })();\n\n tasks[inputKey] = fullTask;\n return await fullTask;\n 
};\n};\n"],"mappings":";;;;;;;;;AAQA,MAAM,gBAAgBA;AAItB,MAAM,yCAAyB,IAAI,KAA4B;AAE/D,eAAe,mBAAmB,cAAqC;CACrE,MAAM,WAAW,uBAAuB,IAAI,aAAa;AACzD,KAAI,SAAU,QAAO;CAErB,MAAM,WAAW,YAAY;EAC3B,MAAM,cAAc,KAAK,cAAc,WAAW;EAClD,IAAIC,gBAA+B;AACnC,MAAI;AACF,oBAAiB,MAAM,SAAS,aAAa,QAAQ,EAAE,MAAM;UACvD;AAER,MAAI,kBAAkB,cAAe;AAGrC,EADY,MAAM,oBAAoB,CAEpC,mCAAmC,iBAAiB,OAAO,aAAa,cAAc,iCAAiC,eACxH;EAGD,MAAM,UAAU,MAAM,QAAQ,cAAc,EAAE,eAAe,MAAM,CAAC,CAAC,YAC7D,EAAE,CACT;AACD,QAAM,QAAQ,IACZ,QACG,QAAQ,MAAM,EAAE,aAAa,CAAC,CAC9B,KAAK,MACJ,GAAG,KAAK,cAAc,EAAE,KAAK,EAAE;GAC7B,WAAW;GACX,OAAO;GACR,CAAC,CAAC,YAAY,GAAG,CACnB,CACJ;AAED,QAAM,MAAM,cAAc,EAAE,WAAW,MAAM,CAAC;AAC9C,QAAM,UAAU,aAAa,cAAc;KACzC;AAEJ,wBAAuB,IAAI,cAAc,QAAQ;AACjD,QAAO;;AAqCT,MAAa,kBAAuC,EAClD,OACA,UACA,aACoB;CACpB,MAAMC,QAA6C,EAAE;CACrD,MAAMC,gBAAiD,EAAE;CAGzD,MAAM,mBAAmB,OACvB,UACA,aAAa,UACQ;AACrB,MAAI;GACF,MAAM,QAAQ,MAAM,KAAK,SAAS;AAElC,UAAO,cAAc,MAAM,OAAO;UAC5B;AACN,UAAO;;;AAIX,QAAO,OACL,SACA,cACA,GAAG,SACqB;EACxB,MAAM,MAAM,MAAM,MAAM,QAAQ;EAChC,MAAM,eAAe,KAAK,KAAK,SAAS,SAAS;AACjD,QAAM,MAAM,cAAc,EAAE,WAAW,MAAM,CAAC;AAC9C,QAAM,mBAAmB,aAAa;AAEtC,MAAI,cAAc,MAAM,YAAY,aAAa,MAAM,UAAU;AAGjE,MAAI,aAAa,WAAW,UAAU,IAAI,aAAa,WAAW,WAAW,EAAE;GAC7E,MAAM,WAAW,aAAa,QAAQ,iBAAiB,IAAI;GAC3D,MAAM,oBAAoB,KAAK,KAC7B,SACA,UACA,GAAG,SAAS,OACb;AAGD,OACE,WAAW,kBAAkB,IAC5B,MAAM,iBAAiB,mBAAmB,KAAK,EAChD;AACA,QAAI,kBAAkB,eAAe;AACrC,mBAAe;UACV;IAEL,MAAM,cAAc;AACpB,QAAI,CAAC,cAAc,cAAc;AAC/B,SAAI,yBAAyB,eAAe;AAC5C,mBAAc,gBAAgB,YAAY;AACxC,UAAI;OACF,MAAM,WAAW,MAAM,MAAM,aAAa;AAC1C,WAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MACR,iCAAiC,aAAa,IAAI,SAAS,OAAO,GAAG,SAAS,aAC/E;OAGH,MAAM,SAAS,SAAS;AACxB,WAAI,CAAC,OACH,OAAM,IAAI,MAAM,4BAA4B,eAAe;OAI7D,MAAM,WAAW,GAAG,kBAAkB;OACtC,MAAM,cAAc,kBAAkB,SAAS;OAG/C,MAAM,WAAW,SAAS,QAAQ,OAAO;AACzC,gBAAS,KAAK,YAAY;AAE1B,aAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,iBAAS,GAAG,SAAS,OAAO;AAC5B,oBAAY,GAAG,SAAS,OAAO;AAC/B,oBAAY,GAAG,gBAAgB,SAAS,CAAC;SACzC;AAGF,aAAM,OAAO,UAAU,kBAAkB;AAEzC,WAAI,0BAA0B,eAAe;AAC7C,cAAO;eACA,OAAO;AACd,WAAI,uBAAuB,aAAa,IAAI,QAAQ;AAEpD,cAAO,cAAc;AACrB,aAAM;;SAEN;;AAGN,mBAAe,MAAM,cAAc;AAEnC,WAAO,cAAc;;;EAOzB,MAAM,WAAW,KAAK,UAAU,CAAC,cAAc,GAAG,KAAK,CAAC;AACxD,MAAI,MAAM,WAAW;AACnB,OAAI,yBAAyB,MAAM,YAAY,eAAe;AAC9D,UAAO,MAAM,MAAM;;EAGrB,MAAM,YAAY,YAAiC;AACjD,OAAI;IAGF,MAAM,mBAAmB,SAAS,cAAc,GAAG,KAAK;IACxD,IAAIC,YAA2B;IAC/B,IAAIC,MAAqB;IAEzB,MAAM,gBAAgB,KAAK,KAAK;AAChC,QAAI;KACF,MAAM,YAAY,MAAM,QAAQ,cAAc,EAC5C,eAAe,MAChB,CAAC;AACF,SACE,YAAY,UAAU,OAAO,yBAAyB,mBACvD;AACD,UAAK,MAAM,OAAO,UAChB,KAAI,IAAI,aAAa,EAAE;MACrB,MAAM,gBAAgB,KAAK,KACzB,cACA,IAAI,MACJ,iBACD;AACD,UACE,WAAW,cAAc,IACxB,MAAM,iBAAiB,cAAc,EACtC;AACA,mBAAY;AACZ,aAAM,IAAI;AAEV,WACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,MAAM,cAAc,gBAC5D;AACD;;;AAIN,SAAI,CAAC,UAEH,KACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,uCACxC;aAEI,OAAO;AAEd,SACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,wBAAwB,QAChE;;IAGH,MAAM,cACJ,OACC,OAAO,YAAY;KAClB,MAAM,eAAe,KAAK,KAAK;AAC/B,SAAI,qBAAqB,aAAa,KAAK;KAC3C,MAAM,WAAW,MAAM,YAAY,aAAa;AAEhD,SAAI,mBADe,KAAK,KAAK,GAAG,aACE,MAAM,WAAW;AACnD,YAAO;QACL;IAEN,MAAM,WAAW,KAAK,KAAK,cAAc,YAAY;AACrD,QAAI,cAAc,WAAW;AAC7B,UAAM,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC;IAE1C,MAAM,oBACJ,aAAa,KAAK,KAAK,UAAU,iBAAiB;AAGpD,QACE,WAAW,kBAAkB,IAC5B,MAAM,iBAAiB,kBAAkB,EAC1C;AACA,SAAI,uBAAuB,MAAM,YAAY,oBAAoB;AACjE,YAAO;MAAE,WAAW;MAAmB,QAAQ;MAAa;;AAG9D,QAAI,cAAc,MAAM,cAAc,oBAAoB;IAC1D,MAAM,SAAS,MAAM,OAAO,cAAc,GAAG,KAAK;AAElD,QAAI,kBAAkB,UAAU;AAC9B,SAAI,mBAAmB,kBAAkB,WAAW;KACpD,MAAM,WAAW,GAAG,kBAAkB;KACtC,MAAM,cAAc,kBAAkB,SAAS;AAC/C,YAAO,KAAK,YAAY;AAExB,WAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,aAAO,GAAG,SAAS,OAAO;AAC1B,kBAAY,GAAG,SAAS,OAAO;AAC/B,kBAAY,GAAG,gBAAgB,SAAS,CAAC;OACzC;AA
EF,WAAM,OAAO,UAAU,kBAAkB;WACpC;AACL,SAAI,cAAc,oBAAoB;AACtC,WAAM,UAAU,mBAAmB,OAAO;;AAG5C,WAAO;KACL,QAAQ;KACR,WAAW;KACZ;aACO;AACR,WAAO,MAAM;;MAEb;AAEJ,QAAM,YAAY;AAClB,SAAO,MAAM"}
+
{"version":3,"file":"idempotentTask.js","names":["CACHE_VERSION: string","storedVersion: string | null","runnerQueue: Array<() => void>","tasks: Record<string, Promise<TaskResult>>","downloadTasks: Record<string, Promise<string>>","cachePath: string | null","md5: string | null","result: string | Readable"],"sources":["../src/idempotentTask.ts"],"sourcesContent":["import { createWriteStream, existsSync, readFileSync } from \"node:fs\";\nimport path, { join } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\nimport { md5FilePath } from \"./md5.js\";\nimport debug from \"debug\";\nimport {\n mkdir,\n writeFile,\n stat,\n rename,\n readdir,\n readFile,\n rm,\n} from \"node:fs/promises\";\nimport { Readable } from \"node:stream\";\n\n// @ts-ignore - import.meta.url is available at runtime; tsconfig uses CommonJS module for type-checking only\nconst _pkgDir = path.dirname(fileURLToPath(import.meta.url));\nconst CACHE_VERSION: string = (\n JSON.parse(readFileSync(path.join(_pkgDir, \"../package.json\"), \"utf-8\")) as {\n version: string;\n }\n).version;\n\n// Per-root validation promises — serializes the version check within a process\n// and memoizes it so subsequent calls in the same process are free.\nconst rootValidationPromises = new Map<string, Promise<void>>();\n\nasync function ensureCacheVersion(cacheDirRoot: string): Promise<void> {\n const existing = rootValidationPromises.get(cacheDirRoot);\n if (existing) return existing;\n\n const promise = (async () => {\n const versionFile = join(cacheDirRoot, \".version\");\n let storedVersion: string | null = null;\n try {\n storedVersion = (await readFile(versionFile, \"utf-8\")).trim();\n } catch {}\n\n if (storedVersion === CACHE_VERSION) return;\n\n const log = debug(\"ef:idempotentTask\");\n log(\n `Cache version mismatch (stored: ${storedVersion ?? 
\"none\"}, current: ${CACHE_VERSION}) — busting computed caches in ${cacheDirRoot}`,\n );\n\n // Delete computed output directories; preserve downloaded .file entries\n const entries = await readdir(cacheDirRoot, { withFileTypes: true }).catch(\n () => [],\n );\n await Promise.all(\n entries\n .filter((e) => e.isDirectory())\n .map((e) =>\n rm(join(cacheDirRoot, e.name), {\n recursive: true,\n force: true,\n }).catch(() => {}),\n ),\n );\n\n await mkdir(cacheDirRoot, { recursive: true });\n await writeFile(versionFile, CACHE_VERSION);\n })();\n\n rootValidationPromises.set(cacheDirRoot, promise);\n return promise;\n}\n\nconst MAX_CONCURRENT_RUNNERS = 4;\nlet activeRunners = 0;\nconst runnerQueue: Array<() => void> = [];\n\nfunction acquireRunnerSlot(): Promise<void> {\n if (activeRunners < MAX_CONCURRENT_RUNNERS) {\n activeRunners++;\n return Promise.resolve();\n }\n return new Promise((resolve) => {\n runnerQueue.push(() => {\n activeRunners++;\n resolve();\n });\n });\n}\n\nfunction releaseRunnerSlot(): void {\n activeRunners--;\n const next = runnerQueue.shift();\n if (next) next();\n}\n\ninterface TaskOptions<T extends unknown[]> {\n label: string;\n filename: (absolutePath: string, ...args: T) => string;\n runner: (absolutePath: string, ...args: T) => Promise<string | Readable>;\n}\n\nexport interface TaskResult {\n md5Sum: string;\n cachePath: string;\n}\n\nexport const idempotentTask = <T extends unknown[]>({\n label,\n filename,\n runner,\n}: TaskOptions<T>) => {\n const tasks: Record<string, Promise<TaskResult>> = {};\n const downloadTasks: Record<string, Promise<string>> = {};\n\n // Helper function to validate cache file completeness\n const isValidCacheFile = async (\n filePath: string,\n allowEmpty = false,\n ): Promise<boolean> => {\n try {\n const stats = await stat(filePath);\n // File must exist and either have content or be explicitly allowed to be empty\n return allowEmpty || stats.size > 0;\n } catch {\n return false;\n }\n };\n\n return async (\n rootDir: string,\n absolutePath: string,\n ...args: T\n ): Promise<TaskResult> => {\n const log = debug(`ef:${label}`);\n const cacheDirRoot = path.join(rootDir, \".cache\");\n await mkdir(cacheDirRoot, { recursive: true });\n await ensureCacheVersion(cacheDirRoot);\n\n log(`Running ef:${label} task for ${absolutePath} in ${rootDir}`);\n\n // Handle HTTP downloads with proper race condition protection\n if (\n absolutePath.startsWith(\"http://\") ||\n absolutePath.startsWith(\"https://\")\n ) {\n const safePath = absolutePath.replace(/[^a-zA-Z0-9]/g, \"_\");\n const downloadCachePath = path.join(\n rootDir,\n \".cache\",\n `${safePath}.file`,\n );\n\n // Check if already downloaded and valid (allow empty downloads)\n if (\n existsSync(downloadCachePath) &&\n (await isValidCacheFile(downloadCachePath, true))\n ) {\n log(`Already cached ${absolutePath}`);\n absolutePath = downloadCachePath;\n } else {\n // Use download task deduplication to prevent concurrent downloads\n const downloadKey = absolutePath;\n if (!downloadTasks[downloadKey]) {\n log(`Starting download for ${absolutePath}`);\n downloadTasks[downloadKey] = (async () => {\n try {\n const response = await fetch(absolutePath);\n if (!response.ok) {\n throw new Error(\n `Failed to fetch file from URL ${absolutePath}: ${response.status} ${response.statusText}`,\n );\n }\n\n const stream = response.body;\n if (!stream) {\n throw new Error(`No response body for URL ${absolutePath}`);\n }\n\n // Use temporary file to prevent reading incomplete downloads\n const tempPath = 
`${downloadCachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n\n // @ts-ignore node web stream support in typescript is incorrect about this.\n const readable = Readable.fromWeb(stream);\n readable.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n readable.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n // Atomically move completed file to final location\n await rename(tempPath, downloadCachePath);\n\n log(`Download completed for ${absolutePath}`);\n return downloadCachePath;\n } catch (error) {\n log(`Download failed for ${absolutePath}: ${error}`);\n // Clean up task reference on failure\n delete downloadTasks[downloadKey];\n throw error;\n }\n })();\n }\n\n absolutePath = await downloadTasks[downloadKey];\n // Clean up completed task\n delete downloadTasks[downloadKey];\n }\n }\n\n // Deduplicate concurrent callers by input parameters before any async work.\n // Using a synchronous key prevents the TOCTOU race where two concurrent\n // callers both pass the tasks[] check before either registers a task.\n const inputKey = JSON.stringify([absolutePath, ...args]);\n if (tasks[inputKey]) {\n log(`Returning existing ef:${label} task for ${absolutePath}`);\n return await tasks[inputKey];\n }\n\n const fullTask = (async (): Promise<TaskResult> => {\n try {\n // Try to find existing cache by scanning cache directories.\n // This avoids expensive MD5 computation when cache already exists.\n const expectedFilename = filename(absolutePath, ...args);\n let cachePath: string | null = null;\n let md5: string | null = null;\n\n const scanStartTime = Date.now();\n try {\n const cacheDirs = await readdir(cacheDirRoot, {\n withFileTypes: true,\n });\n log(\n `Scanning ${cacheDirs.length} cache directories for ${expectedFilename}`,\n );\n for (const dir of cacheDirs) {\n if (dir.isDirectory()) {\n const candidatePath = path.join(\n cacheDirRoot,\n dir.name,\n expectedFilename,\n );\n if (\n existsSync(candidatePath) &&\n (await isValidCacheFile(candidatePath))\n ) {\n cachePath = candidatePath;\n md5 = dir.name; // Directory name is the MD5\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Found existing cache in ${scanElapsed}ms: ${candidatePath} (skipped MD5)`,\n );\n break;\n }\n }\n }\n if (!cachePath) {\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Cache scan completed in ${scanElapsed}ms, no cache found - will compute MD5`,\n );\n }\n } catch (error) {\n const scanElapsed = Date.now() - scanStartTime;\n log(\n `Cache scan failed after ${scanElapsed}ms, will compute MD5: ${error}`,\n );\n }\n\n const resolvedMd5 =\n md5 ??\n (await (async () => {\n const md5StartTime = Date.now();\n log(`Computing MD5 for ${absolutePath}...`);\n const computed = await md5FilePath(absolutePath);\n const md5Elapsed = Date.now() - md5StartTime;\n log(`MD5 computed in ${md5Elapsed}ms: ${computed}`);\n return computed;\n })());\n\n const cacheDir = path.join(cacheDirRoot, resolvedMd5);\n log(`Cache dir: ${cacheDir}`);\n await mkdir(cacheDir, { recursive: true });\n\n const resolvedCachePath =\n cachePath ?? 
path.join(cacheDir, expectedFilename);\n\n // Check if cache exists and is valid (not zero-byte)\n if (\n existsSync(resolvedCachePath) &&\n (await isValidCacheFile(resolvedCachePath))\n ) {\n log(`Returning cached ef:${label} task for ${resolvedCachePath}`);\n return { cachePath: resolvedCachePath, md5Sum: resolvedMd5 };\n }\n\n log(`Running ef:${label} runner for ${resolvedCachePath}`);\n await acquireRunnerSlot();\n let result: string | Readable;\n try {\n result = await runner(absolutePath, ...args);\n } finally {\n releaseRunnerSlot();\n }\n\n if (result instanceof Readable) {\n log(`Piping task for ${resolvedCachePath} to cache`);\n const tempPath = `${resolvedCachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n result.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n result.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n await rename(tempPath, resolvedCachePath);\n } else {\n log(`Writing to ${resolvedCachePath}`);\n await writeFile(resolvedCachePath, result);\n }\n\n return {\n md5Sum: resolvedMd5,\n cachePath: resolvedCachePath,\n };\n } finally {\n delete tasks[inputKey];\n }\n })();\n\n tasks[inputKey] = fullTask;\n return await fullTask;\n };\n};\n"],"mappings":";;;;;;;;;AAiBA,MAAM,UAAU,KAAK,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC;AAC5D,MAAMA,gBACJ,KAAK,MAAM,aAAa,KAAK,KAAK,SAAS,kBAAkB,EAAE,QAAQ,CAAC,CAGxE;AAIF,MAAM,yCAAyB,IAAI,KAA4B;AAE/D,eAAe,mBAAmB,cAAqC;CACrE,MAAM,WAAW,uBAAuB,IAAI,aAAa;AACzD,KAAI,SAAU,QAAO;CAErB,MAAM,WAAW,YAAY;EAC3B,MAAM,cAAc,KAAK,cAAc,WAAW;EAClD,IAAIC,gBAA+B;AACnC,MAAI;AACF,oBAAiB,MAAM,SAAS,aAAa,QAAQ,EAAE,MAAM;UACvD;AAER,MAAI,kBAAkB,cAAe;AAGrC,EADY,MAAM,oBAAoB,CAEpC,mCAAmC,iBAAiB,OAAO,aAAa,cAAc,iCAAiC,eACxH;EAGD,MAAM,UAAU,MAAM,QAAQ,cAAc,EAAE,eAAe,MAAM,CAAC,CAAC,YAC7D,EAAE,CACT;AACD,QAAM,QAAQ,IACZ,QACG,QAAQ,MAAM,EAAE,aAAa,CAAC,CAC9B,KAAK,MACJ,GAAG,KAAK,cAAc,EAAE,KAAK,EAAE;GAC7B,WAAW;GACX,OAAO;GACR,CAAC,CAAC,YAAY,GAAG,CACnB,CACJ;AAED,QAAM,MAAM,cAAc,EAAE,WAAW,MAAM,CAAC;AAC9C,QAAM,UAAU,aAAa,cAAc;KACzC;AAEJ,wBAAuB,IAAI,cAAc,QAAQ;AACjD,QAAO;;AAGT,MAAM,yBAAyB;AAC/B,IAAI,gBAAgB;AACpB,MAAMC,cAAiC,EAAE;AAEzC,SAAS,oBAAmC;AAC1C,KAAI,gBAAgB,wBAAwB;AAC1C;AACA,SAAO,QAAQ,SAAS;;AAE1B,QAAO,IAAI,SAAS,YAAY;AAC9B,cAAY,WAAW;AACrB;AACA,YAAS;IACT;GACF;;AAGJ,SAAS,oBAA0B;AACjC;CACA,MAAM,OAAO,YAAY,OAAO;AAChC,KAAI,KAAM,OAAM;;AAclB,MAAa,kBAAuC,EAClD,OACA,UACA,aACoB;CACpB,MAAMC,QAA6C,EAAE;CACrD,MAAMC,gBAAiD,EAAE;CAGzD,MAAM,mBAAmB,OACvB,UACA,aAAa,UACQ;AACrB,MAAI;GACF,MAAM,QAAQ,MAAM,KAAK,SAAS;AAElC,UAAO,cAAc,MAAM,OAAO;UAC5B;AACN,UAAO;;;AAIX,QAAO,OACL,SACA,cACA,GAAG,SACqB;EACxB,MAAM,MAAM,MAAM,MAAM,QAAQ;EAChC,MAAM,eAAe,KAAK,KAAK,SAAS,SAAS;AACjD,QAAM,MAAM,cAAc,EAAE,WAAW,MAAM,CAAC;AAC9C,QAAM,mBAAmB,aAAa;AAEtC,MAAI,cAAc,MAAM,YAAY,aAAa,MAAM,UAAU;AAGjE,MACE,aAAa,WAAW,UAAU,IAClC,aAAa,WAAW,WAAW,EACnC;GACA,MAAM,WAAW,aAAa,QAAQ,iBAAiB,IAAI;GAC3D,MAAM,oBAAoB,KAAK,KAC7B,SACA,UACA,GAAG,SAAS,OACb;AAGD,OACE,WAAW,kBAAkB,IAC5B,MAAM,iBAAiB,mBAAmB,KAAK,EAChD;AACA,QAAI,kBAAkB,eAAe;AACrC,mBAAe;UACV;IAEL,MAAM,cAAc;AACpB,QAAI,CAAC,cAAc,cAAc;AAC/B,SAAI,yBAAyB,eAAe;AAC5C,mBAAc,gBAAgB,YAAY;AACxC,UAAI;OACF,MAAM,WAAW,MAAM,MAAM,aAAa;AAC1C,WAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MACR,iCAAiC,aAAa,IAAI,SAAS,OAAO,GAAG,SAAS,aAC/E;OAGH,MAAM,SAAS,SAAS;AACxB,WAAI,CAAC,OACH,OAAM,IAAI,MAAM,4BAA4B,eAAe;OAI7D,MAAM,WAAW,GAAG,kBAAkB;OACtC,MAAM,cAAc,kBAAkB,SAAS;OAG/C,MAAM,WAAW,SAAS,QAAQ,OAAO;AACzC,gBAAS,KAAK,YAAY;AAE1B,aAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,iBAAS,GAAG,SAAS,OAAO;AAC5B,oBAAY,GAAG,SAAS,OAAO;AAC/B,oBAAY,GAAG,gBAAgB,SAAS,CAAC;SACzC;AAGF,aAAM,OA
AO,UAAU,kBAAkB;AAEzC,WAAI,0BAA0B,eAAe;AAC7C,cAAO;eACA,OAAO;AACd,WAAI,uBAAuB,aAAa,IAAI,QAAQ;AAEpD,cAAO,cAAc;AACrB,aAAM;;SAEN;;AAGN,mBAAe,MAAM,cAAc;AAEnC,WAAO,cAAc;;;EAOzB,MAAM,WAAW,KAAK,UAAU,CAAC,cAAc,GAAG,KAAK,CAAC;AACxD,MAAI,MAAM,WAAW;AACnB,OAAI,yBAAyB,MAAM,YAAY,eAAe;AAC9D,UAAO,MAAM,MAAM;;EAGrB,MAAM,YAAY,YAAiC;AACjD,OAAI;IAGF,MAAM,mBAAmB,SAAS,cAAc,GAAG,KAAK;IACxD,IAAIC,YAA2B;IAC/B,IAAIC,MAAqB;IAEzB,MAAM,gBAAgB,KAAK,KAAK;AAChC,QAAI;KACF,MAAM,YAAY,MAAM,QAAQ,cAAc,EAC5C,eAAe,MAChB,CAAC;AACF,SACE,YAAY,UAAU,OAAO,yBAAyB,mBACvD;AACD,UAAK,MAAM,OAAO,UAChB,KAAI,IAAI,aAAa,EAAE;MACrB,MAAM,gBAAgB,KAAK,KACzB,cACA,IAAI,MACJ,iBACD;AACD,UACE,WAAW,cAAc,IACxB,MAAM,iBAAiB,cAAc,EACtC;AACA,mBAAY;AACZ,aAAM,IAAI;AAEV,WACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,MAAM,cAAc,gBAC5D;AACD;;;AAIN,SAAI,CAAC,UAEH,KACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,uCACxC;aAEI,OAAO;AAEd,SACE,2BAFkB,KAAK,KAAK,GAAG,cAEQ,wBAAwB,QAChE;;IAGH,MAAM,cACJ,OACC,OAAO,YAAY;KAClB,MAAM,eAAe,KAAK,KAAK;AAC/B,SAAI,qBAAqB,aAAa,KAAK;KAC3C,MAAM,WAAW,MAAM,YAAY,aAAa;AAEhD,SAAI,mBADe,KAAK,KAAK,GAAG,aACE,MAAM,WAAW;AACnD,YAAO;QACL;IAEN,MAAM,WAAW,KAAK,KAAK,cAAc,YAAY;AACrD,QAAI,cAAc,WAAW;AAC7B,UAAM,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC;IAE1C,MAAM,oBACJ,aAAa,KAAK,KAAK,UAAU,iBAAiB;AAGpD,QACE,WAAW,kBAAkB,IAC5B,MAAM,iBAAiB,kBAAkB,EAC1C;AACA,SAAI,uBAAuB,MAAM,YAAY,oBAAoB;AACjE,YAAO;MAAE,WAAW;MAAmB,QAAQ;MAAa;;AAG9D,QAAI,cAAc,MAAM,cAAc,oBAAoB;AAC1D,UAAM,mBAAmB;IACzB,IAAIC;AACJ,QAAI;AACF,cAAS,MAAM,OAAO,cAAc,GAAG,KAAK;cACpC;AACR,wBAAmB;;AAGrB,QAAI,kBAAkB,UAAU;AAC9B,SAAI,mBAAmB,kBAAkB,WAAW;KACpD,MAAM,WAAW,GAAG,kBAAkB;KACtC,MAAM,cAAc,kBAAkB,SAAS;AAC/C,YAAO,KAAK,YAAY;AAExB,WAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,aAAO,GAAG,SAAS,OAAO;AAC1B,kBAAY,GAAG,SAAS,OAAO;AAC/B,kBAAY,GAAG,gBAAgB,SAAS,CAAC;OACzC;AAEF,WAAM,OAAO,UAAU,kBAAkB;WACpC;AACL,SAAI,cAAc,oBAAoB;AACtC,WAAM,UAAU,mBAAmB,OAAO;;AAG5C,WAAO;KACL,QAAQ;KACR,WAAW;KACZ;aACO;AACR,WAAO,MAAM;;MAEb;AAEJ,QAAM,YAAY;AAClB,SAAO,MAAM"}
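The sourcesContent embedded in the new map shows the full ensureCacheVersion flow: a ".version" stamp in the cache root is compared against CACHE_VERSION, and on mismatch every computed cache directory is purged (downloaded "*.file" entries are plain files and survive) before the stamp is rewritten. Distilled into a self-contained sketch using the same Node APIs (the function name here is illustrative):

import { mkdir, readFile, readdir, rm, writeFile } from "node:fs/promises";
import { join } from "node:path";

// Sketch distilled from the embedded source: cache directories (named by MD5)
// hold computed outputs and are deleted on a version mismatch; plain files
// such as downloaded "*.file" entries are intentionally left in place.
async function bustStaleComputedCaches(
  cacheDirRoot: string,
  currentVersion: string,
): Promise<void> {
  const versionFile = join(cacheDirRoot, ".version");
  let stored: string | null = null;
  try {
    stored = (await readFile(versionFile, "utf-8")).trim();
  } catch {}
  if (stored === currentVersion) return;

  const entries = await readdir(cacheDirRoot, { withFileTypes: true }).catch(
    () => [],
  );
  await Promise.all(
    entries
      .filter((e) => e.isDirectory())
      .map((e) =>
        rm(join(cacheDirRoot, e.name), { recursive: true, force: true }).catch(
          () => {},
        ),
      ),
  );
  await mkdir(cacheDirRoot, { recursive: true });
  await writeFile(versionFile, currentVersion);
}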
package/dist/tasks/findOrCreateCaptions.cjs.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"findOrCreateCaptions.cjs","names":["execFile","idempotentTask"],"sources":["../../src/tasks/findOrCreateCaptions.ts"],"sourcesContent":["import { basename } from \"node:path\";\nimport { promisify } from \"node:util\";\nimport { execFile } from \"node:child_process\";\n\nimport debug from \"debug\";\n\nimport { idempotentTask } from \"../idempotentTask.js\";\n\nconst execFilePromise = promisify(execFile);\n\nconst log = debug(\"ef:generateCaptions\");\n\ninterface WhisperWord {\n text: string;\n start: number;\n end: number;\n confidence: number;\n}\n\ninterface WhisperSegment {\n text: string;\n start: number;\n end: number;\n words: WhisperWord[];\n}\n\ninterface WhisperOutput {\n segments: WhisperSegment[];\n}\n\ninterface CaptionOutput {\n segments: Array<{\n start: number;\n end: number;\n text: string;\n }>;\n word_segments: Array<{\n text: string;\n start: number;\n end: number;\n }>;\n}\n\nconst convertWhisperToEditframeFormat = (\n whisperData: WhisperOutput,\n): CaptionOutput => {\n const segments = whisperData.segments.map((segment) => ({\n start: Math.round(segment.start * 1000), // Convert to milliseconds\n end: Math.round(segment.end * 1000),\n text: segment.text.trim(),\n }));\n\n const word_segments = whisperData.segments.flatMap((segment) =>\n segment.words.map((word) => ({\n text: word.text,\n start: Math.round(word.start * 1000), // Convert to milliseconds\n end: Math.round(word.end * 1000),\n })),\n );\n\n return { segments, word_segments };\n};\n\nexport const generateCaptionDataFromPath = async (absolutePath: string) => {\n const args = [\"--language\"
+
{"version":3,"file":"findOrCreateCaptions.cjs","names":["execFile","idempotentTask"],"sources":["../../src/tasks/findOrCreateCaptions.ts"],"sourcesContent":["import { basename } from \"node:path\";\nimport { promisify } from \"node:util\";\nimport { execFile } from \"node:child_process\";\n\nimport debug from \"debug\";\n\nimport { idempotentTask } from \"../idempotentTask.js\";\n\nconst execFilePromise = promisify(execFile);\n\nconst log = debug(\"ef:generateCaptions\");\n\ninterface WhisperWord {\n text: string;\n start: number;\n end: number;\n confidence: number;\n}\n\ninterface WhisperSegment {\n text: string;\n start: number;\n end: number;\n words: WhisperWord[];\n}\n\ninterface WhisperOutput {\n segments: WhisperSegment[];\n}\n\ninterface CaptionOutput {\n segments: Array<{\n start: number;\n end: number;\n text: string;\n }>;\n word_segments: Array<{\n text: string;\n start: number;\n end: number;\n }>;\n}\n\nconst convertWhisperToEditframeFormat = (\n whisperData: WhisperOutput,\n): CaptionOutput => {\n const segments = whisperData.segments.map((segment) => ({\n start: Math.round(segment.start * 1000), // Convert to milliseconds\n end: Math.round(segment.end * 1000),\n text: segment.text.trim(),\n }));\n\n const word_segments = whisperData.segments.flatMap((segment) =>\n segment.words.map((word) => ({\n text: word.text,\n start: Math.round(word.start * 1000), // Convert to milliseconds\n end: Math.round(word.end * 1000),\n })),\n );\n\n return { segments, word_segments };\n};\n\nexport const generateCaptionDataFromPath = async (absolutePath: string) => {\n const args = [\n \"--language\",\n \"en\",\n \"--efficient\",\n \"--output_format\",\n \"json\",\n absolutePath,\n ];\n log(\"Running whisper_timestamped\", args);\n const { stdout } = await execFilePromise(\"whisper_timestamped\", args);\n\n try {\n const whisperData = JSON.parse(stdout) as WhisperOutput;\n const captionData = convertWhisperToEditframeFormat(whisperData);\n return JSON.stringify(captionData, null, 2);\n } catch (error) {\n log(`Error parsing whisper output: ${error}`);\n throw new Error(`Failed to parse whisper_timestamped output: ${error}`);\n }\n};\n\nconst generateCaptionDataTask = idempotentTask({\n label: \"captions\",\n filename: (absolutePath) => `${basename(absolutePath)}.captions.json`,\n runner: generateCaptionDataFromPath,\n});\n\nexport const findOrCreateCaptions = async (\n cacheRoot: string,\n absolutePath: string,\n) => {\n try {\n return await generateCaptionDataTask(cacheRoot, absolutePath);\n } catch (error) {\n console.trace(\"Error finding or creating captions\", error);\n throw error;\n 
}\n};\n"],"mappings":";;;;;;;;;;;;AAQA,MAAM,2CAA4BA,4BAAS;AAE3C,MAAM,yBAAY,sBAAsB;AAiCxC,MAAM,mCACJ,gBACkB;AAelB,QAAO;EAAE,UAdQ,YAAY,SAAS,KAAK,aAAa;GACtD,OAAO,KAAK,MAAM,QAAQ,QAAQ,IAAK;GACvC,KAAK,KAAK,MAAM,QAAQ,MAAM,IAAK;GACnC,MAAM,QAAQ,KAAK,MAAM;GAC1B,EAAE;EAUgB,eARG,YAAY,SAAS,SAAS,YAClD,QAAQ,MAAM,KAAK,UAAU;GAC3B,MAAM,KAAK;GACX,OAAO,KAAK,MAAM,KAAK,QAAQ,IAAK;GACpC,KAAK,KAAK,MAAM,KAAK,MAAM,IAAK;GACjC,EAAE,CACJ;EAEiC;;AAGpC,MAAa,8BAA8B,OAAO,iBAAyB;CACzE,MAAM,OAAO;EACX;EACA;EACA;EACA;EACA;EACA;EACD;AACD,KAAI,+BAA+B,KAAK;CACxC,MAAM,EAAE,WAAW,MAAM,gBAAgB,uBAAuB,KAAK;AAErE,KAAI;EAEF,MAAM,cAAc,gCADA,KAAK,MAAM,OAAO,CAC0B;AAChE,SAAO,KAAK,UAAU,aAAa,MAAM,EAAE;UACpC,OAAO;AACd,MAAI,iCAAiC,QAAQ;AAC7C,QAAM,IAAI,MAAM,+CAA+C,QAAQ;;;AAI3E,MAAM,0BAA0BC,sCAAe;CAC7C,OAAO;CACP,WAAW,iBAAiB,2BAAY,aAAa,CAAC;CACtD,QAAQ;CACT,CAAC;AAEF,MAAa,uBAAuB,OAClC,WACA,iBACG;AACH,KAAI;AACF,SAAO,MAAM,wBAAwB,WAAW,aAAa;UACtD,OAAO;AACd,UAAQ,MAAM,sCAAsC,MAAM;AAC1D,QAAM"}
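The old map entry is cut off in this rendering partway through the whisper_timestamped argument list; the updated entry embeds the complete runner source. Its core invocation, reproduced from the sourcesContent above (runWhisper is an illustrative wrapper name):

import { execFile } from "node:child_process";
import { promisify } from "node:util";

const execFilePromise = promisify(execFile);

// Mirrors the embedded runner: whisper_timestamped writes JSON to stdout,
// which is parsed and later converted from seconds to milliseconds.
async function runWhisper(absolutePath: string): Promise<unknown> {
  const args = [
    "--language",
    "en",
    "--efficient",
    "--output_format",
    "json",
    absolutePath,
  ];
  const { stdout } = await execFilePromise("whisper_timestamped", args);
  return JSON.parse(stdout);
}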
package/dist/tasks/findOrCreateCaptions.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"findOrCreateCaptions.js","names":[],"sources":["../../src/tasks/findOrCreateCaptions.ts"],"sourcesContent":["import { basename } from \"node:path\";\nimport { promisify } from \"node:util\";\nimport { execFile } from \"node:child_process\";\n\nimport debug from \"debug\";\n\nimport { idempotentTask } from \"../idempotentTask.js\";\n\nconst execFilePromise = promisify(execFile);\n\nconst log = debug(\"ef:generateCaptions\");\n\ninterface WhisperWord {\n text: string;\n start: number;\n end: number;\n confidence: number;\n}\n\ninterface WhisperSegment {\n text: string;\n start: number;\n end: number;\n words: WhisperWord[];\n}\n\ninterface WhisperOutput {\n segments: WhisperSegment[];\n}\n\ninterface CaptionOutput {\n segments: Array<{\n start: number;\n end: number;\n text: string;\n }>;\n word_segments: Array<{\n text: string;\n start: number;\n end: number;\n }>;\n}\n\nconst convertWhisperToEditframeFormat = (\n whisperData: WhisperOutput,\n): CaptionOutput => {\n const segments = whisperData.segments.map((segment) => ({\n start: Math.round(segment.start * 1000), // Convert to milliseconds\n end: Math.round(segment.end * 1000),\n text: segment.text.trim(),\n }));\n\n const word_segments = whisperData.segments.flatMap((segment) =>\n segment.words.map((word) => ({\n text: word.text,\n start: Math.round(word.start * 1000), // Convert to milliseconds\n end: Math.round(word.end * 1000),\n })),\n );\n\n return { segments, word_segments };\n};\n\nexport const generateCaptionDataFromPath = async (absolutePath: string) => {\n const args = [\"--language\"
+
{"version":3,"file":"findOrCreateCaptions.js","names":[],"sources":["../../src/tasks/findOrCreateCaptions.ts"],"sourcesContent":["import { basename } from \"node:path\";\nimport { promisify } from \"node:util\";\nimport { execFile } from \"node:child_process\";\n\nimport debug from \"debug\";\n\nimport { idempotentTask } from \"../idempotentTask.js\";\n\nconst execFilePromise = promisify(execFile);\n\nconst log = debug(\"ef:generateCaptions\");\n\ninterface WhisperWord {\n text: string;\n start: number;\n end: number;\n confidence: number;\n}\n\ninterface WhisperSegment {\n text: string;\n start: number;\n end: number;\n words: WhisperWord[];\n}\n\ninterface WhisperOutput {\n segments: WhisperSegment[];\n}\n\ninterface CaptionOutput {\n segments: Array<{\n start: number;\n end: number;\n text: string;\n }>;\n word_segments: Array<{\n text: string;\n start: number;\n end: number;\n }>;\n}\n\nconst convertWhisperToEditframeFormat = (\n whisperData: WhisperOutput,\n): CaptionOutput => {\n const segments = whisperData.segments.map((segment) => ({\n start: Math.round(segment.start * 1000), // Convert to milliseconds\n end: Math.round(segment.end * 1000),\n text: segment.text.trim(),\n }));\n\n const word_segments = whisperData.segments.flatMap((segment) =>\n segment.words.map((word) => ({\n text: word.text,\n start: Math.round(word.start * 1000), // Convert to milliseconds\n end: Math.round(word.end * 1000),\n })),\n );\n\n return { segments, word_segments };\n};\n\nexport const generateCaptionDataFromPath = async (absolutePath: string) => {\n const args = [\n \"--language\",\n \"en\",\n \"--efficient\",\n \"--output_format\",\n \"json\",\n absolutePath,\n ];\n log(\"Running whisper_timestamped\", args);\n const { stdout } = await execFilePromise(\"whisper_timestamped\", args);\n\n try {\n const whisperData = JSON.parse(stdout) as WhisperOutput;\n const captionData = convertWhisperToEditframeFormat(whisperData);\n return JSON.stringify(captionData, null, 2);\n } catch (error) {\n log(`Error parsing whisper output: ${error}`);\n throw new Error(`Failed to parse whisper_timestamped output: ${error}`);\n }\n};\n\nconst generateCaptionDataTask = idempotentTask({\n label: \"captions\",\n filename: (absolutePath) => `${basename(absolutePath)}.captions.json`,\n runner: generateCaptionDataFromPath,\n});\n\nexport const findOrCreateCaptions = async (\n cacheRoot: string,\n absolutePath: string,\n) => {\n try {\n return await generateCaptionDataTask(cacheRoot, absolutePath);\n } catch (error) {\n console.trace(\"Error finding or creating captions\", error);\n throw error;\n }\n};\n"],"mappings":";;;;;;;AAQA,MAAM,kBAAkB,UAAU,SAAS;AAE3C,MAAM,MAAM,MAAM,sBAAsB;AAiCxC,MAAM,mCACJ,gBACkB;AAelB,QAAO;EAAE,UAdQ,YAAY,SAAS,KAAK,aAAa;GACtD,OAAO,KAAK,MAAM,QAAQ,QAAQ,IAAK;GACvC,KAAK,KAAK,MAAM,QAAQ,MAAM,IAAK;GACnC,MAAM,QAAQ,KAAK,MAAM;GAC1B,EAAE;EAUgB,eARG,YAAY,SAAS,SAAS,YAClD,QAAQ,MAAM,KAAK,UAAU;GAC3B,MAAM,KAAK;GACX,OAAO,KAAK,MAAM,KAAK,QAAQ,IAAK;GACpC,KAAK,KAAK,MAAM,KAAK,MAAM,IAAK;GACjC,EAAE,CACJ;EAEiC;;AAGpC,MAAa,8BAA8B,OAAO,iBAAyB;CACzE,MAAM,OAAO;EACX;EACA;EACA;EACA;EACA;EACA;EACD;AACD,KAAI,+BAA+B,KAAK;CACxC,MAAM,EAAE,WAAW,MAAM,gBAAgB,uBAAuB,KAAK;AAErE,KAAI;EAEF,MAAM,cAAc,gCADA,KAAK,MAAM,OAAO,CAC0B;AAChE,SAAO,KAAK,UAAU,aAAa,MAAM,EAAE;UACpC,OAAO;AACd,MAAI,iCAAiC,QAAQ;AAC7C,QAAM,IAAI,MAAM,+CAA+C,QAAQ;;;AAI3E,MAAM,0BAA0B,eAAe;CAC7C,OAAO;CACP,WAAW,iBAAiB,GAAG,SAAS,aAAa,CAAC;CACtD,QAAQ;CACT,CAAC;AAEF,MAAa,uBAAuB,OAClC,WACA,iBACG;AACH,KAAI;AACF,SAAO,MAAM,wBAAwB,WAAW,aAAa;UACtD,OAAO;AACd,UAAQ,MAAM,sCAAsC,MAAM;AAC1D,QAAM"}
package/dist/tasks/generateTrackFragmentIndex.cjs.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"generateTrackFragmentIndex.cjs","names":["Probe","generateFragmentIndex","scrubTask: Promise<Record<number, TrackFragmentIndex> | null>","trackFragmentIndexes: Record<number, TrackFragmentIndex>","idempotentTask"],"sources":["../../src/tasks/generateTrackFragmentIndex.ts"],"sourcesContent":["import { idempotentTask } from \"../idempotentTask.js\";\nimport debug from \"debug\";\nimport { basename } from \"node:path\";\nimport { Probe } from \"../Probe.js\";\nimport { generateFragmentIndex } from \"../generateFragmentIndex.js\";\nimport type { TrackFragmentIndex } from \"../Probe.js\";\n\nexport const generateTrackFragmentIndexFromPath = async (\n absolutePath: string,\n) => {\n const log = debug(\"ef:generateTrackFragment\");\n const probe = await Probe.probePath(absolutePath);\n\n const startTimeOffsetMs = probe.startTimeOffsetMs;\n if (startTimeOffsetMs !== undefined) {\n log(`Extracted start_time offset: ${startTimeOffsetMs}ms`);\n } else {\n log(\"No format/stream timing offset found - will detect from composition time\");\n }\n\n log(\n `Generating track fragment index for ${absolutePath} using single-track approach`,\n );\n\n // Process all audio/video streams and scrub track in parallel\n const trackTasks = probe.streams\n .map((stream, streamIndex) => {\n if (stream.codec_type !== \"audio\" && stream.codec_type !== \"video\") {\n return null;\n }\n const trackId = streamIndex + 1;\n log(`Processing track ${trackId} (${stream.codec_type})`);\n const trackStream = probe.createTrackReadstream(streamIndex);\n const trackIdMapping = { 0: trackId };\n return generateFragmentIndex(trackStream
+
{"version":3,"file":"generateTrackFragmentIndex.cjs","names":["Probe","generateFragmentIndex","scrubTask: Promise<Record<number, TrackFragmentIndex> | null>","trackFragmentIndexes: Record<number, TrackFragmentIndex>","idempotentTask"],"sources":["../../src/tasks/generateTrackFragmentIndex.ts"],"sourcesContent":["import { idempotentTask } from \"../idempotentTask.js\";\nimport debug from \"debug\";\nimport { basename } from \"node:path\";\nimport { Probe } from \"../Probe.js\";\nimport { generateFragmentIndex } from \"../generateFragmentIndex.js\";\nimport type { TrackFragmentIndex } from \"../Probe.js\";\n\nexport const generateTrackFragmentIndexFromPath = async (\n absolutePath: string,\n) => {\n const log = debug(\"ef:generateTrackFragment\");\n const probe = await Probe.probePath(absolutePath);\n\n const startTimeOffsetMs = probe.startTimeOffsetMs;\n if (startTimeOffsetMs !== undefined) {\n log(`Extracted start_time offset: ${startTimeOffsetMs}ms`);\n } else {\n log(\n \"No format/stream timing offset found - will detect from composition time\",\n );\n }\n\n log(\n `Generating track fragment index for ${absolutePath} using single-track approach`,\n );\n\n // Process all audio/video streams and scrub track in parallel\n const trackTasks = probe.streams\n .map((stream, streamIndex) => {\n if (stream.codec_type !== \"audio\" && stream.codec_type !== \"video\") {\n return null;\n }\n const trackId = streamIndex + 1;\n log(`Processing track ${trackId} (${stream.codec_type})`);\n const trackStream = probe.createTrackReadstream(streamIndex);\n const trackIdMapping = { 0: trackId };\n return generateFragmentIndex(\n trackStream,\n startTimeOffsetMs,\n trackIdMapping,\n );\n })\n .filter(\n (task): task is Promise<Record<number, TrackFragmentIndex>> =>\n task !== null,\n );\n\n const scrubTask: Promise<Record<number, TrackFragmentIndex> | null> =\n probe.videoStreams.length > 0\n ? 
(async () => {\n try {\n log(\"Generating scrub track fragment index\");\n const scrubStream = probe.createScrubTrackReadstream();\n const scrubTrackId = -1;\n const result = await generateFragmentIndex(\n scrubStream,\n startTimeOffsetMs,\n { 0: scrubTrackId },\n );\n log(\"Scrub track fragment index generated successfully\");\n return result;\n } catch (error) {\n log(`Failed to generate scrub track fragment index: ${error}`);\n return null;\n }\n })()\n : Promise.resolve(null);\n\n const [trackResults, scrubResult] = await Promise.all([\n Promise.all(trackTasks),\n scrubTask,\n ]);\n\n const trackFragmentIndexes: Record<number, TrackFragmentIndex> = {};\n for (const result of trackResults) {\n Object.assign(trackFragmentIndexes, result);\n }\n if (scrubResult) {\n Object.assign(trackFragmentIndexes, scrubResult);\n }\n\n return trackFragmentIndexes;\n};\n\nconst generateTrackFragmentIndexTask = idempotentTask({\n label: \"trackFragmentIndex\",\n filename: (absolutePath) => `${basename(absolutePath)}.tracks.json`,\n runner: async (absolutePath: string) => {\n const index = await generateTrackFragmentIndexFromPath(absolutePath);\n return JSON.stringify(index, null, 2);\n },\n});\n\nexport const generateTrackFragmentIndex = async (\n cacheRoot: string,\n absolutePath: string,\n) => {\n try {\n return await generateTrackFragmentIndexTask(cacheRoot, absolutePath);\n } catch (error) {\n console.trace(\"Error generating track fragment index\", error);\n throw error;\n }\n};\n"],"mappings":";;;;;;;;;;AAOA,MAAa,qCAAqC,OAChD,iBACG;CACH,MAAM,yBAAY,2BAA2B;CAC7C,MAAM,QAAQ,MAAMA,oBAAM,UAAU,aAAa;CAEjD,MAAM,oBAAoB,MAAM;AAChC,KAAI,sBAAsB,OACxB,KAAI,gCAAgC,kBAAkB,IAAI;KAE1D,KACE,2EACD;AAGH,KACE,uCAAuC,aAAa,8BACrD;CAGD,MAAM,aAAa,MAAM,QACtB,KAAK,QAAQ,gBAAgB;AAC5B,MAAI,OAAO,eAAe,WAAW,OAAO,eAAe,QACzD,QAAO;EAET,MAAM,UAAU,cAAc;AAC9B,MAAI,oBAAoB,QAAQ,IAAI,OAAO,WAAW,GAAG;AAGzD,SAAOC,oDAFa,MAAM,sBAAsB,YAAY,EAI1D,mBAHqB,EAAE,GAAG,SAAS,CAKpC;GACD,CACD,QACE,SACC,SAAS,KACZ;CAEH,MAAMC,YACJ,MAAM,aAAa,SAAS,KACvB,YAAY;AACX,MAAI;AACF,OAAI,wCAAwC;GAC5C,MAAM,cAAc,MAAM,4BAA4B;GACtD,MAAM,eAAe;GACrB,MAAM,SAAS,MAAMD,oDACnB,aACA,mBACA,EAAE,GAAG,cAAc,CACpB;AACD,OAAI,oDAAoD;AACxD,UAAO;WACA,OAAO;AACd,OAAI,kDAAkD,QAAQ;AAC9D,UAAO;;KAEP,GACJ,QAAQ,QAAQ,KAAK;CAE3B,MAAM,CAAC,cAAc,eAAe,MAAM,QAAQ,IAAI,CACpD,QAAQ,IAAI,WAAW,EACvB,UACD,CAAC;CAEF,MAAME,uBAA2D,EAAE;AACnE,MAAK,MAAM,UAAU,aACnB,QAAO,OAAO,sBAAsB,OAAO;AAE7C,KAAI,YACF,QAAO,OAAO,sBAAsB,YAAY;AAGlD,QAAO;;AAGT,MAAM,iCAAiCC,sCAAe;CACpD,OAAO;CACP,WAAW,iBAAiB,2BAAY,aAAa,CAAC;CACtD,QAAQ,OAAO,iBAAyB;EACtC,MAAM,QAAQ,MAAM,mCAAmC,aAAa;AACpE,SAAO,KAAK,UAAU,OAAO,MAAM,EAAE;;CAExC,CAAC;AAEF,MAAa,6BAA6B,OACxC,WACA,iBACG;AACH,KAAI;AACF,SAAO,MAAM,+BAA+B,WAAW,aAAa;UAC7D,OAAO;AACd,UAAQ,MAAM,yCAAyC,MAAM;AAC7D,QAAM"}
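Per the source embedded in this map, generateTrackFragmentIndexFromPath fans out one fragment-index job per audio/video stream plus an optional scrub-track job (keyed as track -1), awaits them with Promise.all, and merges the per-track records. A self-contained sketch of the merge step, with types simplified:

// Simplified from the embedded source: each job resolves to a record keyed by
// track id (the scrub track uses -1); Object.assign folds them into one index.
type FragmentIndexByTrack = Record<number, unknown>;

async function mergeTrackIndexes(
  trackTasks: Array<Promise<FragmentIndexByTrack>>,
  scrubTask: Promise<FragmentIndexByTrack | null>,
): Promise<FragmentIndexByTrack> {
  const [trackResults, scrubResult] = await Promise.all([
    Promise.all(trackTasks),
    scrubTask,
  ]);
  const merged: FragmentIndexByTrack = {};
  for (const result of trackResults) Object.assign(merged, result);
  if (scrubResult) Object.assign(merged, scrubResult);
  return merged;
}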