@editframe/assets 0.24.1-beta.0 → 0.25.1-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Probe.d.ts +922 -967
- package/dist/Probe.js +13 -8
- package/dist/Probe.js.map +1 -0
- package/dist/VideoRenderOptions.d.ts +190 -185
- package/dist/VideoRenderOptions.js +5 -0
- package/dist/VideoRenderOptions.js.map +1 -0
- package/dist/generateFragmentIndex.d.ts +8 -3
- package/dist/generateFragmentIndex.js +9 -1
- package/dist/generateFragmentIndex.js.map +1 -0
- package/dist/generateSingleTrack.js +7 -2
- package/dist/generateSingleTrack.js.map +1 -0
- package/dist/idempotentTask.d.ts +10 -11
- package/dist/idempotentTask.js +5 -0
- package/dist/idempotentTask.js.map +1 -0
- package/dist/index.d.ts +10 -10
- package/dist/index.js +2 -1
- package/dist/md5.d.ts +11 -6
- package/dist/md5.js +5 -0
- package/dist/md5.js.map +1 -0
- package/dist/tasks/cacheImage.d.ts +7 -1
- package/dist/tasks/cacheImage.js +6 -1
- package/dist/tasks/cacheImage.js.map +1 -0
- package/dist/tasks/findOrCreateCaptions.d.ts +8 -2
- package/dist/tasks/findOrCreateCaptions.js +8 -3
- package/dist/tasks/findOrCreateCaptions.js.map +1 -0
- package/dist/tasks/generateTrack.d.ts +8 -3
- package/dist/tasks/generateTrack.js +5 -0
- package/dist/tasks/generateTrack.js.map +1 -0
- package/dist/tasks/generateTrackFragmentIndex.d.ts +9 -3
- package/dist/tasks/generateTrackFragmentIndex.js +6 -1
- package/dist/tasks/generateTrackFragmentIndex.js.map +1 -0
- package/dist/truncateDecimal.js +4 -0
- package/dist/truncateDecimal.js.map +1 -0
- package/package.json +29 -22
- package/tsdown.config.ts +13 -0
- package/dist/generateSingleTrack.d.ts +0 -8
- package/dist/memoize.d.ts +0 -2
- package/dist/tasks/cacheRemoteAsset.d.ts +0 -0
- package/dist/truncateDecimal.d.ts +0 -1
package/dist/generateSingleTrack.js
CHANGED
@@ -4,7 +4,9 @@ import { idempotentTask } from "./idempotentTask.js";
 import debug from "debug";
 import { PassThrough } from "node:stream";
 import { basename } from "node:path";
-
+
+//#region src/generateSingleTrack.ts
+const log = debug("ef:generateSingleTrack");
 const generateSingleTrackFromPath = async (absolutePath, trackId) => {
 log(`Generating track ${trackId} for ${absolutePath}`);
 const probe = await Probe.probePath(absolutePath);
@@ -37,7 +39,7 @@ const generateSingleTrackFromPath = async (absolutePath, trackId) => {
 fragmentIndex: fragmentIndexPromise
 };
 };
-idempotentTask({
+const generateSingleTrackTask = idempotentTask({
 label: "track-single",
 filename: (absolutePath, trackId) => `${basename(absolutePath)}.track-${trackId}.mp4`,
 runner: async (absolutePath, trackId) => {
@@ -70,4 +72,7 @@ idempotentTask({
 return finalStream;
 }
 });
+
+//#endregion
 export { generateSingleTrackFromPath };
+//# sourceMappingURL=generateSingleTrack.js.map
package/dist/generateSingleTrack.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"generateSingleTrack.js","names":["progressTimeout: NodeJS.Timeout | null"],"sources":["../src/generateSingleTrack.ts"],"sourcesContent":["import { idempotentTask } from \"./idempotentTask.js\";\nimport debug from \"debug\";\nimport { PassThrough } from \"node:stream\";\nimport { basename } from \"node:path\";\nimport { Probe } from \"./Probe.js\";\nimport { generateFragmentIndex } from \"./generateFragmentIndex.js\";\n\nconst log = debug(\"ef:generateSingleTrack\");\n\nexport const generateSingleTrackFromPath = async (\n absolutePath: string,\n trackId: number,\n) => {\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // Map track ID (1-based) to stream index (0-based) - tracks use 1-based IDs, streams use 0-based indices\n const streamIndex = trackId - 1;\n\n if (streamIndex < 0 || streamIndex >= probe.streams.length) {\n throw new Error(`Track ${trackId} not found (valid tracks: 1-${probe.streams.length})`);\n }\n\n // Get the track stream from FFmpeg (single track, fragmented MP4)\n const trackStream = probe.createTrackReadstream(streamIndex);\n\n // Create a PassThrough to tee the stream\n const outputStream = new PassThrough();\n const indexStream = new PassThrough();\n\n // Pipe data but DON'T end outputStream automatically - we'll control this\n trackStream.pipe(outputStream, { end: false });\n trackStream.pipe(indexStream);\n\n // Track when the source stream ends (but don't end output yet)\n let sourceStreamEnded = false;\n trackStream.on('end', () => {\n sourceStreamEnded = true;\n });\n\n trackStream.on('error', (error) => {\n outputStream.destroy(error);\n indexStream.destroy(error);\n });\n\n // Generate fragment index from the single-track stream\n // This will be a single-track index since we're processing isolated track\n // Map the single-track file's track ID 1 to the original multi-track ID\n const trackIdMapping = { 1: trackId }; // Single track 1 -> original trackId\n const fragmentIndexPromise = generateFragmentIndex(indexStream, undefined, trackIdMapping);\n\n // End outputStream only after BOTH source ends AND fragment index completes\n fragmentIndexPromise.then(() => {\n if (sourceStreamEnded) {\n outputStream.end();\n } else {\n // If fragment index completes first, wait for stream to end\n trackStream.once('end', () => {\n outputStream.end();\n });\n }\n }).catch((error) => {\n outputStream.destroy(error);\n });\n\n // Return both the stream and the index\n return {\n stream: outputStream,\n fragmentIndex: fragmentIndexPromise\n };\n};\n\nexport const generateSingleTrackTask = idempotentTask({\n label: \"track-single\",\n filename: (absolutePath: string, trackId: number) =>\n `${basename(absolutePath)}.track-${trackId}.mp4`,\n runner: async (absolutePath: string, trackId: number) => {\n const result = await generateSingleTrackFromPath(absolutePath, trackId);\n\n // Create a PassThrough stream that processes fragment index in parallel\n const finalStream = new PassThrough();\n\n // Start fragment index processing immediately (don't wait for stream to end)\n const fragmentIndexPromise = result.fragmentIndex.catch((error) => {\n console.warn(`Fragment index generation failed for track ${trackId}:`, error);\n // Don't fail the stream if fragment index fails\n });\n\n // Monitor progress and extend timeout based on actual work\n let progressTimeout: NodeJS.Timeout | null = null;\n\n const resetProgressTimeout = () => {\n if (progressTimeout) {\n clearTimeout(progressTimeout);\n 
}\n\n progressTimeout = setTimeout(() => {\n if (!finalStream.destroyed) {\n console.warn(`Progress timeout triggered for track ${trackId} - no activity for 10 seconds`);\n finalStream.end();\n }\n }, 10000); // 10 second sliding timeout\n };\n\n // Start the initial timeout\n resetProgressTimeout();\n\n // Monitor data flow to detect active work\n result.stream.on('data', () => {\n resetProgressTimeout(); // Reset timeout when we see data\n });\n\n result.stream.on('end', () => {\n resetProgressTimeout(); // Reset timeout when stream ends\n });\n\n // Pipe data through but don't end until fragment index is ready\n result.stream.pipe(finalStream, { end: false });\n\n // Wait for fragment index to complete, then end the stream\n await fragmentIndexPromise;\n finalStream.end();\n\n // Clean up timeout\n if (progressTimeout) {\n clearTimeout(progressTimeout);\n }\n\n return finalStream;\n },\n});\n\nexport const generateSingleTrack = async (\n cacheRoot: string,\n absolutePath: string,\n url: string,\n) => {\n try {\n const trackId = new URL(`http://localhost${url}`).searchParams.get(\n \"trackId\",\n );\n if (trackId === null) {\n throw new Error(\n \"No trackId provided. It must be specified in the query string: ?trackId=0\",\n );\n }\n return await generateSingleTrackTask(cacheRoot, absolutePath, Number(trackId));\n } catch (error) {\n console.error(error);\n console.trace(\"Error generating track\", error);\n throw error;\n }\n};\n\n// Helper function to get both stream and fragment index\nexport const generateSingleTrackWithIndex = async (\n absolutePath: string,\n trackId: number,\n) => {\n log(`Generating track ${trackId} with index for ${absolutePath}`);\n\n const probe = await Probe.probePath(absolutePath);\n\n // Map track ID (1-based) to stream index (0-based) for compatibility\n const streamIndex = trackId - 1;\n\n if (streamIndex < 0 || streamIndex >= probe.streams.length) {\n throw new Error(`Track ${trackId} not found (valid tracks: 1-${probe.streams.length})`);\n }\n\n const trackStream = probe.createTrackReadstream(streamIndex);\n\n // Collect all data for fragment index generation\n const chunks: Buffer[] = [];\n const outputStream = new PassThrough();\n\n // Tee the stream: collect for index AND pass through for output\n trackStream.on('data', (chunk: Buffer) => {\n chunks.push(chunk);\n outputStream.write(chunk);\n });\n\n trackStream.on('end', () => {\n // Don't end the output stream immediately - wait for async processing\n (async () => {\n try {\n // Create a readable from collected chunks for fragment index\n const { Readable } = await import(\"node:stream\");\n const indexInputStream = Readable.from(Buffer.concat(chunks as any));\n\n // Generate fragment index with track ID mapping\n const trackIdMapping = { 1: trackId }; // Single track 1 -> original trackId\n const fragmentIndex = await generateFragmentIndex(indexInputStream, undefined, trackIdMapping);\n\n // Emit the fragment index as metadata\n outputStream.emit('fragmentIndex', fragmentIndex);\n\n // Now it's safe to end the stream\n outputStream.end();\n } catch (error) {\n outputStream.destroy(error as Error);\n }\n })();\n });\n\n trackStream.on('error', (error) => {\n outputStream.destroy(error);\n });\n\n return 
outputStream;\n};\n"],"mappings":";;;;;;;;AAOA,MAAM,MAAM,MAAM,yBAAyB;AAE3C,MAAa,8BAA8B,OACzC,cACA,YACG;AACH,KAAI,oBAAoB,QAAQ,OAAO,eAAe;CAEtD,MAAM,QAAQ,MAAM,MAAM,UAAU,aAAa;CAGjD,MAAM,cAAc,UAAU;AAE9B,KAAI,cAAc,KAAK,eAAe,MAAM,QAAQ,OAClD,OAAM,IAAI,MAAM,SAAS,QAAQ,8BAA8B,MAAM,QAAQ,OAAO,GAAG;CAIzF,MAAM,cAAc,MAAM,sBAAsB,YAAY;CAG5D,MAAM,eAAe,IAAI,aAAa;CACtC,MAAM,cAAc,IAAI,aAAa;AAGrC,aAAY,KAAK,cAAc,EAAE,KAAK,OAAO,CAAC;AAC9C,aAAY,KAAK,YAAY;CAG7B,IAAI,oBAAoB;AACxB,aAAY,GAAG,aAAa;AAC1B,sBAAoB;GACpB;AAEF,aAAY,GAAG,UAAU,UAAU;AACjC,eAAa,QAAQ,MAAM;AAC3B,cAAY,QAAQ,MAAM;GAC1B;CAMF,MAAM,uBAAuB,sBAAsB,aAAa,QADzC,EAAE,GAAG,SAAS,CACqD;AAG1F,sBAAqB,WAAW;AAC9B,MAAI,kBACF,cAAa,KAAK;MAGlB,aAAY,KAAK,aAAa;AAC5B,gBAAa,KAAK;IAClB;GAEJ,CAAC,OAAO,UAAU;AAClB,eAAa,QAAQ,MAAM;GAC3B;AAGF,QAAO;EACL,QAAQ;EACR,eAAe;EAChB;;AAGH,MAAa,0BAA0B,eAAe;CACpD,OAAO;CACP,WAAW,cAAsB,YAC/B,GAAG,SAAS,aAAa,CAAC,SAAS,QAAQ;CAC7C,QAAQ,OAAO,cAAsB,YAAoB;EACvD,MAAM,SAAS,MAAM,4BAA4B,cAAc,QAAQ;EAGvE,MAAM,cAAc,IAAI,aAAa;EAGrC,MAAM,uBAAuB,OAAO,cAAc,OAAO,UAAU;AACjE,WAAQ,KAAK,8CAA8C,QAAQ,IAAI,MAAM;IAE7E;EAGF,IAAIA,kBAAyC;EAE7C,MAAM,6BAA6B;AACjC,OAAI,gBACF,cAAa,gBAAgB;AAG/B,qBAAkB,iBAAiB;AACjC,QAAI,CAAC,YAAY,WAAW;AAC1B,aAAQ,KAAK,wCAAwC,QAAQ,+BAA+B;AAC5F,iBAAY,KAAK;;MAElB,IAAM;;AAIX,wBAAsB;AAGtB,SAAO,OAAO,GAAG,cAAc;AAC7B,yBAAsB;IACtB;AAEF,SAAO,OAAO,GAAG,aAAa;AAC5B,yBAAsB;IACtB;AAGF,SAAO,OAAO,KAAK,aAAa,EAAE,KAAK,OAAO,CAAC;AAG/C,QAAM;AACN,cAAY,KAAK;AAGjB,MAAI,gBACF,cAAa,gBAAgB;AAG/B,SAAO;;CAEV,CAAC"}
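For orientation, a minimal sketch of the { stream, fragmentIndex } shape returned by the reworked generateSingleTrackFromPath, based on the source embedded in the map above (tasks/generateTrack.js consumes only the stream half); the input path and track id below are placeholders:

// Sketch only (TypeScript, as used from inside the package); placeholder path and trackId.
import { createWriteStream } from "node:fs";
import { generateSingleTrackFromPath } from "./generateSingleTrack.js";

const { stream, fragmentIndex } = await generateSingleTrackFromPath("/videos/in.mp4", 1);
stream.pipe(createWriteStream("/tmp/in.track-1.mp4")); // fragmented single-track MP4
const index = await fragmentIndex;                     // Record<number, TrackFragmentIndex>
console.log(Object.keys(index));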
package/dist/idempotentTask.d.ts
CHANGED
@@ -1,12 +1,11 @@
-import { Readable } from
-
-
-
-
+import { Readable } from "node:stream";
+
+//#region src/idempotentTask.d.ts
+
+interface TaskResult {
+md5Sum: string;
+cachePath: string;
 }
-
-
-
-}
-export declare const idempotentTask: <T extends unknown[]>({ label, filename, runner, }: TaskOptions<T>) => (rootDir: string, absolutePath: string, ...args: T) => Promise<TaskResult>;
-export {};
+//#endregion
+export { TaskResult };
+//# sourceMappingURL=idempotentTask.d.ts.map
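The embedded source in idempotentTask.js.map below shows the full TaskOptions shape. As a hedged sketch (the "thumbnail" label, its width argument, and the runner body are invented for illustration), a task built with idempotentTask caches its runner output under <rootDir>/.cache/<md5 of input>/<filename>:

// Sketch only (TypeScript, as used from inside the package); label, arguments and runner body are illustrative.
import { idempotentTask } from "./idempotentTask.js";

const thumbnailTask = idempotentTask({
  label: "thumbnail",
  filename: (absolutePath: string, width: number) => `thumb-${width}.txt`,
  // runner may return a string (written to the cache file) or a Readable (piped into it)
  runner: async (absolutePath: string, width: number) =>
    `placeholder for ${absolutePath} at ${width}px`,
});

// Results are cached under <rootDir>/.cache/<md5 of input file>/<filename>
const { cachePath, md5Sum } = await thumbnailTask("/tmp/cache-root", "/videos/in.mp4", 320);
console.log(cachePath, md5Sum);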
package/dist/idempotentTask.js
CHANGED
@@ -4,6 +4,8 @@ import debug from "debug";
 import { Readable } from "node:stream";
 import { mkdir, stat, writeFile } from "node:fs/promises";
 import path from "node:path";
+
+//#region src/idempotentTask.ts
 const idempotentTask = ({ label, filename, runner }) => {
 const tasks = {};
 const downloadTasks = {};
@@ -113,4 +115,7 @@ const idempotentTask = ({ label, filename, runner }) => {
 return await fullTask;
 };
 };
+
+//#endregion
 export { idempotentTask };
+//# sourceMappingURL=idempotentTask.js.map
package/dist/idempotentTask.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"idempotentTask.js","names":["tasks: Record<string, Promise<TaskResult>>","downloadTasks: Record<string, Promise<string>>"],"sources":["../src/idempotentTask.ts"],"sourcesContent":["import { createWriteStream, existsSync } from \"node:fs\";\nimport path from \"node:path\";\nimport { md5FilePath } from \"./md5.js\";\nimport debug from \"debug\";\nimport { mkdir, writeFile, stat } from \"node:fs/promises\";\nimport { Readable } from \"node:stream\";\n\ninterface TaskOptions<T extends unknown[]> {\n label: string;\n filename: (absolutePath: string, ...args: T) => string;\n runner: (absolutePath: string, ...args: T) => Promise<string | Readable>;\n}\n\nexport interface TaskResult {\n md5Sum: string;\n cachePath: string;\n}\n\nexport const idempotentTask = <T extends unknown[]>({\n label,\n filename,\n runner,\n}: TaskOptions<T>) => {\n const tasks: Record<string, Promise<TaskResult>> = {};\n const downloadTasks: Record<string, Promise<string>> = {};\n\n // Helper function to validate cache file completeness\n const isValidCacheFile = async (filePath: string, allowEmpty = false): Promise<boolean> => {\n try {\n const stats = await stat(filePath);\n // File must exist and either have content or be explicitly allowed to be empty\n return allowEmpty || stats.size > 0;\n } catch {\n return false;\n }\n };\n\n return async (\n rootDir: string,\n absolutePath: string,\n ...args: T\n ): Promise<TaskResult> => {\n const log = debug(`ef:${label}`);\n const cacheDirRoot = path.join(rootDir, \".cache\");\n await mkdir(cacheDirRoot, { recursive: true });\n\n log(`Running ef:${label} task for ${absolutePath} in ${rootDir}`);\n\n // Handle HTTP downloads with proper race condition protection\n if (absolutePath.includes(\"http\")) {\n const safePath = absolutePath.replace(/[^a-zA-Z0-9]/g, \"_\");\n const downloadCachePath = path.join(rootDir, \".cache\", `${safePath}.file`);\n\n // Check if already downloaded and valid (allow empty downloads)\n if (existsSync(downloadCachePath) && await isValidCacheFile(downloadCachePath, true)) {\n log(`Already cached ${absolutePath}`);\n absolutePath = downloadCachePath;\n } else {\n // Use download task deduplication to prevent concurrent downloads\n const downloadKey = absolutePath;\n if (!downloadTasks[downloadKey]) {\n log(`Starting download for ${absolutePath}`);\n downloadTasks[downloadKey] = (async () => {\n try {\n const response = await fetch(absolutePath);\n if (!response.ok) {\n throw new Error(`Failed to fetch file from URL ${absolutePath}: ${response.status} ${response.statusText}`);\n }\n\n const stream = response.body;\n if (!stream) {\n throw new Error(`No response body for URL ${absolutePath}`);\n }\n\n // Use temporary file to prevent reading incomplete downloads\n const tempPath = `${downloadCachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n\n // @ts-ignore node web stream support in typescript is incorrect about this.\n const readable = Readable.fromWeb(stream);\n readable.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n readable.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n // Atomically move completed file to final location\n const { rename } = await import(\"node:fs/promises\");\n await rename(tempPath, downloadCachePath);\n\n log(`Download completed for ${absolutePath}`);\n return downloadCachePath;\n } catch (error) {\n log(`Download failed for ${absolutePath}: ${error}`);\n // Clean up task reference on failure\n 
delete downloadTasks[downloadKey];\n throw error;\n }\n })();\n }\n\n absolutePath = await downloadTasks[downloadKey];\n // Clean up completed task\n delete downloadTasks[downloadKey];\n }\n }\n\n const md5 = await md5FilePath(absolutePath);\n const cacheDir = path.join(cacheDirRoot, md5);\n log(`Cache dir: ${cacheDir}`);\n await mkdir(cacheDir, { recursive: true });\n\n const cachePath = path.join(cacheDir, filename(absolutePath, ...args));\n const key = cachePath;\n\n // Check if cache exists and is valid (not zero-byte)\n if (existsSync(cachePath) && await isValidCacheFile(cachePath)) {\n log(`Returning cached ef:${label} task for ${key}`);\n return { cachePath, md5Sum: md5 };\n }\n\n const maybeTask = tasks[key];\n if (maybeTask) {\n log(`Returning existing ef:${label} task for ${key}`);\n return await maybeTask;\n }\n\n log(`Creating new ef:${label} task for ${key}`);\n const fullTask = (async (): Promise<TaskResult> => {\n try {\n log(`Awaiting task for ${key}`);\n const result = await runner(absolutePath, ...args);\n\n if (result instanceof Readable) {\n log(`Piping task for ${key} to cache`);\n // Use temporary file to prevent reading incomplete results\n const tempPath = `${cachePath}.tmp`;\n const writeStream = createWriteStream(tempPath);\n result.pipe(writeStream);\n\n await new Promise<void>((resolve, reject) => {\n result.on(\"error\", reject);\n writeStream.on(\"error\", reject);\n writeStream.on(\"finish\", () => resolve());\n });\n\n // Atomically move completed file to final location\n const { rename } = await import(\"node:fs/promises\");\n await rename(tempPath, cachePath);\n } else {\n log(`Writing to ${cachePath}`);\n await writeFile(cachePath, result);\n }\n\n // Clean up task reference after successful completion\n delete tasks[key];\n\n return {\n md5Sum: md5,\n cachePath,\n };\n } catch (error) {\n // Clean up task reference on failure\n delete tasks[key];\n throw error;\n }\n })();\n\n tasks[key] = fullTask;\n return await fullTask;\n 
};\n};\n"],"mappings":";;;;;;;;AAkBA,MAAa,kBAAuC,EAClD,OACA,UACA,aACoB;CACpB,MAAMA,QAA6C,EAAE;CACrD,MAAMC,gBAAiD,EAAE;CAGzD,MAAM,mBAAmB,OAAO,UAAkB,aAAa,UAA4B;AACzF,MAAI;GACF,MAAM,QAAQ,MAAM,KAAK,SAAS;AAElC,UAAO,cAAc,MAAM,OAAO;UAC5B;AACN,UAAO;;;AAIX,QAAO,OACL,SACA,cACA,GAAG,SACqB;EACxB,MAAM,MAAM,MAAM,MAAM,QAAQ;EAChC,MAAM,eAAe,KAAK,KAAK,SAAS,SAAS;AACjD,QAAM,MAAM,cAAc,EAAE,WAAW,MAAM,CAAC;AAE9C,MAAI,cAAc,MAAM,YAAY,aAAa,MAAM,UAAU;AAGjE,MAAI,aAAa,SAAS,OAAO,EAAE;GACjC,MAAM,WAAW,aAAa,QAAQ,iBAAiB,IAAI;GAC3D,MAAM,oBAAoB,KAAK,KAAK,SAAS,UAAU,GAAG,SAAS,OAAO;AAG1E,OAAI,WAAW,kBAAkB,IAAI,MAAM,iBAAiB,mBAAmB,KAAK,EAAE;AACpF,QAAI,kBAAkB,eAAe;AACrC,mBAAe;UACV;IAEL,MAAM,cAAc;AACpB,QAAI,CAAC,cAAc,cAAc;AAC/B,SAAI,yBAAyB,eAAe;AAC5C,mBAAc,gBAAgB,YAAY;AACxC,UAAI;OACF,MAAM,WAAW,MAAM,MAAM,aAAa;AAC1C,WAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MAAM,iCAAiC,aAAa,IAAI,SAAS,OAAO,GAAG,SAAS,aAAa;OAG7G,MAAM,SAAS,SAAS;AACxB,WAAI,CAAC,OACH,OAAM,IAAI,MAAM,4BAA4B,eAAe;OAI7D,MAAM,WAAW,GAAG,kBAAkB;OACtC,MAAM,cAAc,kBAAkB,SAAS;OAG/C,MAAM,WAAW,SAAS,QAAQ,OAAO;AACzC,gBAAS,KAAK,YAAY;AAE1B,aAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,iBAAS,GAAG,SAAS,OAAO;AAC5B,oBAAY,GAAG,SAAS,OAAO;AAC/B,oBAAY,GAAG,gBAAgB,SAAS,CAAC;SACzC;OAGF,MAAM,EAAE,WAAW,MAAM,OAAO;AAChC,aAAM,OAAO,UAAU,kBAAkB;AAEzC,WAAI,0BAA0B,eAAe;AAC7C,cAAO;eACA,OAAO;AACd,WAAI,uBAAuB,aAAa,IAAI,QAAQ;AAEpD,cAAO,cAAc;AACrB,aAAM;;SAEN;;AAGN,mBAAe,MAAM,cAAc;AAEnC,WAAO,cAAc;;;EAIzB,MAAM,MAAM,MAAM,YAAY,aAAa;EAC3C,MAAM,WAAW,KAAK,KAAK,cAAc,IAAI;AAC7C,MAAI,cAAc,WAAW;AAC7B,QAAM,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC;EAE1C,MAAM,YAAY,KAAK,KAAK,UAAU,SAAS,cAAc,GAAG,KAAK,CAAC;EACtE,MAAM,MAAM;AAGZ,MAAI,WAAW,UAAU,IAAI,MAAM,iBAAiB,UAAU,EAAE;AAC9D,OAAI,uBAAuB,MAAM,YAAY,MAAM;AACnD,UAAO;IAAE;IAAW,QAAQ;IAAK;;EAGnC,MAAM,YAAY,MAAM;AACxB,MAAI,WAAW;AACb,OAAI,yBAAyB,MAAM,YAAY,MAAM;AACrD,UAAO,MAAM;;AAGf,MAAI,mBAAmB,MAAM,YAAY,MAAM;EAC/C,MAAM,YAAY,YAAiC;AACjD,OAAI;AACF,QAAI,qBAAqB,MAAM;IAC/B,MAAM,SAAS,MAAM,OAAO,cAAc,GAAG,KAAK;AAElD,QAAI,kBAAkB,UAAU;AAC9B,SAAI,mBAAmB,IAAI,WAAW;KAEtC,MAAM,WAAW,GAAG,UAAU;KAC9B,MAAM,cAAc,kBAAkB,SAAS;AAC/C,YAAO,KAAK,YAAY;AAExB,WAAM,IAAI,SAAe,SAAS,WAAW;AAC3C,aAAO,GAAG,SAAS,OAAO;AAC1B,kBAAY,GAAG,SAAS,OAAO;AAC/B,kBAAY,GAAG,gBAAgB,SAAS,CAAC;OACzC;KAGF,MAAM,EAAE,WAAW,MAAM,OAAO;AAChC,WAAM,OAAO,UAAU,UAAU;WAC5B;AACL,SAAI,cAAc,YAAY;AAC9B,WAAM,UAAU,WAAW,OAAO;;AAIpC,WAAO,MAAM;AAEb,WAAO;KACL,QAAQ;KACR;KACD;YACM,OAAO;AAEd,WAAO,MAAM;AACb,UAAM;;MAEN;AAEJ,QAAM,OAAO;AACb,SAAO,MAAM"}
package/dist/index.d.ts
CHANGED
@@ -1,10 +1,10 @@
-
-
-
-
-
-
-
-
-
-export { VideoRenderOptions
+import { AudioStreamSchema, AudioTrackFragmentIndex, PacketProbe, PacketProbeSchema, Probe, ProbeSchema, StreamSchema, TrackFragmentIndex, TrackSegment, VideoStreamSchema, VideoTrackFragmentIndex } from "./Probe.js";
+import { generateFragmentIndex } from "./generateFragmentIndex.js";
+import { md5Buffer, md5Directory, md5FilePath, md5ReadStream } from "./md5.js";
+import { TaskResult } from "./idempotentTask.js";
+import { generateTrackFragmentIndex, generateTrackFragmentIndexFromPath } from "./tasks/generateTrackFragmentIndex.js";
+import { generateTrack, generateTrackFromPath } from "./tasks/generateTrack.js";
+import { findOrCreateCaptions, generateCaptionDataFromPath } from "./tasks/findOrCreateCaptions.js";
+import { cacheImage } from "./tasks/cacheImage.js";
+import { VideoRenderOptions } from "./VideoRenderOptions.js";
+export { type AudioStreamSchema, type AudioTrackFragmentIndex, PacketProbe, type PacketProbeSchema, Probe, type ProbeSchema, type StreamSchema, type TaskResult, type TrackFragmentIndex, type TrackSegment, VideoRenderOptions, type VideoStreamSchema, type VideoTrackFragmentIndex, cacheImage, findOrCreateCaptions, generateCaptionDataFromPath, generateFragmentIndex, generateTrack, generateTrackFragmentIndex, generateTrackFragmentIndexFromPath, generateTrackFromPath, md5Buffer, md5Directory, md5FilePath, md5ReadStream };
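The rebuilt entry point re-exports the task helpers alongside the Probe and schema types, so consumers import everything from the package root. A hedged sketch, with placeholder file paths:

// Sketch only: placeholder paths; the imported names follow the new index.d.ts above.
import { Probe, md5FilePath, generateTrackFragmentIndexFromPath } from "@editframe/assets";

const probe = await Probe.probePath("/videos/in.mp4");
console.log(probe.streams.length);

const md5 = await md5FilePath("/videos/in.mp4");
const index = await generateTrackFragmentIndexFromPath("/videos/in.mp4");
console.log(md5, Object.keys(index));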
package/dist/index.js
CHANGED
@@ -6,4 +6,5 @@ import { generateTrack, generateTrackFromPath } from "./tasks/generateTrack.js";
 import { findOrCreateCaptions, generateCaptionDataFromPath } from "./tasks/findOrCreateCaptions.js";
 import { cacheImage } from "./tasks/cacheImage.js";
 import { VideoRenderOptions } from "./VideoRenderOptions.js";
-
+
+export { PacketProbe, Probe, VideoRenderOptions, cacheImage, findOrCreateCaptions, generateCaptionDataFromPath, generateFragmentIndex, generateTrack, generateTrackFragmentIndex, generateTrackFragmentIndexFromPath, generateTrackFromPath, md5Buffer, md5Directory, md5FilePath, md5ReadStream };
package/dist/md5.d.ts
CHANGED
@@ -1,6 +1,11 @@
-import { ReadStream } from
-import { Ora } from
-
-
-
-
+import { ReadStream } from "node:fs";
+import { Ora } from "ora";
+
+//#region src/md5.d.ts
+declare function md5Directory(directory: string, spinner?: Ora): Promise<string>;
+declare function md5FilePath(filePath: string): Promise<string>;
+declare function md5ReadStream(readStream: ReadStream): Promise<string>;
+declare function md5Buffer(buffer: Buffer): string;
+//#endregion
+export { md5Buffer, md5Directory, md5FilePath, md5ReadStream };
+//# sourceMappingURL=md5.d.ts.map
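As the src/md5.ts source embedded in the map below shows, every helper returns the MD5 digest reformatted into a dashed, UUID-style string via addDashesToUUID. A hedged usage sketch with placeholder paths:

// Sketch only: placeholder paths; return values are dashed, UUID-style MD5 hex digests.
import { md5Buffer, md5FilePath, md5Directory } from "@editframe/assets";

console.log(md5Buffer(Buffer.from("hello")));     // "5d41402a-bc4b-2a76-b971-9d911017c592"
console.log(await md5FilePath("/videos/in.mp4")); // streams the file through crypto.createHash("md5")
console.log(await md5Directory("/videos"));       // recursive: hashes every file, shows an ora spinner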
package/dist/md5.js
CHANGED
@@ -3,6 +3,8 @@ import { readdir } from "node:fs/promises";
 import { join } from "node:path";
 import crypto from "node:crypto";
 import ora from "ora";
+
+//#region src/md5.ts
 async function md5Directory(directory, spinner) {
 const shouldEndSpinner = !spinner;
 spinner ||= ora("⚡️ Calculating MD5").start();
@@ -46,4 +48,7 @@ function addDashesToUUID(uuidWithoutDashes) {
 if (uuidWithoutDashes.length !== 32) throw new Error("Invalid UUID without dashes. Expected 32 characters.");
 return uuidWithoutDashes.slice(0, 8) + "-" + uuidWithoutDashes.slice(8, 12) + "-" + uuidWithoutDashes.slice(12, 16) + "-" + uuidWithoutDashes.slice(16, 20) + "-" + uuidWithoutDashes.slice(20, 32);
 }
+
+//#endregion
 export { md5Buffer, md5Directory, md5FilePath, md5ReadStream };
+//# sourceMappingURL=md5.js.map
package/dist/md5.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"md5.js","names":[],"sources":["../src/md5.ts"],"sourcesContent":["import { type ReadStream, createReadStream } from \"node:fs\";\nimport { readdir } from \"node:fs/promises\";\nimport { join } from \"node:path\";\nimport crypto from \"node:crypto\";\nimport ora, { type Ora } from \"ora\";\n\n// Recursively calculate the MD5 hash of all files in a directory\nexport async function md5Directory(directory: string, spinner?: Ora) {\n const shouldEndSpinner = !spinner;\n spinner ||= ora(\"⚡️ Calculating MD5\").start();\n spinner.suffixText = directory;\n const files = await readdir(directory, { withFileTypes: true });\n const hashes = await Promise.all(\n files.map(async (file) => {\n const filePath = join(directory, file.name);\n if (file.isDirectory()) {\n return md5Directory(filePath, spinner);\n }\n spinner.suffixText = filePath;\n return md5FilePath(filePath);\n }),\n );\n\n const hash = crypto.createHash(\"md5\");\n for (const fileHash of hashes) {\n hash.update(fileHash);\n }\n\n if (shouldEndSpinner) {\n spinner.succeed(\"MD5 calculated\");\n spinner.suffixText = directory;\n }\n return addDashesToUUID(hash.digest(\"hex\"));\n}\n\nexport async function md5FilePath(filePath: string) {\n const readStream = createReadStream(filePath);\n return md5ReadStream(readStream);\n}\n\nexport function md5ReadStream(readStream: ReadStream) {\n return new Promise<string>((resolve, reject) => {\n const hash = crypto.createHash(\"md5\");\n readStream.on(\"data\", (data) => {\n hash.update(data);\n });\n readStream.on(\"error\", reject);\n readStream.on(\"end\", () => {\n resolve(addDashesToUUID(hash.digest(\"hex\")));\n });\n });\n}\n\nexport function md5Buffer(buffer: Buffer) {\n const hash = crypto.createHash(\"md5\");\n hash.update(buffer);\n return addDashesToUUID(hash.digest(\"hex\"));\n}\n\nfunction addDashesToUUID(uuidWithoutDashes: string) {\n if (uuidWithoutDashes.length !== 32) {\n throw new Error(\"Invalid UUID without dashes. Expected 32 characters.\");\n }\n\n return (\n // biome-ignore lint/style/useTemplate: using a template makes a long line\n uuidWithoutDashes.slice(0, 8) +\n \"-\" +\n uuidWithoutDashes.slice(8, 12) +\n \"-\" +\n uuidWithoutDashes.slice(12, 16) +\n \"-\" +\n uuidWithoutDashes.slice(16, 20) +\n \"-\" +\n uuidWithoutDashes.slice(20, 32)\n );\n}\n"],"mappings":";;;;;;;AAOA,eAAsB,aAAa,WAAmB,SAAe;CACnE,MAAM,mBAAmB,CAAC;AAC1B,aAAY,IAAI,qBAAqB,CAAC,OAAO;AAC7C,SAAQ,aAAa;CACrB,MAAM,QAAQ,MAAM,QAAQ,WAAW,EAAE,eAAe,MAAM,CAAC;CAC/D,MAAM,SAAS,MAAM,QAAQ,IAC3B,MAAM,IAAI,OAAO,SAAS;EACxB,MAAM,WAAW,KAAK,WAAW,KAAK,KAAK;AAC3C,MAAI,KAAK,aAAa,CACpB,QAAO,aAAa,UAAU,QAAQ;AAExC,UAAQ,aAAa;AACrB,SAAO,YAAY,SAAS;GAC5B,CACH;CAED,MAAM,OAAO,OAAO,WAAW,MAAM;AACrC,MAAK,MAAM,YAAY,OACrB,MAAK,OAAO,SAAS;AAGvB,KAAI,kBAAkB;AACpB,UAAQ,QAAQ,iBAAiB;AACjC,UAAQ,aAAa;;AAEvB,QAAO,gBAAgB,KAAK,OAAO,MAAM,CAAC;;AAG5C,eAAsB,YAAY,UAAkB;AAElD,QAAO,cADY,iBAAiB,SAAS,CACb;;AAGlC,SAAgB,cAAc,YAAwB;AACpD,QAAO,IAAI,SAAiB,SAAS,WAAW;EAC9C,MAAM,OAAO,OAAO,WAAW,MAAM;AACrC,aAAW,GAAG,SAAS,SAAS;AAC9B,QAAK,OAAO,KAAK;IACjB;AACF,aAAW,GAAG,SAAS,OAAO;AAC9B,aAAW,GAAG,aAAa;AACzB,WAAQ,gBAAgB,KAAK,OAAO,MAAM,CAAC,CAAC;IAC5C;GACF;;AAGJ,SAAgB,UAAU,QAAgB;CACxC,MAAM,OAAO,OAAO,WAAW,MAAM;AACrC,MAAK,OAAO,OAAO;AACnB,QAAO,gBAAgB,KAAK,OAAO,MAAM,CAAC;;AAG5C,SAAS,gBAAgB,mBAA2B;AAClD,KAAI,kBAAkB,WAAW,GAC/B,OAAM,IAAI,MAAM,uDAAuD;AAGzE,QAEE,kBAAkB,MAAM,GAAG,EAAE,GAC7B,MACA,kBAAkB,MAAM,GAAG,GAAG,GAC9B,MACA,kBAAkB,MAAM,IAAI,GAAG,GAC/B,MACA,kBAAkB,MAAM,IAAI,GAAG,GAC/B,MACA,kBAAkB,MAAM,IAAI,GAAG"}
package/dist/tasks/cacheImage.d.ts
CHANGED
@@ -1 +1,7 @@
-
+import { TaskResult } from "../idempotentTask.js";
+
+//#region src/tasks/cacheImage.d.ts
+declare const cacheImage: (cacheRoot: string, absolutePath: string) => Promise<TaskResult>;
+//#endregion
+export { cacheImage };
+//# sourceMappingURL=cacheImage.d.ts.map
package/dist/tasks/cacheImage.js
CHANGED
@@ -1,7 +1,9 @@
 import { idempotentTask } from "../idempotentTask.js";
 import { createReadStream } from "node:fs";
 import path from "node:path";
-
+
+//#region src/tasks/cacheImage.ts
+const cacheImageTask = idempotentTask({
 label: "image",
 filename: (absolutePath) => path.basename(absolutePath),
 runner: async (absolutePath) => {
@@ -17,4 +19,7 @@ const cacheImage = async (cacheRoot, absolutePath) => {
 throw error;
 }
 };
+
+//#endregion
 export { cacheImage };
+//# sourceMappingURL=cacheImage.js.map
package/dist/tasks/cacheImage.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"cacheImage.js","names":[],"sources":["../../src/tasks/cacheImage.ts"],"sourcesContent":["import { idempotentTask } from \"../idempotentTask.js\";\nimport { createReadStream } from \"node:fs\";\n\nimport path from \"node:path\";\n\nconst cacheImageTask = idempotentTask({\n label: \"image\",\n filename: (absolutePath: string) => path.basename(absolutePath),\n runner: async (absolutePath) => {\n return createReadStream(absolutePath);\n },\n});\n\nexport const cacheImage = async (cacheRoot: string, absolutePath: string) => {\n try {\n return await cacheImageTask(cacheRoot, absolutePath);\n } catch (error) {\n console.error(error);\n console.trace(\"Error caching image\", error);\n throw error;\n }\n};\n"],"mappings":";;;;;AAKA,MAAM,iBAAiB,eAAe;CACpC,OAAO;CACP,WAAW,iBAAyB,KAAK,SAAS,aAAa;CAC/D,QAAQ,OAAO,iBAAiB;AAC9B,SAAO,iBAAiB,aAAa;;CAExC,CAAC;AAEF,MAAa,aAAa,OAAO,WAAmB,iBAAyB;AAC3E,KAAI;AACF,SAAO,MAAM,eAAe,WAAW,aAAa;UAC7C,OAAO;AACd,UAAQ,MAAM,MAAM;AACpB,UAAQ,MAAM,uBAAuB,MAAM;AAC3C,QAAM"}
package/dist/tasks/findOrCreateCaptions.d.ts
CHANGED
@@ -1,2 +1,8 @@
-
-
+import { TaskResult } from "../idempotentTask.js";
+
+//#region src/tasks/findOrCreateCaptions.d.ts
+declare const generateCaptionDataFromPath: (absolutePath: string) => Promise<any>;
+declare const findOrCreateCaptions: (cacheRoot: string, absolutePath: string) => Promise<TaskResult>;
+//#endregion
+export { findOrCreateCaptions, generateCaptionDataFromPath };
+//# sourceMappingURL=findOrCreateCaptions.d.ts.map
package/dist/tasks/findOrCreateCaptions.js
CHANGED
@@ -3,15 +3,17 @@ import { exec } from "node:child_process";
 import { promisify } from "node:util";
 import debug from "debug";
 import { basename } from "node:path";
-
-
+
+//#region src/tasks/findOrCreateCaptions.ts
+const execPromise = promisify(exec);
+const log = debug("ef:generateCaptions");
 const generateCaptionDataFromPath = async (absolutePath) => {
 const command = `whisper_timestamped --language en --efficient --output_format vtt ${absolutePath}`;
 log(`Running command: ${command}`);
 const { stdout } = await execPromise(command);
 return stdout;
 };
-
+const generateCaptionDataTask = idempotentTask({
 label: "captions",
 filename: (absolutePath) => `${basename(absolutePath)}.captions.json`,
 runner: generateCaptionDataFromPath
@@ -24,4 +26,7 @@ const findOrCreateCaptions = async (cacheRoot, absolutePath) => {
 throw error;
 }
 };
+
+//#endregion
 export { findOrCreateCaptions, generateCaptionDataFromPath };
+//# sourceMappingURL=findOrCreateCaptions.js.map
package/dist/tasks/findOrCreateCaptions.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"findOrCreateCaptions.js","names":[],"sources":["../../src/tasks/findOrCreateCaptions.ts"],"sourcesContent":["import { basename } from \"node:path\";\nimport { promisify } from \"node:util\";\nimport { exec } from \"node:child_process\";\n\nimport debug from \"debug\";\n\nimport { idempotentTask } from \"../idempotentTask.js\";\n\nconst execPromise = promisify(exec);\n\nconst log = debug(\"ef:generateCaptions\");\n\nexport const generateCaptionDataFromPath = async (absolutePath: string) => {\n const command = `whisper_timestamped --language en --efficient --output_format vtt ${absolutePath}`;\n log(`Running command: ${command}`);\n const { stdout } = await execPromise(command);\n return stdout;\n};\n\nconst generateCaptionDataTask = idempotentTask({\n label: \"captions\",\n filename: (absolutePath) => `${basename(absolutePath)}.captions.json`,\n runner: generateCaptionDataFromPath,\n});\n\nexport const findOrCreateCaptions = async (\n cacheRoot: string,\n absolutePath: string,\n) => {\n try {\n return await generateCaptionDataTask(cacheRoot, absolutePath);\n } catch (error) {\n console.trace(\"Error finding or creating captions\", error);\n throw error;\n }\n};\n"],"mappings":";;;;;;;AAQA,MAAM,cAAc,UAAU,KAAK;AAEnC,MAAM,MAAM,MAAM,sBAAsB;AAExC,MAAa,8BAA8B,OAAO,iBAAyB;CACzE,MAAM,UAAU,qEAAqE;AACrF,KAAI,oBAAoB,UAAU;CAClC,MAAM,EAAE,WAAW,MAAM,YAAY,QAAQ;AAC7C,QAAO;;AAGT,MAAM,0BAA0B,eAAe;CAC7C,OAAO;CACP,WAAW,iBAAiB,GAAG,SAAS,aAAa,CAAC;CACtD,QAAQ;CACT,CAAC;AAEF,MAAa,uBAAuB,OAClC,WACA,iBACG;AACH,KAAI;AACF,SAAO,MAAM,wBAAwB,WAAW,aAAa;UACtD,OAAO;AACd,UAAQ,MAAM,sCAAsC,MAAM;AAC1D,QAAM"}
package/dist/tasks/generateTrack.d.ts
CHANGED
@@ -1,3 +1,8 @@
-
-
-
+import { TaskResult } from "../idempotentTask.js";
+
+//#region src/tasks/generateTrack.d.ts
+declare const generateTrackFromPath: (absolutePath: string, trackId: number) => Promise<any>;
+declare const generateTrack: (cacheRoot: string, absolutePath: string, url: string) => Promise<TaskResult>;
+//#endregion
+export { generateTrack, generateTrackFromPath };
+//# sourceMappingURL=generateTrack.d.ts.map
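generateTrack resolves the track id from the request URL's query string (the source embedded further down documents the ?trackId=1 video / ?trackId=2 audio convention) and returns a cached TaskResult. A hedged sketch with placeholder paths:

// Sketch only: placeholder cache root, file path and request URL.
import { generateTrack } from "@editframe/assets";

const { cachePath, md5Sum } = await generateTrack(
  "/tmp/cache-root",          // cacheRoot: .cache/<md5>/ entries are written beneath this
  "/videos/in.mp4",           // absolutePath: local file (http(s) sources are downloaded and cached first)
  "/assets/in.mp4?trackId=1", // url: trackId is read from the query string (1-based)
);
console.log(cachePath, md5Sum); // e.g. .../.cache/<md5>/in.mp4.track-1.mp4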
package/dist/tasks/generateTrack.js
CHANGED
@@ -2,6 +2,8 @@ import { idempotentTask } from "../idempotentTask.js";
 import { generateSingleTrackFromPath } from "../generateSingleTrack.js";
 import debug from "debug";
 import { basename } from "node:path";
+
+//#region src/tasks/generateTrack.ts
 const generateTrackFromPath = async (absolutePath, trackId) => {
 debug("ef:generateTrackFragment")(`Generating track ${trackId} for ${absolutePath}`);
 return (await generateSingleTrackFromPath(absolutePath, trackId)).stream;
@@ -22,4 +24,7 @@ const generateTrack = async (cacheRoot, absolutePath, url) => {
 throw error;
 }
 };
+
+//#endregion
 export { generateTrack, generateTrackFromPath };
+//# sourceMappingURL=generateTrack.js.map
package/dist/tasks/generateTrack.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"generateTrack.js","names":[],"sources":["../../src/tasks/generateTrack.ts"],"sourcesContent":["import { idempotentTask } from \"../idempotentTask.js\";\nimport debug from \"debug\";\nimport { basename } from \"node:path\";\nimport { generateSingleTrackFromPath } from \"../generateSingleTrack.js\";\n\nexport const generateTrackFromPath = async (\n absolutePath: string,\n trackId: number,\n) => {\n const log = debug(\"ef:generateTrackFragment\");\n log(`Generating track ${trackId} for ${absolutePath}`);\n\n // Use the single-track implementation\n const result = await generateSingleTrackFromPath(absolutePath, trackId);\n\n // Return just the stream for compatibility with existing API\n return result.stream;\n};\n\nexport const generateTrackTask = idempotentTask({\n label: \"track\",\n filename: (absolutePath: string, trackId: number) =>\n `${basename(absolutePath)}.track-${trackId}.mp4`,\n runner: generateTrackFromPath,\n});\n\nexport const generateTrack = async (\n cacheRoot: string,\n absolutePath: string,\n url: string,\n) => {\n try {\n const trackId = new URL(`http://localhost${url}`).searchParams.get(\n \"trackId\",\n );\n if (trackId === null) {\n throw new Error(\n \"No trackId provided. It must be specified in the query string: ?trackId=1 (for video) or ?trackId=2 (for audio)\",\n );\n }\n return await generateTrackTask(cacheRoot, absolutePath, Number(trackId));\n } catch (error) {\n console.error(error);\n console.trace(\"Error generating track\", error);\n throw error;\n }\n};\n"],"mappings":";;;;;;AAKA,MAAa,wBAAwB,OACnC,cACA,YACG;AAEH,CADY,MAAM,2BAA2B,CACzC,oBAAoB,QAAQ,OAAO,eAAe;AAMtD,SAHe,MAAM,4BAA4B,cAAc,QAAQ,EAGzD;;AAGhB,MAAa,oBAAoB,eAAe;CAC9C,OAAO;CACP,WAAW,cAAsB,YAC/B,GAAG,SAAS,aAAa,CAAC,SAAS,QAAQ;CAC7C,QAAQ;CACT,CAAC;AAEF,MAAa,gBAAgB,OAC3B,WACA,cACA,QACG;AACH,KAAI;EACF,MAAM,UAAU,IAAI,IAAI,mBAAmB,MAAM,CAAC,aAAa,IAC7D,UACD;AACD,MAAI,YAAY,KACd,OAAM,IAAI,MACR,kHACD;AAEH,SAAO,MAAM,kBAAkB,WAAW,cAAc,OAAO,QAAQ,CAAC;UACjE,OAAO;AACd,UAAQ,MAAM,MAAM;AACpB,UAAQ,MAAM,0BAA0B,MAAM;AAC9C,QAAM"}
package/dist/tasks/generateTrackFragmentIndex.d.ts
CHANGED
@@ -1,3 +1,9 @@
-import { TrackFragmentIndex } from
-
-
+import { TrackFragmentIndex } from "../Probe.js";
+import { TaskResult } from "../idempotentTask.js";
+
+//#region src/tasks/generateTrackFragmentIndex.d.ts
+declare const generateTrackFragmentIndexFromPath: (absolutePath: string) => Promise<Record<number, TrackFragmentIndex>>;
+declare const generateTrackFragmentIndex: (cacheRoot: string, absolutePath: string) => Promise<TaskResult>;
+//#endregion
+export { generateTrackFragmentIndex, generateTrackFragmentIndexFromPath };
+//# sourceMappingURL=generateTrackFragmentIndex.d.ts.map
package/dist/tasks/generateTrackFragmentIndex.js
CHANGED
@@ -3,6 +3,8 @@ import { generateFragmentIndex } from "../generateFragmentIndex.js";
 import { idempotentTask } from "../idempotentTask.js";
 import debug from "debug";
 import { basename } from "node:path";
+
+//#region src/tasks/generateTrackFragmentIndex.ts
 const generateTrackFragmentIndexFromPath = async (absolutePath) => {
 const log = debug("ef:generateTrackFragment");
 const probe = await Probe.probePath(absolutePath);
@@ -29,7 +31,7 @@ const generateTrackFragmentIndexFromPath = async (absolutePath) => {
 }
 return trackFragmentIndexes;
 };
-
+const generateTrackFragmentIndexTask = idempotentTask({
 label: "trackFragmentIndex",
 filename: (absolutePath) => `${basename(absolutePath)}.tracks.json`,
 runner: async (absolutePath) => {
@@ -45,4 +47,7 @@ const generateTrackFragmentIndex = async (cacheRoot, absolutePath) => {
 throw error;
 }
 };
+
+//#endregion
 export { generateTrackFragmentIndex, generateTrackFragmentIndexFromPath };
+//# sourceMappingURL=generateTrackFragmentIndex.js.map
package/dist/tasks/generateTrackFragmentIndex.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"generateTrackFragmentIndex.js","names":["startTimeOffsetMs: number | undefined","trackFragmentIndexes: Record<number, TrackFragmentIndex>"],"sources":["../../src/tasks/generateTrackFragmentIndex.ts"],"sourcesContent":["import { idempotentTask } from \"../idempotentTask.js\";\nimport debug from \"debug\";\nimport { basename } from \"node:path\";\nimport { Probe } from \"../Probe.js\";\nimport { generateFragmentIndex } from \"../generateFragmentIndex.js\";\nimport type { TrackFragmentIndex } from \"../Probe.js\";\n\nexport const generateTrackFragmentIndexFromPath = async (\n absolutePath: string,\n) => {\n const log = debug(\"ef:generateTrackFragment\");\n const probe = await Probe.probePath(absolutePath);\n\n // Extract timing offset from probe metadata (same logic as processISOBMFF.ts)\n let startTimeOffsetMs: number | undefined;\n\n // First check format-level start_time\n if (probe.format.start_time && Number(probe.format.start_time) !== 0) {\n startTimeOffsetMs = Number(probe.format.start_time) * 1000;\n log(`Extracted format start_time offset: ${probe.format.start_time}s (${startTimeOffsetMs}ms)`);\n } else {\n // Check for video stream start_time (more common)\n const videoStream = probe.streams.find(stream => stream.codec_type === 'video');\n if (videoStream && videoStream.start_time && Number(videoStream.start_time) !== 0) {\n startTimeOffsetMs = Number(videoStream.start_time) * 1000;\n log(`Extracted video stream start_time offset: ${videoStream.start_time}s (${startTimeOffsetMs}ms)`);\n } else {\n log(\"No format/stream timing offset found - will detect from composition time\");\n }\n }\n\n log(`Generating track fragment index for ${absolutePath} using single-track approach`);\n\n // FIXED: Generate fragment indexes from individual single-track files\n // This ensures byte offsets match the actual single-track files that clients will request\n const trackFragmentIndexes: Record<number, TrackFragmentIndex> = {};\n\n // Process each audio/video stream as a separate track\n for (let streamIndex = 0; streamIndex < probe.streams.length; streamIndex++) {\n const stream = probe.streams[streamIndex]!;\n\n // Only process audio and video streams\n if (stream.codec_type !== 'audio' && stream.codec_type !== 'video') {\n continue;\n }\n\n const trackId = streamIndex + 1; // Convert to 1-based track ID\n log(`Processing track ${trackId} (${stream.codec_type})`);\n\n // Generate single-track file and its fragment index\n const trackStream = probe.createTrackReadstream(streamIndex);\n const trackIdMapping = { 0: trackId }; // Map single-track stream index 0 to original track ID\n\n const singleTrackIndexes = await generateFragmentIndex(\n trackStream,\n startTimeOffsetMs,\n trackIdMapping\n );\n\n // Merge the single-track index into the combined result\n Object.assign(trackFragmentIndexes, singleTrackIndexes);\n }\n\n return trackFragmentIndexes;\n};\n\nconst generateTrackFragmentIndexTask = idempotentTask({\n label: \"trackFragmentIndex\",\n filename: (absolutePath) => `${basename(absolutePath)}.tracks.json`,\n runner: async (absolutePath: string) => {\n const index = await generateTrackFragmentIndexFromPath(absolutePath);\n return JSON.stringify(index, null, 2);\n },\n});\n\nexport const generateTrackFragmentIndex = async (\n cacheRoot: string,\n absolutePath: string,\n) => {\n try {\n return await generateTrackFragmentIndexTask(cacheRoot, absolutePath);\n } catch (error) {\n console.trace(\"Error generating track fragment index\", error);\n throw error;\n 
}\n};\n"],"mappings":";;;;;;;AAOA,MAAa,qCAAqC,OAChD,iBACG;CACH,MAAM,MAAM,MAAM,2BAA2B;CAC7C,MAAM,QAAQ,MAAM,MAAM,UAAU,aAAa;CAGjD,IAAIA;AAGJ,KAAI,MAAM,OAAO,cAAc,OAAO,MAAM,OAAO,WAAW,KAAK,GAAG;AACpE,sBAAoB,OAAO,MAAM,OAAO,WAAW,GAAG;AACtD,MAAI,uCAAuC,MAAM,OAAO,WAAW,KAAK,kBAAkB,KAAK;QAC1F;EAEL,MAAM,cAAc,MAAM,QAAQ,MAAK,WAAU,OAAO,eAAe,QAAQ;AAC/E,MAAI,eAAe,YAAY,cAAc,OAAO,YAAY,WAAW,KAAK,GAAG;AACjF,uBAAoB,OAAO,YAAY,WAAW,GAAG;AACrD,OAAI,6CAA6C,YAAY,WAAW,KAAK,kBAAkB,KAAK;QAEpG,KAAI,2EAA2E;;AAInF,KAAI,uCAAuC,aAAa,8BAA8B;CAItF,MAAMC,uBAA2D,EAAE;AAGnE,MAAK,IAAI,cAAc,GAAG,cAAc,MAAM,QAAQ,QAAQ,eAAe;EAC3E,MAAM,SAAS,MAAM,QAAQ;AAG7B,MAAI,OAAO,eAAe,WAAW,OAAO,eAAe,QACzD;EAGF,MAAM,UAAU,cAAc;AAC9B,MAAI,oBAAoB,QAAQ,IAAI,OAAO,WAAW,GAAG;EAMzD,MAAM,qBAAqB,MAAM,sBAHb,MAAM,sBAAsB,YAAY,EAK1D,mBAJqB,EAAE,GAAG,SAAS,CAMpC;AAGD,SAAO,OAAO,sBAAsB,mBAAmB;;AAGzD,QAAO;;AAGT,MAAM,iCAAiC,eAAe;CACpD,OAAO;CACP,WAAW,iBAAiB,GAAG,SAAS,aAAa,CAAC;CACtD,QAAQ,OAAO,iBAAyB;EACtC,MAAM,QAAQ,MAAM,mCAAmC,aAAa;AACpE,SAAO,KAAK,UAAU,OAAO,MAAM,EAAE;;CAExC,CAAC;AAEF,MAAa,6BAA6B,OACxC,WACA,iBACG;AACH,KAAI;AACF,SAAO,MAAM,+BAA+B,WAAW,aAAa;UAC7D,OAAO;AACd,UAAQ,MAAM,yCAAyC,MAAM;AAC7D,QAAM"}
package/dist/truncateDecimal.js
CHANGED
package/dist/truncateDecimal.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"truncateDecimal.js","names":[],"sources":["../src/truncateDecimal.ts"],"sourcesContent":["// Helper to calculate AAC frame-aligned segment durations for audio\nexport function truncateDecimal(num: number, decimals: number) {\n const factor = 10 ** decimals;\n return Math.trunc(num * factor) / factor;\n}"],"mappings":";AACA,SAAgB,gBAAgB,KAAa,UAAkB;CAC7D,MAAM,SAAS,MAAM;AACrB,QAAO,KAAK,MAAM,MAAM,OAAO,GAAG"}
package/package.json
CHANGED
@@ -1,42 +1,49 @@
 {
 "name": "@editframe/assets",
-"version": "0.
+"version": "0.25.1-beta.0",
 "description": "",
-"exports": {
-".": {
-"import": {
-"types": "./dist/index.d.ts",
-"default": "./dist/index.js"
-}
-},
-"./types.json": {
-"import": {
-"default": "./types.json"
-}
-}
-},
 "type": "module",
 "scripts": {
 "typecheck": "tsc --noEmit --emitDeclarationOnly false",
-"build": "
-"build:watch": "
+"build": "tsdown",
+"build:watch": "tsdown --watch",
 "typedoc": "typedoc --json ./types.json --plugin typedoc-plugin-zod --excludeExternals ./src && jq -c . ./types.json > ./types.tmp.json && mv ./types.tmp.json ./types.json"
 },
 "author": "",
 "license": "UNLICENSED",
 "dependencies": {
-"@lit/context": "^1.1.6",
 "debug": "^4.3.5",
-"mediabunny": "^1.5.0",
 "ora": "^8.0.1",
 "zod": "^3.23.8"
 },
 "devDependencies": {
 "@types/dom-webcodecs": "^0.1.11",
 "@types/node": "^20.14.13",
-"
-
-
-
+"typescript": "^5.5.4"
+},
+"main": "./dist/index.js",
+"module": "./dist/index.js",
+"types": "./dist/index.d.ts",
+"exports": {
+".": {
+"import": {
+"types": "./dist/index.d.ts",
+"default": "./dist/index.js"
+}
+},
+"./package.json": "./package.json",
+"./types.json": "./types.json"
+},
+"publishConfig": {
+"exports": {
+".": {
+"import": {
+"types": "./dist/index.d.ts",
+"default": "./dist/index.js"
+}
+},
+"./package.json": "./package.json",
+"./types.json": "./types.json"
+}
 }
 }
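The rewritten manifest adds main/module/types fields and exposes ./package.json alongside ./types.json in both the exports map and publishConfig. A hedged sketch of how a consumer resolves these entries (the JSON import-attribute line assumes a recent Node runtime):

// Sketch only: "." resolves to ./dist/index.js with types from ./dist/index.d.ts;
// "./package.json" and "./types.json" are the only other exported subpaths.
import { Probe, VideoRenderOptions } from "@editframe/assets";

// Assumes Node's import-attribute syntax for JSON modules.
import pkg from "@editframe/assets/package.json" with { type: "json" };

console.log(pkg.version);               // "0.25.1-beta.0"
console.log(typeof Probe.probePath);    // "function"
console.log(typeof VideoRenderOptions); // value export re-exported from VideoRenderOptions.js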
package/tsdown.config.ts
ADDED
@@ -0,0 +1,13 @@
+import { defineConfig } from "tsdown";
+
+import { createTsdownConfig } from "../tsdown.config.base.ts";
+
+export default defineConfig(
+createTsdownConfig({
+platform: "node",
+additionalExports: {
+"./types.json": "./types.json",
+},
+}),
+);
+
package/dist/generateSingleTrack.d.ts
DELETED
@@ -1,8 +0,0 @@
-import { PassThrough } from 'node:stream';
-export declare const generateSingleTrackFromPath: (absolutePath: string, trackId: number) => Promise<{
-stream: PassThrough;
-fragmentIndex: Promise<Record<number, import('./Probe.js').TrackFragmentIndex>>;
-}>;
-export declare const generateSingleTrackTask: (rootDir: string, absolutePath: string, trackId: number) => Promise<import('./idempotentTask.js').TaskResult>;
-export declare const generateSingleTrack: (cacheRoot: string, absolutePath: string, url: string) => Promise<import('./idempotentTask.js').TaskResult>;
-export declare const generateSingleTrackWithIndex: (absolutePath: string, trackId: number) => Promise<PassThrough>;
package/dist/memoize.d.ts
DELETED
File without changes
package/dist/truncateDecimal.d.ts
DELETED
@@ -1 +0,0 @@
-export declare function truncateDecimal(num: number, decimals: number): number;
|