@editframe/cli 0.23.8-beta.0 → 0.25.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/VERSION.js +5 -1
- package/dist/VERSION.js.map +1 -0
- package/dist/commands/auth.js +8 -3
- package/dist/commands/auth.js.map +1 -0
- package/dist/commands/check.js +7 -2
- package/dist/commands/check.js.map +1 -0
- package/dist/commands/preview.js +6 -1
- package/dist/commands/preview.js.map +1 -0
- package/dist/commands/process-file.js +6 -1
- package/dist/commands/process-file.js.map +1 -0
- package/dist/commands/process.js +6 -1
- package/dist/commands/process.js.map +1 -0
- package/dist/commands/render.js +14 -2
- package/dist/commands/render.js.map +1 -0
- package/dist/commands/sync.js +6 -1
- package/dist/commands/sync.js.map +1 -0
- package/dist/commands/webhook.js +9 -4
- package/dist/commands/webhook.js.map +1 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +6 -1
- package/dist/index.js.map +1 -0
- package/dist/operations/processRenderInfo.js +5 -0
- package/dist/operations/processRenderInfo.js.map +1 -0
- package/dist/operations/syncAssetsDirectory/SubAssetSync.js +10 -5
- package/dist/operations/syncAssetsDirectory/SubAssetSync.js.map +1 -0
- package/dist/operations/syncAssetsDirectory/SyncCaption.js +5 -0
- package/dist/operations/syncAssetsDirectory/SyncCaption.js.map +1 -0
- package/dist/operations/syncAssetsDirectory/SyncFragmentIndex.js +5 -0
- package/dist/operations/syncAssetsDirectory/SyncFragmentIndex.js.map +1 -0
- package/dist/operations/syncAssetsDirectory/SyncImage.js +5 -0
- package/dist/operations/syncAssetsDirectory/SyncImage.js.map +1 -0
- package/dist/operations/syncAssetsDirectory/SyncStatus.js +7 -2
- package/dist/operations/syncAssetsDirectory/SyncStatus.js.map +1 -0
- package/dist/operations/syncAssetsDirectory/SyncTrack.js +5 -0
- package/dist/operations/syncAssetsDirectory/SyncTrack.js.map +1 -0
- package/dist/operations/syncAssetsDirectory/doAssetSync.js +4 -0
- package/dist/operations/syncAssetsDirectory/doAssetSync.js.map +1 -0
- package/dist/operations/syncAssetsDirectory.js +5 -0
- package/dist/operations/syncAssetsDirectory.js.map +1 -0
- package/dist/utils/createReadableStreamFromReadable.js +5 -0
- package/dist/utils/createReadableStreamFromReadable.js.map +1 -0
- package/dist/utils/index.js +6 -1
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/launchBrowserAndWaitForSDK.js +6 -1
- package/dist/utils/launchBrowserAndWaitForSDK.js.map +1 -0
- package/dist/utils/startPreviewServer.js +7 -2
- package/dist/utils/startPreviewServer.js.map +1 -0
- package/dist/utils/validateVideoResolution.js +7 -2
- package/dist/utils/validateVideoResolution.js.map +1 -0
- package/dist/utils/withSpinner.js +5 -0
- package/dist/utils/withSpinner.js.map +1 -0
- package/package.json +16 -16
- package/src/utils/startDevServer.ts +1 -1
- package/src/utils/startPreviewServer.ts +1 -1
- package/tsdown.config.ts +20 -0
- package/dist/VERSION.d.ts +0 -1
- package/dist/commands/auth.d.ts +0 -9
- package/dist/commands/check.d.ts +0 -1
- package/dist/commands/mux.d.ts +0 -1
- package/dist/commands/preview.d.ts +0 -1
- package/dist/commands/process-file.d.ts +0 -1
- package/dist/commands/process.d.ts +0 -1
- package/dist/commands/render.d.ts +0 -1
- package/dist/commands/sync.d.ts +0 -1
- package/dist/commands/webhook.d.ts +0 -7
- package/dist/operations/processRenderInfo.d.ts +0 -2
- package/dist/operations/syncAssetsDirectory/SubAssetSync.d.ts +0 -20
- package/dist/operations/syncAssetsDirectory/SyncCaption.d.ts +0 -19
- package/dist/operations/syncAssetsDirectory/SyncCaption.test.d.ts +0 -1
- package/dist/operations/syncAssetsDirectory/SyncFragmentIndex.d.ts +0 -20
- package/dist/operations/syncAssetsDirectory/SyncFragmentIndex.test.d.ts +0 -1
- package/dist/operations/syncAssetsDirectory/SyncImage.d.ts +0 -23
- package/dist/operations/syncAssetsDirectory/SyncImage.test.d.ts +0 -1
- package/dist/operations/syncAssetsDirectory/SyncStatus.d.ts +0 -37
- package/dist/operations/syncAssetsDirectory/SyncTrack.d.ts +0 -78
- package/dist/operations/syncAssetsDirectory/SyncTrack.test.d.ts +0 -1
- package/dist/operations/syncAssetsDirectory/doAssetSync.d.ts +0 -5
- package/dist/operations/syncAssetsDirectory/doAssetSync.test.d.ts +0 -1
- package/dist/operations/syncAssetsDirectory.d.ts +0 -1
- package/dist/operations/syncAssetsDirectory.test.d.ts +0 -1
- package/dist/utils/attachWorkbench.d.ts +0 -2
- package/dist/utils/createReadableStreamFromReadable.d.ts +0 -4
- package/dist/utils/getFolderSize.d.ts +0 -1
- package/dist/utils/index.d.ts +0 -2
- package/dist/utils/launchBrowserAndWaitForSDK.d.ts +0 -9
- package/dist/utils/startDevServer.d.ts +0 -7
- package/dist/utils/startPreviewServer.d.ts +0 -7
- package/dist/utils/validateVideoResolution.d.ts +0 -9
- package/dist/utils/withSpinner.d.ts +0 -1
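
A note on the overall shape of this release: every file under dist/ now carries //#region markers, a trailing export { } statement, and a //# sourceMappingURL comment, *.js.map files are added, the per-command *.d.ts files under dist/ are removed, and a new tsdown.config.ts appears at the package root, which is consistent with the build moving to tsdown. The actual contents of tsdown.config.ts are not shown in this diff; the sketch below only illustrates what such a config typically looks like, assuming tsdown's tsup-style options.

    // Hypothetical sketch — not the package's actual tsdown.config.ts.
    import { defineConfig } from "tsdown";

    export default defineConfig({
      entry: ["src/index.ts"], // assumed CLI entry point
      format: ["esm"],         // the dist output shown below is ESM
      sourcemap: true,         // would produce the new *.js.map files
      dts: true,               // declarations appear consolidated: dist/index.d.ts changes
                               // while the per-file dist/**/*.d.ts files are removed
    });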
package/dist/VERSION.js
CHANGED

package/dist/VERSION.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"VERSION.js","names":[],"sources":["../src/VERSION.ts"],"sourcesContent":["export const VERSION = \"0.25.0-beta.0\";\n"],"mappings":";AAAA,MAAa,UAAU"}

package/dist/commands/auth.js
CHANGED
@@ -3,11 +3,13 @@ import { program } from "commander";
 import chalk from "chalk";
 import debug from "debug";
 import ora from "ora";
-
+
+//#region src/commands/auth.ts
+const log = debug("ef:cli:auth");
 const getApiData = async () => {
 return (await getClient().authenticatedFetch("/api/v1/organization")).json();
 };
-
+const authCommand = program.command("auth").description("Fetch organization data using API token").action(async () => {
 log("Options:", authCommand.opts());
 const spinner = ora("Loading...").start();
 try {
@@ -20,4 +22,7 @@ var authCommand = program.command("auth").description("Fetch organization data u
 log("Error:", error);
 }
 });
-
+
+//#endregion
+export { };
+//# sourceMappingURL=auth.js.map

package/dist/commands/auth.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"auth.js","names":["error: any"],"sources":["../../src/commands/auth.ts"],"sourcesContent":["import chalk from \"chalk\";\nimport { program } from \"commander\";\nimport debug from \"debug\";\nimport ora from \"ora\";\n\nimport { getClient } from \"../utils/index.js\";\n\nconst log = debug(\"ef:cli:auth\");\n\nexport interface APIOrgResult {\n apiKeyName: string;\n id: string;\n org_id: string;\n created_at: unknown;\n updated_at: unknown;\n displayName: string;\n}\n\nexport const getApiData = async () => {\n const response = await getClient().authenticatedFetch(\"/api/v1/organization\");\n return response.json() as Promise<APIOrgResult>;\n};\n\nconst authCommand = program\n .command(\"auth\")\n .description(\"Fetch organization data using API token\")\n .action(async () => {\n const options = authCommand.opts();\n log(\"Options:\", options);\n\n const spinner = ora(\"Loading...\").start();\n\n try {\n const apiData = await getApiData();\n spinner.succeed(\"You are authenticated! 🎉\");\n process.stderr.write(\n chalk.green(`You're using ${apiData.apiKeyName} API key 🚀\\n`),\n );\n process.stderr.write(\n chalk.blue(`Welcome to ${apiData.displayName} organization 🎉\\n`),\n );\n } catch (error: any) {\n spinner.fail(\"Authentication failed!\");\n log(\"Error:\", error);\n }\n });\n"],"mappings":";;;;;;;AAOA,MAAM,MAAM,MAAM,cAAc;AAWhC,MAAa,aAAa,YAAY;AAEpC,SADiB,MAAM,WAAW,CAAC,mBAAmB,uBAAuB,EAC7D,MAAM;;AAGxB,MAAM,cAAc,QACjB,QAAQ,OAAO,CACf,YAAY,0CAA0C,CACtD,OAAO,YAAY;AAElB,KAAI,YADY,YAAY,MAAM,CACV;CAExB,MAAM,UAAU,IAAI,aAAa,CAAC,OAAO;AAEzC,KAAI;EACF,MAAM,UAAU,MAAM,YAAY;AAClC,UAAQ,QAAQ,4BAA4B;AAC5C,UAAQ,OAAO,MACb,MAAM,MAAM,gBAAgB,QAAQ,WAAW,eAAe,CAC/D;AACD,UAAQ,OAAO,MACb,MAAM,KAAK,cAAc,QAAQ,YAAY,oBAAoB,CAClE;UACMA,OAAY;AACnB,UAAQ,KAAK,yBAAyB;AACtC,MAAI,UAAU,MAAM;;EAEtB"}

package/dist/commands/check.js
CHANGED
@@ -3,7 +3,9 @@ import chalk from "chalk";
 import ora from "ora";
 import { exec } from "node:child_process";
 import os from "node:os";
-
+
+//#region src/commands/check.ts
+const checks = {
 ffmpeg: {
 message: () => {
 const platform = os.platform();
@@ -81,4 +83,7 @@ program.command("check").description("Check on dependencies and other requiremen
 }
 }
 });
-
+
+//#endregion
+export { };
+//# sourceMappingURL=check.js.map

package/dist/commands/check.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"check.js","names":["checks: Record<string, CheckDescriptor>"],"sources":["../../src/commands/check.ts"],"sourcesContent":["import { exec } from \"node:child_process\";\nimport os from \"node:os\";\nimport chalk from \"chalk\";\nimport { program } from \"commander\";\nimport ora from \"ora\";\n\ninterface CheckDescriptor {\n check(): Promise<boolean>;\n message(): string[];\n}\n\nconst checks: Record<string, CheckDescriptor> = {\n ffmpeg: {\n message: () => {\n const platform = os.platform();\n const message = [\n \"Processing assets for <ef-video>, <ef-audio>, <ef-captions>, and <ef-waveform>\\n elements requires ffmpeg to be installed.\",\n ];\n switch (platform) {\n case \"darwin\": {\n message.push(\n \"On platform=darwin you can install ffmpeg using Homebrew:\",\n );\n message.push(\" - brew install ffmpeg\");\n message.push(\n \"Or you can download ffmpeg from https://ffmpeg.org/download.html\",\n );\n break;\n }\n case \"linux\": {\n message.push(\n \"You can install ffmpeg using your distribution's package manager.\",\n );\n break;\n }\n case \"win32\": {\n message.push(\n \"You can download ffmpeg from https://ffmpeg.org/download.html\",\n );\n message.push(\n \"You can use package managers like Chocolatey or Scoop to install ffmpeg.\",\n );\n message.push(\" - choco install ffmpeg-full\");\n message.push(\" - scoop install ffmpeg\");\n message.push(\" - winget install ffmpeg\");\n break;\n }\n default: {\n message.push(`Unrecognized platform ${platform}`);\n message.push(\n \"You can download ffmpeg from https://ffmpeg.org/download.html\",\n );\n message.push(\n \"Or try installing it from your operating system's package manager\",\n );\n break;\n }\n }\n return message;\n },\n check: async () => {\n return new Promise((resolve, reject) => {\n exec(\"ffmpeg -version\", (error: any, stdout: any, _stderr: any) => {\n if (error) {\n reject(error);\n return;\n }\n resolve(stdout);\n });\n });\n },\n },\n\n whisper_timestamped: {\n message: () => {\n const message = [\n \"<ef-captions> Requires whisper_timestamped to be installed.\",\n ];\n\n message.push(\"whisper_timestamped depends on python3\");\n\n message.push(\" - pip3 install whisper_timestamped\");\n\n message.push(\"Alternate installation instructions are availble at:\");\n message.push(\n \"https://github.com/linto-ai/whisper-timestamped#installation\",\n );\n\n return message;\n },\n check: async () => {\n return new Promise((resolve, reject) => {\n exec(\n \"whisper_timestamped --version\",\n (error: any, stdout: any, _stderr: any) => {\n if (error) {\n reject(error);\n return;\n }\n resolve(stdout);\n },\n );\n });\n },\n },\n};\n\nprogram\n .command(\"check\")\n .description(\"Check on dependencies and other requirements\")\n .action(async () => {\n for (const checkName in checks) {\n const check = checks[checkName];\n if (!check) {\n continue;\n }\n const spinner = ora(`Checking ${checkName}`).start();\n try {\n await check.check();\n spinner.succeed(\n chalk.white.bgGreen(` Check for ${checkName} passed `),\n );\n } catch (_error) {\n spinner.fail(chalk.white.bgRed(` Check for ${checkName} failed `));\n process.stderr.write(chalk.red(check.message().join(\"\\n\\n\")));\n process.stderr.write(\"\\n\");\n }\n }\n 
});\n"],"mappings":";;;;;;;AAWA,MAAMA,SAA0C;CAC9C,QAAQ;EACN,eAAe;GACb,MAAM,WAAW,GAAG,UAAU;GAC9B,MAAM,UAAU,CACd,6HACD;AACD,WAAQ,UAAR;IACE,KAAK;AACH,aAAQ,KACN,4DACD;AACD,aAAQ,KAAK,yBAAyB;AACtC,aAAQ,KACN,mEACD;AACD;IAEF,KAAK;AACH,aAAQ,KACN,oEACD;AACD;IAEF,KAAK;AACH,aAAQ,KACN,gEACD;AACD,aAAQ,KACN,2EACD;AACD,aAAQ,KAAK,+BAA+B;AAC5C,aAAQ,KAAK,0BAA0B;AACvC,aAAQ,KAAK,2BAA2B;AACxC;IAEF;AACE,aAAQ,KAAK,yBAAyB,WAAW;AACjD,aAAQ,KACN,gEACD;AACD,aAAQ,KACN,oEACD;AACD;;AAGJ,UAAO;;EAET,OAAO,YAAY;AACjB,UAAO,IAAI,SAAS,SAAS,WAAW;AACtC,SAAK,oBAAoB,OAAY,QAAa,YAAiB;AACjE,SAAI,OAAO;AACT,aAAO,MAAM;AACb;;AAEF,aAAQ,OAAO;MACf;KACF;;EAEL;CAED,qBAAqB;EACnB,eAAe;GACb,MAAM,UAAU,CACd,8DACD;AAED,WAAQ,KAAK,yCAAyC;AAEtD,WAAQ,KAAK,sCAAsC;AAEnD,WAAQ,KAAK,uDAAuD;AACpE,WAAQ,KACN,+DACD;AAED,UAAO;;EAET,OAAO,YAAY;AACjB,UAAO,IAAI,SAAS,SAAS,WAAW;AACtC,SACE,kCACC,OAAY,QAAa,YAAiB;AACzC,SAAI,OAAO;AACT,aAAO,MAAM;AACb;;AAEF,aAAQ,OAAO;MAElB;KACD;;EAEL;CACF;AAED,QACG,QAAQ,QAAQ,CAChB,YAAY,+CAA+C,CAC3D,OAAO,YAAY;AAClB,MAAK,MAAM,aAAa,QAAQ;EAC9B,MAAM,QAAQ,OAAO;AACrB,MAAI,CAAC,MACH;EAEF,MAAM,UAAU,IAAI,YAAY,YAAY,CAAC,OAAO;AACpD,MAAI;AACF,SAAM,MAAM,OAAO;AACnB,WAAQ,QACN,MAAM,MAAM,QAAQ,cAAc,UAAU,WAAW,CACxD;WACM,QAAQ;AACf,WAAQ,KAAK,MAAM,MAAM,MAAM,eAAe,UAAU,WAAW,CAAC;AACpE,WAAQ,OAAO,MAAM,MAAM,IAAI,MAAM,SAAS,CAAC,KAAK,OAAO,CAAC,CAAC;AAC7D,WAAQ,OAAO,MAAM,KAAK;;;EAG9B"}

package/dist/commands/preview.js
CHANGED
@@ -1,9 +1,14 @@
 import { program } from "commander";
 import { spawn } from "node:child_process";
+
+//#region src/commands/preview.ts
 program.command("preview [directory]").description("Preview a directory's index.html file").action(async (projectDirectory = ".") => {
 spawn("npx", ["vite", "dev"], {
 cwd: projectDirectory,
 stdio: "inherit"
 });
 });
-
+
+//#endregion
+export { };
+//# sourceMappingURL=preview.js.map

package/dist/commands/preview.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"preview.js","names":[],"sources":["../../src/commands/preview.ts"],"sourcesContent":["import { spawn } from \"node:child_process\";\nimport { program } from \"commander\";\n\nprogram\n .command(\"preview [directory]\")\n .description(\"Preview a directory's index.html file\")\n .action(async (projectDirectory = \".\") => {\n spawn(\"npx\", [\"vite\", \"dev\"], { cwd: projectDirectory, stdio: \"inherit\" });\n });\n"],"mappings":";;;;AAGA,QACG,QAAQ,sBAAsB,CAC9B,YAAY,wCAAwC,CACpD,OAAO,OAAO,mBAAmB,QAAQ;AACxC,OAAM,OAAO,CAAC,QAAQ,MAAM,EAAE;EAAE,KAAK;EAAkB,OAAO;EAAW,CAAC;EAC1E"}

package/dist/commands/process-file.js
CHANGED
@@ -3,6 +3,8 @@ import { withSpinner } from "../utils/withSpinner.js";
 import { program } from "commander";
 import ora from "ora";
 import { createUnprocessedFileFromPath, getIsobmffProcessInfo, getIsobmffProcessProgress, processIsobmffFile, uploadUnprocessedFile } from "@editframe/api/node";
+
+//#region src/commands/process-file.ts
 program.command("process-file <file>").description("Upload a audio/video to Editframe for processing.").action(async (path) => {
 const client = getClient();
 const unprocessedFile = await withSpinner("Creating unprocessed file record", () => createUnprocessedFileFromPath(client, path));
@@ -20,4 +22,7 @@ program.command("process-file <file>").description("Upload a audio/video to Edit
 console.log("Processed file info");
 console.log(info);
 });
-
+
+//#endregion
+export { };
+//# sourceMappingURL=process-file.js.map

package/dist/commands/process-file.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"process-file.js","names":[],"sources":["../../src/commands/process-file.ts"],"sourcesContent":["import {\n createUnprocessedFileFromPath,\n getIsobmffProcessInfo,\n getIsobmffProcessProgress,\n processIsobmffFile,\n uploadUnprocessedFile,\n} from \"@editframe/api/node\";\nimport { program } from \"commander\";\nimport ora from \"ora\";\nimport { getClient } from \"../utils/index.js\";\nimport { withSpinner } from \"../utils/withSpinner.js\";\n\nprogram\n .command(\"process-file <file>\")\n .description(\"Upload a audio/video to Editframe for processing.\")\n .action(async (path: string) => {\n const client = getClient();\n\n const unprocessedFile = await withSpinner(\n \"Creating unprocessed file record\",\n () => createUnprocessedFileFromPath(client, path),\n );\n\n const upload = await uploadUnprocessedFile(client, unprocessedFile, path);\n const uploadSpinner = ora(\"Uploading file\");\n\n for await (const event of upload) {\n uploadSpinner.text = `Uploading file: ${(100 * event.progress).toFixed(2)}%`;\n }\n uploadSpinner.succeed(\"Upload complete\");\n const processorRecord = await withSpinner(\n \"Marking for processing\",\n async () => await processIsobmffFile(client, unprocessedFile.id),\n );\n\n const processSpinner = ora(\"Waiting for processing to complete\");\n processSpinner.start();\n const progress = await getIsobmffProcessProgress(\n client,\n processorRecord.id,\n );\n\n for await (const event of progress) {\n if (event.type === \"progress\") {\n processSpinner.text = `Processing: ${(100 * event.data.progress).toFixed(2)}%`;\n } else if (event.type === \"complete\") {\n processSpinner.succeed(\"Processing complete\");\n }\n }\n\n const info = await getIsobmffProcessInfo(client, processorRecord.id);\n\n console.log(\"Processed file info\");\n console.log(info);\n });\n"],"mappings":";;;;;;;AAYA,QACG,QAAQ,sBAAsB,CAC9B,YAAY,oDAAoD,CAChE,OAAO,OAAO,SAAiB;CAC9B,MAAM,SAAS,WAAW;CAE1B,MAAM,kBAAkB,MAAM,YAC5B,0CACM,8BAA8B,QAAQ,KAAK,CAClD;CAED,MAAM,SAAS,MAAM,sBAAsB,QAAQ,iBAAiB,KAAK;CACzE,MAAM,gBAAgB,IAAI,iBAAiB;AAE3C,YAAW,MAAM,SAAS,OACxB,eAAc,OAAO,oBAAoB,MAAM,MAAM,UAAU,QAAQ,EAAE,CAAC;AAE5E,eAAc,QAAQ,kBAAkB;CACxC,MAAM,kBAAkB,MAAM,YAC5B,0BACA,YAAY,MAAM,mBAAmB,QAAQ,gBAAgB,GAAG,CACjE;CAED,MAAM,iBAAiB,IAAI,qCAAqC;AAChE,gBAAe,OAAO;CACtB,MAAM,WAAW,MAAM,0BACrB,QACA,gBAAgB,GACjB;AAED,YAAW,MAAM,SAAS,SACxB,KAAI,MAAM,SAAS,WACjB,gBAAe,OAAO,gBAAgB,MAAM,MAAM,KAAK,UAAU,QAAQ,EAAE,CAAC;UACnE,MAAM,SAAS,WACxB,gBAAe,QAAQ,sBAAsB;CAIjD,MAAM,OAAO,MAAM,sBAAsB,QAAQ,gBAAgB,GAAG;AAEpE,SAAQ,IAAI,sBAAsB;AAClC,SAAQ,IAAI,KAAK;EACjB"}

package/dist/commands/process.js
CHANGED
@@ -6,6 +6,8 @@ import { program } from "commander";
 import path from "node:path";
 import { spawnSync } from "node:child_process";
 import { getRenderInfo } from "@editframe/elements";
+
+//#region src/commands/process.ts
 program.command("process [directory]").description("Process's a directory's index.html file, analyzing assets and processing them for rendering").action(async (directory) => {
 directory ??= ".";
 const distDir = path.join(directory, "dist");
@@ -29,4 +31,7 @@ program.command("process [directory]").description("Process's a directory's inde
 await processRenderInfo(await page.evaluate(getRenderInfo));
 });
 });
-
+
+//#endregion
+export { };
+//# sourceMappingURL=process.js.map

package/dist/commands/process.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"process.js","names":[],"sources":["../../src/commands/process.ts"],"sourcesContent":["import { spawnSync } from \"node:child_process\";\nimport path from \"node:path\";\nimport { getRenderInfo } from \"@editframe/elements\";\nimport { program } from \"commander\";\nimport { processRenderInfo } from \"../operations/processRenderInfo.js\";\nimport { launchBrowserAndWaitForSDK } from \"../utils/launchBrowserAndWaitForSDK.js\";\nimport { PreviewServer } from \"../utils/startPreviewServer.js\";\nimport { withSpinner } from \"../utils/withSpinner.js\";\n\nprogram\n .command(\"process [directory]\")\n .description(\n \"Process's a directory's index.html file, analyzing assets and processing them for rendering\",\n )\n .action(async (directory) => {\n directory ??= \".\";\n\n const distDir = path.join(directory, \"dist\");\n await withSpinner(\"Building\\n\", async () => {\n spawnSync(\"npx\", [\"vite\", \"build\", directory], {\n stdio: \"inherit\",\n });\n });\n\n const previewServer = await PreviewServer.start(distDir);\n process.stderr.write(\"Preview server started at \");\n process.stderr.write(previewServer.url);\n process.stderr.write(\"\\n\");\n await launchBrowserAndWaitForSDK(\n {\n url: previewServer.url,\n efInteractive: false,\n interactive: false,\n headless: true,\n },\n async (page) => {\n const renderInfo = await page.evaluate(getRenderInfo);\n await processRenderInfo(renderInfo);\n },\n );\n });\n"],"mappings":";;;;;;;;;;AASA,QACG,QAAQ,sBAAsB,CAC9B,YACC,8FACD,CACA,OAAO,OAAO,cAAc;AAC3B,eAAc;CAEd,MAAM,UAAU,KAAK,KAAK,WAAW,OAAO;AAC5C,OAAM,YAAY,cAAc,YAAY;AAC1C,YAAU,OAAO;GAAC;GAAQ;GAAS;GAAU,EAAE,EAC7C,OAAO,WACR,CAAC;GACF;CAEF,MAAM,gBAAgB,MAAM,cAAc,MAAM,QAAQ;AACxD,SAAQ,OAAO,MAAM,6BAA6B;AAClD,SAAQ,OAAO,MAAM,cAAc,IAAI;AACvC,SAAQ,OAAO,MAAM,KAAK;AAC1B,OAAM,2BACJ;EACE,KAAK,cAAc;EACnB,eAAe;EACf,aAAa;EACb,UAAU;EACX,EACD,OAAO,SAAS;AAEd,QAAM,kBADa,MAAM,KAAK,SAAS,cAAc,CAClB;GAEtC;EACD"}

package/dist/commands/render.js
CHANGED
@@ -19,7 +19,9 @@ import { inspect } from "node:util";
 import { RenderInfo, getRenderInfo } from "@editframe/elements";
 import { parse } from "node-html-parser";
 import * as tar from "tar";
-
+
+//#region src/commands/render.ts
+const log = debug("ef:cli:render");
 const buildAssetId = async (srcDir, src, basename$1) => {
 log(`Building image asset id for ${src}\n`);
 const syncStatus = new SyncStatus(join(srcDir, "assets", ".cache", await md5FilePath(path.join(srcDir, src)), basename$1));
@@ -104,6 +106,13 @@ program.command("render [directory]").description("Render a directory's index.ht
 process.stderr.write(`Render is in '${render?.status}' status. It cannot be recreated while in this status.\n`);
 return;
 }
+/**
+* This tar stream is created with the dist directory as the root.
+* This is acheived by setting the cwd option to the dist directory.
+* And the files to be included in the tar stream are all files in the dist directory.
+*
+* The renderer expects to find the index.html file at the root of the tar stream.
+*/
 const tarStream = tar.create({
 gzip: true,
 cwd: distDir
@@ -116,4 +125,7 @@ program.command("render [directory]").description("Render a directory's index.ht
 process.stderr.write("\n");
 });
 });
-
+
+//#endregion
+export { };
+//# sourceMappingURL=render.js.map

package/dist/commands/render.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"render.js","names":["basename","parseHTML"],"sources":["../../src/commands/render.ts"],"sourcesContent":["import { spawnSync } from \"node:child_process\";\nimport { readFile, writeFile } from \"node:fs/promises\";\nimport path, { basename, join } from \"node:path\";\nimport { PassThrough } from \"node:stream\";\nimport { inspect } from \"node:util\";\nimport { createRender, uploadRender } from \"@editframe/api\";\nimport { md5Directory, md5FilePath } from \"@editframe/assets\";\nimport { getRenderInfo, RenderInfo } from \"@editframe/elements\";\nimport { Option, program } from \"commander\";\nimport debug from \"debug\";\nimport { parse as parseHTML } from \"node-html-parser\";\nimport * as tar from \"tar\";\nimport { processRenderInfo } from \"../operations/processRenderInfo.js\";\nimport { SyncStatus } from \"../operations/syncAssetsDirectory/SyncStatus.js\";\nimport { syncAssetDirectory } from \"../operations/syncAssetsDirectory.js\";\nimport { createReadableStreamFromReadable } from \"../utils/createReadableStreamFromReadable.js\";\nimport { getClient } from \"../utils/index.js\";\nimport { launchBrowserAndWaitForSDK } from \"../utils/launchBrowserAndWaitForSDK.js\";\nimport { PreviewServer } from \"../utils/startPreviewServer.js\";\nimport { validateVideoResolution } from \"../utils/validateVideoResolution.js\";\nimport { withSpinner } from \"../utils/withSpinner.js\";\n\nconst log = debug(\"ef:cli:render\");\n\nexport const buildAssetId = async (\n srcDir: string,\n src: string,\n basename: string,\n) => {\n log(`Building image asset id for ${src}\\n`);\n const assetPath = path.join(srcDir, src);\n const assetMd5 = await md5FilePath(assetPath);\n const syncStatus = new SyncStatus(\n join(srcDir, \"assets\", \".cache\", assetMd5, basename),\n );\n const info = await syncStatus.readInfo();\n if (!info) {\n throw new Error(`SyncStatus info is not found for ${syncStatus.infoPath}`);\n }\n\n return info.id;\n};\n\nprogram\n .command(\"render [directory]\")\n .description(\n \"Render a directory's index.html file as a video in the editframe cloud\",\n )\n .addOption(\n new Option(\"-s, --strategy <strategy>\", \"Render strategy\")\n .choices([\"v1\"])\n .default(\"v1\"),\n )\n .action(async (directory, options) => {\n directory ??= \".\";\n\n await syncAssetDirectory(\n join(process.cwd(), directory, \"src\", \"assets\", \".cache\"),\n );\n\n const srcDir = path.join(directory, \"src\");\n const distDir = path.join(directory, \"dist\");\n await withSpinner(\"Building\\n\", async () => {\n try {\n await withSpinner(\"Building\\n\", async () => {\n spawnSync(\n \"npx\",\n // biome-ignore format: Grouping CLI arguments\n [\n \"vite\", \"build\", directory,\n \"--clearScreen\", \"false\",\n \"--logLevel\", \"debug\",\n ],\n {\n stdio: \"inherit\",\n },\n );\n });\n } catch (error) {\n console.error(\"Build failed:\", error);\n }\n });\n\n const previewServer = await PreviewServer.start(distDir);\n process.stderr.write(\"Preview server started at:\");\n process.stderr.write(previewServer.url);\n process.stderr.write(\"\\n\");\n await launchBrowserAndWaitForSDK(\n {\n url: previewServer.url,\n efInteractive: false,\n interactive: false,\n headless: true,\n },\n async (page) => {\n const renderInfo = RenderInfo.parse(await page.evaluate(getRenderInfo));\n\n validateVideoResolution({\n width: renderInfo.width,\n height: renderInfo.height,\n });\n\n await processRenderInfo(renderInfo);\n\n const doc = parseHTML(\n await readFile(path.join(distDir, \"index.html\"), 
\"utf-8\"),\n );\n\n log(\"Building asset IDs\");\n for (const element of doc.querySelectorAll(\n \"ef-image, ef-audio, ef-video\",\n )) {\n log(`Processing ${element.tagName}`);\n if (element.hasAttribute(\"asset-id\")) {\n log(\n `Asset ID for ${element.tagName} ${element.getAttribute(\"src\")} is ${element.getAttribute(\"asset-id\")}`,\n );\n continue;\n }\n const src = element.getAttribute(\"src\");\n if (!src) {\n log(`No src attribute for ${element.tagName}`);\n continue;\n }\n\n switch (element.tagName) {\n case \"EF-IMAGE\":\n element.setAttribute(\n \"asset-id\",\n await buildAssetId(srcDir, src, basename(src)),\n );\n break;\n case \"EF-AUDIO\":\n case \"EF-VIDEO\":\n element.setAttribute(\n \"asset-id\",\n await buildAssetId(srcDir, src, \"isobmff\"),\n );\n break;\n default:\n log(`Unknown element type: ${element.tagName}`);\n }\n }\n\n await writeFile(path.join(distDir, \"index.html\"), doc.toString());\n\n const md5 = await md5Directory(distDir);\n const render = await createRender(getClient(), {\n md5,\n width: renderInfo.width,\n height: renderInfo.height,\n fps: renderInfo.fps,\n duration_ms: renderInfo.durationMs,\n work_slice_ms: 4_000,\n strategy: options.strategy,\n });\n if (render?.status !== \"created\") {\n process.stderr.write(\n `Render is in '${render?.status}' status. It cannot be recreated while in this status.\\n`,\n );\n return;\n }\n /**\n * This tar stream is created with the dist directory as the root.\n * This is acheived by setting the cwd option to the dist directory.\n * And the files to be included in the tar stream are all files in the dist directory.\n *\n * The renderer expects to find the index.html file at the root of the tar stream.\n */\n const tarStream = tar.create(\n {\n gzip: true,\n cwd: distDir,\n },\n [\".\"],\n );\n const readable = new PassThrough();\n tarStream.pipe(readable);\n\n await uploadRender(\n getClient(),\n render.id,\n createReadableStreamFromReadable(readable),\n );\n process.stderr.write(\"Render assets uploaded\\n\");\n process.stderr.write(inspect(render));\n process.stderr.write(\"\\n\");\n },\n );\n 
});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;AAsBA,MAAM,MAAM,MAAM,gBAAgB;AAElC,MAAa,eAAe,OAC1B,QACA,KACA,eACG;AACH,KAAI,+BAA+B,IAAI,IAAI;CAG3C,MAAM,aAAa,IAAI,WACrB,KAAK,QAAQ,UAAU,UAFR,MAAM,YADL,KAAK,KAAK,QAAQ,IAAI,CACK,EAEAA,WAAS,CACrD;CACD,MAAM,OAAO,MAAM,WAAW,UAAU;AACxC,KAAI,CAAC,KACH,OAAM,IAAI,MAAM,oCAAoC,WAAW,WAAW;AAG5E,QAAO,KAAK;;AAGd,QACG,QAAQ,qBAAqB,CAC7B,YACC,yEACD,CACA,UACC,IAAI,OAAO,6BAA6B,kBAAkB,CACvD,QAAQ,CAAC,KAAK,CAAC,CACf,QAAQ,KAAK,CACjB,CACA,OAAO,OAAO,WAAW,YAAY;AACpC,eAAc;AAEd,OAAM,mBACJ,KAAK,QAAQ,KAAK,EAAE,WAAW,OAAO,UAAU,SAAS,CAC1D;CAED,MAAM,SAAS,KAAK,KAAK,WAAW,MAAM;CAC1C,MAAM,UAAU,KAAK,KAAK,WAAW,OAAO;AAC5C,OAAM,YAAY,cAAc,YAAY;AAC1C,MAAI;AACF,SAAM,YAAY,cAAc,YAAY;AAC1C,cACE,OAEA;KACE;KAAQ;KAAS;KACjB;KAAiB;KACjB;KAAc;KACf,EACD,EACE,OAAO,WACR,CACF;KACD;WACK,OAAO;AACd,WAAQ,MAAM,iBAAiB,MAAM;;GAEvC;CAEF,MAAM,gBAAgB,MAAM,cAAc,MAAM,QAAQ;AACxD,SAAQ,OAAO,MAAM,6BAA6B;AAClD,SAAQ,OAAO,MAAM,cAAc,IAAI;AACvC,SAAQ,OAAO,MAAM,KAAK;AAC1B,OAAM,2BACJ;EACE,KAAK,cAAc;EACnB,eAAe;EACf,aAAa;EACb,UAAU;EACX,EACD,OAAO,SAAS;EACd,MAAM,aAAa,WAAW,MAAM,MAAM,KAAK,SAAS,cAAc,CAAC;AAEvE,0BAAwB;GACtB,OAAO,WAAW;GAClB,QAAQ,WAAW;GACpB,CAAC;AAEF,QAAM,kBAAkB,WAAW;EAEnC,MAAM,MAAMC,MACV,MAAM,SAAS,KAAK,KAAK,SAAS,aAAa,EAAE,QAAQ,CAC1D;AAED,MAAI,qBAAqB;AACzB,OAAK,MAAM,WAAW,IAAI,iBACxB,+BACD,EAAE;AACD,OAAI,cAAc,QAAQ,UAAU;AACpC,OAAI,QAAQ,aAAa,WAAW,EAAE;AACpC,QACE,gBAAgB,QAAQ,QAAQ,GAAG,QAAQ,aAAa,MAAM,CAAC,MAAM,QAAQ,aAAa,WAAW,GACtG;AACD;;GAEF,MAAM,MAAM,QAAQ,aAAa,MAAM;AACvC,OAAI,CAAC,KAAK;AACR,QAAI,wBAAwB,QAAQ,UAAU;AAC9C;;AAGF,WAAQ,QAAQ,SAAhB;IACE,KAAK;AACH,aAAQ,aACN,YACA,MAAM,aAAa,QAAQ,KAAK,SAAS,IAAI,CAAC,CAC/C;AACD;IACF,KAAK;IACL,KAAK;AACH,aAAQ,aACN,YACA,MAAM,aAAa,QAAQ,KAAK,UAAU,CAC3C;AACD;IACF,QACE,KAAI,yBAAyB,QAAQ,UAAU;;;AAIrD,QAAM,UAAU,KAAK,KAAK,SAAS,aAAa,EAAE,IAAI,UAAU,CAAC;EAEjE,MAAM,MAAM,MAAM,aAAa,QAAQ;EACvC,MAAM,SAAS,MAAM,aAAa,WAAW,EAAE;GAC7C;GACA,OAAO,WAAW;GAClB,QAAQ,WAAW;GACnB,KAAK,WAAW;GAChB,aAAa,WAAW;GACxB,eAAe;GACf,UAAU,QAAQ;GACnB,CAAC;AACF,MAAI,QAAQ,WAAW,WAAW;AAChC,WAAQ,OAAO,MACb,iBAAiB,QAAQ,OAAO,0DACjC;AACD;;;;;;;;;EASF,MAAM,YAAY,IAAI,OACpB;GACE,MAAM;GACN,KAAK;GACN,EACD,CAAC,IAAI,CACN;EACD,MAAM,WAAW,IAAI,aAAa;AAClC,YAAU,KAAK,SAAS;AAExB,QAAM,aACJ,WAAW,EACX,OAAO,IACP,iCAAiC,SAAS,CAC3C;AACD,UAAQ,OAAO,MAAM,2BAA2B;AAChD,UAAQ,OAAO,MAAM,QAAQ,OAAO,CAAC;AACrC,UAAQ,OAAO,MAAM,KAAK;GAE7B;EACD"}

package/dist/commands/sync.js
CHANGED
@@ -1,7 +1,12 @@
 import { syncAssetDirectory } from "../operations/syncAssetsDirectory.js";
 import { program } from "commander";
 import { join } from "node:path";
+
+//#region src/commands/sync.ts
 program.command("sync").description("Sync assets to Editframe servers for rendering").argument("[directory]", "Path to project directory to sync.").action(async (directory = ".") => {
 await syncAssetDirectory(join(process.cwd(), directory, "src", "assets", ".cache"));
 });
-
+
+//#endregion
+export { };
+//# sourceMappingURL=sync.js.map

package/dist/commands/sync.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"sync.js","names":[],"sources":["../../src/commands/sync.ts"],"sourcesContent":["import { join } from \"node:path\";\nimport { program } from \"commander\";\nimport { syncAssetDirectory } from \"../operations/syncAssetsDirectory.js\";\n\nprogram\n .command(\"sync\")\n .description(\"Sync assets to Editframe servers for rendering\")\n .argument(\"[directory]\", \"Path to project directory to sync.\")\n .action(async (directory = \".\") => {\n await syncAssetDirectory(\n join(process.cwd(), directory, \"src\", \"assets\", \".cache\"),\n );\n });\n"],"mappings":";;;;;AAIA,QACG,QAAQ,OAAO,CACf,YAAY,iDAAiD,CAC7D,SAAS,eAAe,qCAAqC,CAC7D,OAAO,OAAO,YAAY,QAAQ;AACjC,OAAM,mBACJ,KAAK,QAAQ,KAAK,EAAE,WAAW,OAAO,UAAU,SAAS,CAC1D;EACD"}

package/dist/commands/webhook.js
CHANGED
@@ -4,8 +4,10 @@ import chalk from "chalk";
 import debug from "debug";
 import ora from "ora";
 import { input, select } from "@inquirer/prompts";
-
-
+
+//#region src/commands/webhook.ts
+const log = debug("ef:cli:auth");
+const topics = [
 "render.created",
 "render.rendering",
 "render.pending",
@@ -21,7 +23,7 @@ const testWebhookURL = async ({ webhookURL, topic }) => {
 })
 })).json();
 };
-
+const webhookCommand = program.command("webhook").description("Test webhook URL with a topic").option("-u, --webhookURL <webhookURL>", "Webhook URL").addOption(new Option("-t, --topic <topic>", "Topic").choices(topics)).action(async () => {
 const options = webhookCommand.opts();
 log("Options:", options);
 let { webhookURL, topic } = options;
@@ -48,4 +50,7 @@ var webhookCommand = program.command("webhook").description("Test webhook URL wi
 log("Error:", error);
 }
 });
-
+
+//#endregion
+export { };
+//# sourceMappingURL=webhook.js.map

package/dist/commands/webhook.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"webhook.js","names":["topic","error: any"],"sources":["../../src/commands/webhook.ts"],"sourcesContent":["import { input, select } from \"@inquirer/prompts\";\nimport chalk from \"chalk\";\nimport { Option, program } from \"commander\";\nimport debug from \"debug\";\nimport ora from \"ora\";\n\nimport { getClient } from \"../utils/index.js\";\n\nconst log = debug(\"ef:cli:auth\");\n\nexport interface APITestWebhhokResult {\n message: string;\n}\nconst topics = [\n \"render.created\",\n \"render.rendering\",\n \"render.pending\",\n \"render.failed\",\n \"render.completed\",\n];\n\nexport const testWebhookURL = async ({\n webhookURL,\n topic,\n}: {\n webhookURL: string;\n topic: string;\n}) => {\n const response = await getClient().authenticatedFetch(\n \"/api/v1/test_webhook\",\n {\n method: \"POST\",\n body: JSON.stringify({\n webhookURL,\n topic,\n }),\n },\n );\n return response.json() as Promise<APITestWebhhokResult>;\n};\n\nconst webhookCommand = program\n .command(\"webhook\")\n .description(\"Test webhook URL with a topic\")\n .option(\"-u, --webhookURL <webhookURL>\", \"Webhook URL\")\n .addOption(new Option(\"-t, --topic <topic>\", \"Topic\").choices(topics))\n .action(async () => {\n const options = webhookCommand.opts();\n log(\"Options:\", options);\n let { webhookURL, topic } = options;\n\n if (!webhookURL) {\n const answer = await input({ message: \"Enter a webhook URL:\" });\n webhookURL = answer;\n }\n\n if (!topic) {\n const answer = await select({\n message: \"Select a topic:\",\n choices: [...topics.map((topic) => ({ title: topic, value: topic }))],\n });\n topic = answer;\n }\n\n const spinner = ora(\"Testing...\").start();\n try {\n const apiData = await testWebhookURL({ webhookURL, topic });\n spinner.succeed(\"Webhook URL is working! 🎉\");\n process.stderr.write(chalk.green(`${apiData.message}\\n`));\n } catch (error: any) {\n spinner.fail(\"Webhook URL is not working!\");\n process.stderr.write(error?.message);\n process.stderr.write(\"\\n\");\n log(\"Error:\", error);\n }\n });\n"],"mappings":";;;;;;;;AAQA,MAAM,MAAM,MAAM,cAAc;AAKhC,MAAM,SAAS;CACb;CACA;CACA;CACA;CACA;CACD;AAED,MAAa,iBAAiB,OAAO,EACnC,YACA,YAII;AAWJ,SAViB,MAAM,WAAW,CAAC,mBACjC,wBACA;EACE,QAAQ;EACR,MAAM,KAAK,UAAU;GACnB;GACA;GACD,CAAC;EACH,CACF,EACe,MAAM;;AAGxB,MAAM,iBAAiB,QACpB,QAAQ,UAAU,CAClB,YAAY,gCAAgC,CAC5C,OAAO,iCAAiC,cAAc,CACtD,UAAU,IAAI,OAAO,uBAAuB,QAAQ,CAAC,QAAQ,OAAO,CAAC,CACrE,OAAO,YAAY;CAClB,MAAM,UAAU,eAAe,MAAM;AACrC,KAAI,YAAY,QAAQ;CACxB,IAAI,EAAE,YAAY,UAAU;AAE5B,KAAI,CAAC,WAEH,cADe,MAAM,MAAM,EAAE,SAAS,wBAAwB,CAAC;AAIjE,KAAI,CAAC,MAKH,SAJe,MAAM,OAAO;EAC1B,SAAS;EACT,SAAS,CAAC,GAAG,OAAO,KAAK,aAAW;GAAE,OAAOA;GAAO,OAAOA;GAAO,EAAE,CAAC;EACtE,CAAC;CAIJ,MAAM,UAAU,IAAI,aAAa,CAAC,OAAO;AACzC,KAAI;EACF,MAAM,UAAU,MAAM,eAAe;GAAE;GAAY;GAAO,CAAC;AAC3D,UAAQ,QAAQ,6BAA6B;AAC7C,UAAQ,OAAO,MAAM,MAAM,MAAM,GAAG,QAAQ,QAAQ,IAAI,CAAC;UAClDC,OAAY;AACnB,UAAQ,KAAK,8BAA8B;AAC3C,UAAQ,OAAO,MAAM,OAAO,QAAQ;AACpC,UAAQ,OAAO,MAAM,KAAK;AAC1B,MAAI,UAAU,MAAM;;EAEtB"}

package/dist/index.d.ts
CHANGED

package/dist/index.js
CHANGED
@@ -10,6 +10,11 @@ import "./commands/check.js";
 import "./commands/webhook.js";
 import "dotenv/config";
 import { Option, program } from "commander";
+
+//#region src/index.ts
 program.name("editframe").addOption(new Option("-t, --token <token>", "API Token").env("EF_TOKEN")).addOption(new Option("--ef-host <host>", "Editframe Host").env("EF_HOST").default("https://editframe.dev")).addOption(new Option("--ef-render-host <host>", "Editframe Render Host").env("EF_RENDER_HOST").default("https://editframe.dev")).version(VERSION);
 program.parse(process.argv);
-
+
+//#endregion
+export { };
+//# sourceMappingURL=index.js.map

package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.js","names":[],"sources":["../src/index.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { Option, program } from \"commander\";\n\nimport { VERSION } from \"./VERSION.js\";\n\nprogram\n .name(\"editframe\")\n .addOption(new Option(\"-t, --token <token>\", \"API Token\").env(\"EF_TOKEN\"))\n .addOption(\n new Option(\"--ef-host <host>\", \"Editframe Host\")\n .env(\"EF_HOST\")\n .default(\"https://editframe.dev\"),\n )\n .addOption(\n new Option(\"--ef-render-host <host>\", \"Editframe Render Host\")\n .env(\"EF_RENDER_HOST\")\n .default(\"https://editframe.dev\"),\n )\n .version(VERSION);\n\nimport \"./commands/auth.js\";\nimport \"./commands/sync.js\";\nimport \"./commands/render.js\";\nimport \"./commands/preview.js\";\nimport \"./commands/process.js\";\nimport \"./commands/process-file.js\";\nimport \"./commands/check.js\";\nimport \"./commands/webhook.js\";\n\nprogram.parse(process.argv);\n"],"mappings":";;;;;;;;;;;;;;AAMA,QACG,KAAK,YAAY,CACjB,UAAU,IAAI,OAAO,uBAAuB,YAAY,CAAC,IAAI,WAAW,CAAC,CACzE,UACC,IAAI,OAAO,oBAAoB,iBAAiB,CAC7C,IAAI,UAAU,CACd,QAAQ,wBAAwB,CACpC,CACA,UACC,IAAI,OAAO,2BAA2B,wBAAwB,CAC3D,IAAI,iBAAiB,CACrB,QAAQ,wBAAwB,CACpC,CACA,QAAQ,QAAQ;AAWnB,QAAQ,MAAM,QAAQ,KAAK"}

package/dist/operations/processRenderInfo.js
CHANGED
@@ -1,4 +1,6 @@
 import { cacheImage, findOrCreateCaptions, generateTrack } from "@editframe/assets";
+
+//#region src/operations/processRenderInfo.ts
 const processRenderInfo = async (renderInfo) => {
 for (const [src, tracks] of Object.entries(renderInfo.assets.efMedia)) {
 process.stderr.write("Processing media asset: ");
@@ -24,4 +26,7 @@ const processRenderInfo = async (renderInfo) => {
 await findOrCreateCaptions("./src/assets", `./src${captionsAsset}`);
 }
 };
+
+//#endregion
 export { processRenderInfo };
+//# sourceMappingURL=processRenderInfo.js.map

package/dist/operations/processRenderInfo.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"processRenderInfo.js","names":[],"sources":["../../src/operations/processRenderInfo.ts"],"sourcesContent":["import {\n cacheImage,\n findOrCreateCaptions,\n generateTrack,\n} from \"@editframe/assets\";\nimport type { getRenderInfo } from \"@editframe/elements\";\n\nexport const processRenderInfo = async (\n renderInfo: Awaited<ReturnType<typeof getRenderInfo>>,\n) => {\n for (const [src, tracks] of Object.entries(renderInfo.assets.efMedia)) {\n process.stderr.write(\"Processing media asset: \");\n process.stderr.write(src);\n process.stderr.write(\"\\n\");\n for (const trackId in tracks) {\n process.stderr.write(\"Generating track: \");\n process.stderr.write(trackId);\n process.stderr.write(\"\\n\");\n await generateTrack(\n \"./src/assets\",\n `./src${src}`,\n `src?trackId=${trackId}`,\n );\n }\n }\n\n for (const imageAsset of renderInfo.assets.efImage) {\n process.stderr.write(\"Processing image asset: \");\n process.stderr.write(imageAsset);\n process.stderr.write(\"\\n\");\n await cacheImage(\"./src/assets\", `./src${imageAsset}`);\n }\n\n for (const captionsAsset of renderInfo.assets.efCaptions) {\n process.stderr.write(\"Processing captions asset: \");\n process.stderr.write(captionsAsset);\n process.stderr.write(\"\\n\");\n await findOrCreateCaptions(\"./src/assets\", `./src${captionsAsset}`);\n }\n};\n"],"mappings":";;;AAOA,MAAa,oBAAoB,OAC/B,eACG;AACH,MAAK,MAAM,CAAC,KAAK,WAAW,OAAO,QAAQ,WAAW,OAAO,QAAQ,EAAE;AACrE,UAAQ,OAAO,MAAM,2BAA2B;AAChD,UAAQ,OAAO,MAAM,IAAI;AACzB,UAAQ,OAAO,MAAM,KAAK;AAC1B,OAAK,MAAM,WAAW,QAAQ;AAC5B,WAAQ,OAAO,MAAM,qBAAqB;AAC1C,WAAQ,OAAO,MAAM,QAAQ;AAC7B,WAAQ,OAAO,MAAM,KAAK;AAC1B,SAAM,cACJ,gBACA,QAAQ,OACR,eAAe,UAChB;;;AAIL,MAAK,MAAM,cAAc,WAAW,OAAO,SAAS;AAClD,UAAQ,OAAO,MAAM,2BAA2B;AAChD,UAAQ,OAAO,MAAM,WAAW;AAChC,UAAQ,OAAO,MAAM,KAAK;AAC1B,QAAM,WAAW,gBAAgB,QAAQ,aAAa;;AAGxD,MAAK,MAAM,iBAAiB,WAAW,OAAO,YAAY;AACxD,UAAQ,OAAO,MAAM,8BAA8B;AACnD,UAAQ,OAAO,MAAM,cAAc;AACnC,UAAQ,OAAO,MAAM,KAAK;AAC1B,QAAM,qBAAqB,gBAAgB,QAAQ,gBAAgB"}

package/dist/operations/syncAssetsDirectory/SubAssetSync.js
CHANGED
@@ -3,11 +3,13 @@ import { SyncFragmentIndex } from "./SyncFragmentIndex.js";
 import { SyncImage } from "./SyncImage.js";
 import { SyncTrack } from "./SyncTrack.js";
 import debug from "debug";
-
-
-
-
-
+
+//#region src/operations/syncAssetsDirectory/SubAssetSync.ts
+const trackMatch = /\.track-[\d]+.mp4$/i;
+const fragmentIndexMatch = /\.tracks.json$/i;
+const captionsMatch = /\.captions.json$/i;
+const imageMatch = /\.(png|jpe?g|gif|webp)$/i;
+const log = debug("ef:SubAssetSync");
 const getAssetSync = (subAssetPath, md5) => {
 log("getAssetSync", {
 subAssetPath,
@@ -19,4 +21,7 @@ const getAssetSync = (subAssetPath, md5) => {
 if (captionsMatch.test(subAssetPath)) return new SyncCaption(subAssetPath, md5);
 throw new Error(`Unrecognized sub-asset type: ${subAssetPath}`);
 };
+
+//#endregion
 export { getAssetSync };
+//# sourceMappingURL=SubAssetSync.js.map

package/dist/operations/syncAssetsDirectory/SubAssetSync.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"SubAssetSync.js","names":[],"sources":["../../../src/operations/syncAssetsDirectory/SubAssetSync.ts"],"sourcesContent":["import debug from \"debug\";\nimport { SyncCaption } from \"./SyncCaption.js\";\nimport { SyncFragmentIndex } from \"./SyncFragmentIndex.js\";\nimport { SyncImage } from \"./SyncImage.js\";\nimport type { SyncStatus } from \"./SyncStatus.js\";\nimport { SyncTrack } from \"./SyncTrack.js\";\n\nexport interface SubAssetSync<CreationType> {\n icon: string;\n label: string;\n path: string;\n md5: string;\n prepare: () => Promise<void>;\n validate: () => Promise<void>;\n create: () => Promise<void>;\n upload: () => Promise<void>;\n syncStatus: SyncStatus;\n isComplete: () => boolean;\n markSynced: () => Promise<void>;\n created: CreationType | null;\n}\n\nconst trackMatch = /\\.track-[\\d]+.mp4$/i;\nconst fragmentIndexMatch = /\\.tracks.json$/i;\nconst captionsMatch = /\\.captions.json$/i;\nconst imageMatch = /\\.(png|jpe?g|gif|webp)$/i;\n\nconst log = debug(\"ef:SubAssetSync\");\n\nexport const getAssetSync = (subAssetPath: string, md5: string) => {\n log(\"getAssetSync\", { subAssetPath, md5 });\n if (imageMatch.test(subAssetPath)) {\n return new SyncImage(subAssetPath, md5);\n }\n if (trackMatch.test(subAssetPath)) {\n return new SyncTrack(subAssetPath, md5);\n }\n if (fragmentIndexMatch.test(subAssetPath)) {\n return new SyncFragmentIndex(subAssetPath, md5);\n }\n if (captionsMatch.test(subAssetPath)) {\n return new SyncCaption(subAssetPath, md5);\n }\n throw new Error(`Unrecognized sub-asset type: ${subAssetPath}`);\n};\n"],"mappings":";;;;;;;AAsBA,MAAM,aAAa;AACnB,MAAM,qBAAqB;AAC3B,MAAM,gBAAgB;AACtB,MAAM,aAAa;AAEnB,MAAM,MAAM,MAAM,kBAAkB;AAEpC,MAAa,gBAAgB,cAAsB,QAAgB;AACjE,KAAI,gBAAgB;EAAE;EAAc;EAAK,CAAC;AAC1C,KAAI,WAAW,KAAK,aAAa,CAC/B,QAAO,IAAI,UAAU,cAAc,IAAI;AAEzC,KAAI,WAAW,KAAK,aAAa,CAC/B,QAAO,IAAI,UAAU,cAAc,IAAI;AAEzC,KAAI,mBAAmB,KAAK,aAAa,CACvC,QAAO,IAAI,kBAAkB,cAAc,IAAI;AAEjD,KAAI,cAAc,KAAK,aAAa,CAClC,QAAO,IAAI,YAAY,cAAc,IAAI;AAE3C,OAAM,IAAI,MAAM,gCAAgC,eAAe"}

package/dist/operations/syncAssetsDirectory/SyncCaption.js
CHANGED
@@ -5,6 +5,8 @@ import { createCaptionFile, lookupCaptionFileByMd5, uploadCaptionFile } from "@e
 import { basename } from "node:path";
 import fs from "node:fs/promises";
 import { Readable } from "node:stream";
+
+//#region src/operations/syncAssetsDirectory/SyncCaption.ts
 var SyncCaption = class {
 constructor(path$1, md5) {
 this.path = path$1;
@@ -47,4 +49,7 @@ var SyncCaption = class {
 });
 }
 };
+
+//#endregion
 export { SyncCaption };
+//# sourceMappingURL=SyncCaption.js.map

package/dist/operations/syncAssetsDirectory/SyncCaption.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"SyncCaption.js","names":["path: string","md5: string"],"sources":["../../../src/operations/syncAssetsDirectory/SyncCaption.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\nimport { basename } from \"node:path\";\n\nimport { Readable } from \"node:stream\";\nimport {\n type CreateCaptionFileResult,\n createCaptionFile,\n type LookupCaptionFileByMd5Result,\n lookupCaptionFileByMd5,\n uploadCaptionFile,\n} from \"@editframe/api\";\nimport { createReadableStreamFromReadable } from \"../../utils/createReadableStreamFromReadable.js\";\nimport { getClient } from \"../../utils/index.js\";\nimport type { SubAssetSync } from \"./SubAssetSync.js\";\nimport { SyncStatus } from \"./SyncStatus.js\";\nexport class SyncCaption implements SubAssetSync<CreateCaptionFileResult> {\n icon = \"📝\";\n label = \"captions\";\n syncStatus: SyncStatus = new SyncStatus(this.path);\n created: CreateCaptionFileResult | LookupCaptionFileByMd5Result | null = null;\n constructor(\n public path: string,\n public md5: string,\n ) {}\n\n async byteSize() {\n return (await fs.stat(this.path)).size;\n }\n\n async prepare() {}\n\n async validate() {}\n\n async create() {\n const maybeCaptionFile = await lookupCaptionFileByMd5(\n getClient(),\n this.md5,\n );\n if (maybeCaptionFile) {\n this.created = maybeCaptionFile;\n } else {\n this.created = await createCaptionFile(getClient(), {\n md5: this.md5,\n filename: basename(this.path).replace(/\\.captions.json$/, \"\"),\n byte_size: await this.byteSize(),\n });\n }\n }\n\n isComplete() {\n return !!this.created?.complete;\n }\n\n async upload() {\n if (!this.created) {\n throw new Error(\n \"Caption not created. Should have been prevented by .isComplete()\",\n );\n }\n await uploadCaptionFile(\n getClient(),\n this.created.id,\n // It's not clear why we need to use Readable.from here, but it seems\n // to fix an issue where the request is closed early in tests\n createReadableStreamFromReadable(\n Readable.from(await fs.readFile(this.path)),\n ),\n await this.byteSize(),\n );\n }\n\n async markSynced() {\n if (!this.created) {\n throw new Error(\n \"Caption not created. Should have been prevented by .isComplete()\",\n );\n }\n const byteSize = await this.byteSize();\n await this.syncStatus.markSynced({\n version: \"1\",\n complete: true,\n id: this.created.id,\n md5: this.md5,\n byte_size: byteSize,\n });\n }\n}\n"],"mappings":";;;;;;;;;AAeA,IAAa,cAAb,MAA0E;CAKxE,YACE,AAAOA,QACP,AAAOC,KACP;EAFO;EACA;cANF;eACC;oBACiB,IAAI,WAAW,KAAK,KAAK;iBACuB;;CAMzE,MAAM,WAAW;AACf,UAAQ,MAAM,GAAG,KAAK,KAAK,KAAK,EAAE;;CAGpC,MAAM,UAAU;CAEhB,MAAM,WAAW;CAEjB,MAAM,SAAS;EACb,MAAM,mBAAmB,MAAM,uBAC7B,WAAW,EACX,KAAK,IACN;AACD,MAAI,iBACF,MAAK,UAAU;MAEf,MAAK,UAAU,MAAM,kBAAkB,WAAW,EAAE;GAClD,KAAK,KAAK;GACV,UAAU,SAAS,KAAK,KAAK,CAAC,QAAQ,oBAAoB,GAAG;GAC7D,WAAW,MAAM,KAAK,UAAU;GACjC,CAAC;;CAIN,aAAa;AACX,SAAO,CAAC,CAAC,KAAK,SAAS;;CAGzB,MAAM,SAAS;AACb,MAAI,CAAC,KAAK,QACR,OAAM,IAAI,MACR,mEACD;AAEH,QAAM,kBACJ,WAAW,EACX,KAAK,QAAQ,IAGb,iCACE,SAAS,KAAK,MAAM,GAAG,SAAS,KAAK,KAAK,CAAC,CAC5C,EACD,MAAM,KAAK,UAAU,CACtB;;CAGH,MAAM,aAAa;AACjB,MAAI,CAAC,KAAK,QACR,OAAM,IAAI,MACR,mEACD;EAEH,MAAM,WAAW,MAAM,KAAK,UAAU;AACtC,QAAM,KAAK,WAAW,WAAW;GAC/B,SAAS;GACT,UAAU;GACV,IAAI,KAAK,QAAQ;GACjB,KAAK,KAAK;GACV,WAAW;GACZ,CAAC"}

package/dist/operations/syncAssetsDirectory/SyncFragmentIndex.js
CHANGED
@@ -5,6 +5,8 @@ import { createISOBMFFFile, lookupISOBMFFFileByMd5, uploadFragmentIndex } from "
 import { basename, dirname, join } from "node:path";
 import fs from "node:fs/promises";
 import { Readable } from "node:stream";
+
+//#region src/operations/syncAssetsDirectory/SyncFragmentIndex.ts
 var SyncFragmentIndex = class {
 constructor(path$1, md5) {
 this.path = path$1;
@@ -53,4 +55,7 @@ var SyncFragmentIndex = class {
 })]);
 }
 };
+
+//#endregion
 export { SyncFragmentIndex };
+//# sourceMappingURL=SyncFragmentIndex.js.map

package/dist/operations/syncAssetsDirectory/SyncFragmentIndex.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"SyncFragmentIndex.js","names":["path: string","md5: string"],"sources":["../../../src/operations/syncAssetsDirectory/SyncFragmentIndex.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\nimport { basename, dirname, join } from \"node:path\";\nimport { Readable } from \"node:stream\";\n\nimport {\n type CreateISOBMFFFileResult,\n createISOBMFFFile,\n type LookupISOBMFFFileByMd5Result,\n lookupISOBMFFFileByMd5,\n uploadFragmentIndex,\n} from \"@editframe/api\";\n\nimport { createReadableStreamFromReadable } from \"../../utils/createReadableStreamFromReadable.js\";\nimport { getClient } from \"../../utils/index.js\";\nimport type { SubAssetSync } from \"./SubAssetSync.js\";\nimport { SyncStatus } from \"./SyncStatus.js\";\n\nexport class SyncFragmentIndex\n implements SubAssetSync<CreateISOBMFFFileResult>\n{\n icon = \"📋\";\n label = \"fragment index\";\n syncStatus = new SyncStatus(this.path);\n fileSyncStatus = new SyncStatus(join(dirname(this.path), \"isobmff\"));\n created: CreateISOBMFFFileResult | LookupISOBMFFFileByMd5Result | null = null;\n\n constructor(\n public path: string,\n public md5: string,\n ) {}\n\n async byteSize() {\n return (await fs.stat(this.path)).size;\n }\n\n async prepare() {}\n\n async validate() {}\n\n async create() {\n const maybeISOBMFFFile = await lookupISOBMFFFileByMd5(\n getClient(),\n this.md5,\n );\n if (maybeISOBMFFFile) {\n this.created = maybeISOBMFFFile;\n } else {\n this.created = await createISOBMFFFile(getClient(), {\n md5: this.md5,\n filename: basename(this.path).replace(/\\.tracks.json$/, \"\"),\n });\n }\n }\n\n isComplete() {\n return !!this.created?.fragment_index_complete;\n }\n\n async upload() {\n if (!this.created) {\n throw new Error(\n \"Fragment index not created. Should have been prevented by .isComplete()\",\n );\n }\n await uploadFragmentIndex(\n getClient(),\n this.created.id,\n // It is unclear why we need to use Readable.from here\n // Tests fail when using createReadStream\n createReadableStreamFromReadable(\n Readable.from(await fs.readFile(this.path)),\n ),\n await this.byteSize(),\n );\n }\n\n async markSynced() {\n if (!this.created) {\n throw new Error(\n \"Fragment index not created. 
Should have been prevented by .isComplete()\",\n );\n }\n const byteSize = await this.byteSize();\n await Promise.all([\n this.syncStatus.markSynced({\n version: \"1\",\n complete: true,\n id: this.created.id,\n md5: this.md5,\n byte_size: byteSize,\n }),\n this.fileSyncStatus.markSynced({\n version: \"1\",\n complete: true,\n id: this.created.id,\n md5: this.md5,\n byte_size: byteSize,\n }),\n ]);\n }\n}\n"],"mappings":";;;;;;;;;AAiBA,IAAa,oBAAb,MAEA;CAOE,YACE,AAAOA,QACP,AAAOC,KACP;EAFO;EACA;cARF;eACC;oBACK,IAAI,WAAW,KAAK,KAAK;wBACrB,IAAI,WAAW,KAAK,QAAQ,KAAK,KAAK,EAAE,UAAU,CAAC;iBACK;;CAOzE,MAAM,WAAW;AACf,UAAQ,MAAM,GAAG,KAAK,KAAK,KAAK,EAAE;;CAGpC,MAAM,UAAU;CAEhB,MAAM,WAAW;CAEjB,MAAM,SAAS;EACb,MAAM,mBAAmB,MAAM,uBAC7B,WAAW,EACX,KAAK,IACN;AACD,MAAI,iBACF,MAAK,UAAU;MAEf,MAAK,UAAU,MAAM,kBAAkB,WAAW,EAAE;GAClD,KAAK,KAAK;GACV,UAAU,SAAS,KAAK,KAAK,CAAC,QAAQ,kBAAkB,GAAG;GAC5D,CAAC;;CAIN,aAAa;AACX,SAAO,CAAC,CAAC,KAAK,SAAS;;CAGzB,MAAM,SAAS;AACb,MAAI,CAAC,KAAK,QACR,OAAM,IAAI,MACR,0EACD;AAEH,QAAM,oBACJ,WAAW,EACX,KAAK,QAAQ,IAGb,iCACE,SAAS,KAAK,MAAM,GAAG,SAAS,KAAK,KAAK,CAAC,CAC5C,EACD,MAAM,KAAK,UAAU,CACtB;;CAGH,MAAM,aAAa;AACjB,MAAI,CAAC,KAAK,QACR,OAAM,IAAI,MACR,0EACD;EAEH,MAAM,WAAW,MAAM,KAAK,UAAU;AACtC,QAAM,QAAQ,IAAI,CAChB,KAAK,WAAW,WAAW;GACzB,SAAS;GACT,UAAU;GACV,IAAI,KAAK,QAAQ;GACjB,KAAK,KAAK;GACV,WAAW;GACZ,CAAC,EACF,KAAK,eAAe,WAAW;GAC7B,SAAS;GACT,UAAU;GACV,IAAI,KAAK,QAAQ;GACjB,KAAK,KAAK;GACV,WAAW;GACZ,CAAC,CACH,CAAC"}

package/dist/operations/syncAssetsDirectory/SyncImage.js
CHANGED
@@ -6,6 +6,8 @@ import path, { basename } from "node:path";
 import fs from "node:fs/promises";
 import { createReadStream } from "node:fs";
 import { Probe } from "@editframe/assets";
+
+//#region src/operations/syncAssetsDirectory/SyncImage.ts
 var SyncImage = class {
 constructor(path$1, md5) {
 this.path = path$1;
@@ -72,4 +74,7 @@ var SyncImage = class {
 });
 }
 };
+
+//#endregion
 export { SyncImage };
+//# sourceMappingURL=SyncImage.js.map

package/dist/operations/syncAssetsDirectory/SyncImage.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"SyncImage.js","names":["path: string","md5: string"],"sources":["../../../src/operations/syncAssetsDirectory/SyncImage.ts"],"sourcesContent":["import { createReadStream } from \"node:fs\";\nimport fs from \"node:fs/promises\";\nimport path, { basename } from \"node:path\";\n\nimport {\n type CreateImageFileResult,\n createImageFile,\n type LookupImageFileByMd5Result,\n lookupImageFileByMd5,\n uploadImageFile,\n} from \"@editframe/api\";\n\nimport { Probe } from \"@editframe/assets\";\n\nimport { createReadableStreamFromReadable } from \"../../utils/createReadableStreamFromReadable.js\";\nimport { getClient } from \"../../utils/index.js\";\nimport type { SubAssetSync } from \"./SubAssetSync.js\";\nimport { SyncStatus } from \"./SyncStatus.js\";\n\nexport class SyncImage implements SubAssetSync<CreateImageFileResult> {\n icon = \"🖼️\";\n label = \"image\";\n syncStatus: SyncStatus = new SyncStatus(this.path);\n created: CreateImageFileResult | LookupImageFileByMd5Result | null = null;\n\n constructor(\n public path: string,\n public md5: string,\n ) {}\n\n private _probeResult: Probe | null = null;\n\n async prepare() {\n this._probeResult = await Probe.probePath(this.path);\n }\n\n get probeResult() {\n if (!this._probeResult) {\n throw new Error(\"Probe result not found. Call prepare() first.\");\n }\n return this._probeResult;\n }\n\n get extension() {\n return path.extname(this.path).slice(1);\n }\n\n async byteSize() {\n return (await fs.stat(this.path)).size;\n }\n\n async validate() {\n const [videoProbe] = this.probeResult.videoStreams;\n if (!videoProbe) {\n throw new Error(`No media info found in image: ${this.path}`);\n }\n const ext = this.extension;\n if (!(ext === \"jpg\" || ext === \"jpeg\" || ext === \"png\" || ext === \"webp\")) {\n throw new Error(`Invalid image format: ${this.path}`);\n }\n }\n async create() {\n const byteSize = (await fs.stat(this.path)).size;\n const [videoProbe] = this.probeResult.videoStreams;\n if (!videoProbe) {\n throw new Error(\n \"No video stream found in image. Should have been prevented by .validate()\",\n );\n }\n\n const maybeImageFile = await lookupImageFileByMd5(getClient(), this.md5);\n if (maybeImageFile) {\n this.created = maybeImageFile;\n } else {\n this.created = await createImageFile(getClient(), {\n md5: this.md5,\n filename: basename(this.path),\n width: videoProbe.width,\n height: videoProbe.height,\n mime_type: `image/${this.extension}` as\n | \"image/jpeg\"\n | \"image/png\"\n | \"image/jpg\"\n | \"image/webp\"\n | \"image/svg+xml\",\n byte_size: byteSize,\n });\n }\n }\n isComplete() {\n return !!this.created?.complete;\n }\n async upload() {\n if (!this.created) {\n throw new Error(\n \"Image not created. Should have been prevented by .isComplete()\",\n );\n }\n await uploadImageFile(\n getClient(),\n {\n id: this.created.id,\n byte_size: Number.parseInt(this.probeResult.format.size || \"0\", 10),\n },\n createReadableStreamFromReadable(createReadStream(this.path)),\n ).whenUploaded();\n }\n async markSynced() {\n if (!this.created) {\n throw new Error(\n \"Image not created. 
Should have been prevented by .isComplete()\",\n );\n }\n const byteSize = await this.byteSize();\n return this.syncStatus.markSynced({\n version: \"1\",\n complete: true,\n id: this.created.id,\n md5: this.md5,\n byte_size: byteSize,\n });\n }\n}\n"],"mappings":";;;;;;;;;;AAmBA,IAAa,YAAb,MAAsE;CAMpE,YACE,AAAOA,QACP,AAAOC,KACP;EAFO;EACA;cAPF;eACC;oBACiB,IAAI,WAAW,KAAK,KAAK;iBACmB;sBAOhC;;CAErC,MAAM,UAAU;AACd,OAAK,eAAe,MAAM,MAAM,UAAU,KAAK,KAAK;;CAGtD,IAAI,cAAc;AAChB,MAAI,CAAC,KAAK,aACR,OAAM,IAAI,MAAM,gDAAgD;AAElE,SAAO,KAAK;;CAGd,IAAI,YAAY;AACd,SAAO,KAAK,QAAQ,KAAK,KAAK,CAAC,MAAM,EAAE;;CAGzC,MAAM,WAAW;AACf,UAAQ,MAAM,GAAG,KAAK,KAAK,KAAK,EAAE;;CAGpC,MAAM,WAAW;EACf,MAAM,CAAC,cAAc,KAAK,YAAY;AACtC,MAAI,CAAC,WACH,OAAM,IAAI,MAAM,iCAAiC,KAAK,OAAO;EAE/D,MAAM,MAAM,KAAK;AACjB,MAAI,EAAE,QAAQ,SAAS,QAAQ,UAAU,QAAQ,SAAS,QAAQ,QAChE,OAAM,IAAI,MAAM,yBAAyB,KAAK,OAAO;;CAGzD,MAAM,SAAS;EACb,MAAM,YAAY,MAAM,GAAG,KAAK,KAAK,KAAK,EAAE;EAC5C,MAAM,CAAC,cAAc,KAAK,YAAY;AACtC,MAAI,CAAC,WACH,OAAM,IAAI,MACR,4EACD;EAGH,MAAM,iBAAiB,MAAM,qBAAqB,WAAW,EAAE,KAAK,IAAI;AACxE,MAAI,eACF,MAAK,UAAU;MAEf,MAAK,UAAU,MAAM,gBAAgB,WAAW,EAAE;GAChD,KAAK,KAAK;GACV,UAAU,SAAS,KAAK,KAAK;GAC7B,OAAO,WAAW;GAClB,QAAQ,WAAW;GACnB,WAAW,SAAS,KAAK;GAMzB,WAAW;GACZ,CAAC;;CAGN,aAAa;AACX,SAAO,CAAC,CAAC,KAAK,SAAS;;CAEzB,MAAM,SAAS;AACb,MAAI,CAAC,KAAK,QACR,OAAM,IAAI,MACR,iEACD;AAEH,QAAM,gBACJ,WAAW,EACX;GACE,IAAI,KAAK,QAAQ;GACjB,WAAW,OAAO,SAAS,KAAK,YAAY,OAAO,QAAQ,KAAK,GAAG;GACpE,EACD,iCAAiC,iBAAiB,KAAK,KAAK,CAAC,CAC9D,CAAC,cAAc;;CAElB,MAAM,aAAa;AACjB,MAAI,CAAC,KAAK,QACR,OAAM,IAAI,MACR,iEACD;EAEH,MAAM,WAAW,MAAM,KAAK,UAAU;AACtC,SAAO,KAAK,WAAW,WAAW;GAChC,SAAS;GACT,UAAU;GACV,IAAI,KAAK,QAAQ;GACjB,KAAK,KAAK;GACV,WAAW;GACZ,CAAC"}
@@ -1,7 +1,9 @@
 import fs from "node:fs/promises";
 import { z } from "zod";
-
-
+
+//#region src/operations/syncAssetsDirectory/SyncStatus.ts
+const SYNC_VERSION = "1";
+const SyncStatusSchema = z.object({
 version: z.string(),
 complete: z.boolean(),
 id: z.string(),
@@ -32,4 +34,7 @@ var SyncStatus = class {
 await fs.writeFile(this.infoPath, JSON.stringify(info, null, 2), "utf-8");
 }
 };
+
+//#endregion
 export { SyncStatus };
+//# sourceMappingURL=SyncStatus.js.map
@@ -0,0 +1 @@
{"version":3,"file":"SyncStatus.js","names":["basePath: string"],"sources":["../../../src/operations/syncAssetsDirectory/SyncStatus.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\n\nimport { z } from \"zod\";\n\nconst SYNC_VERSION = \"1\";\n\nconst SyncStatusSchema = z.object({\n version: z.string(),\n complete: z.boolean(),\n id: z.string(),\n md5: z.string(),\n byte_size: z.number(),\n});\n\nexport interface SyncStatusInfo extends z.infer<typeof SyncStatusSchema> {}\n\nexport class SyncStatus {\n infoPath = `${this.basePath}.info`;\n\n constructor(private basePath: string) {}\n\n async isSynced() {\n const syncInfo = await this.readInfo();\n if (!syncInfo) {\n return false;\n }\n return syncInfo.version === SYNC_VERSION && syncInfo.complete;\n }\n\n async readInfo() {\n try {\n const info = await fs.readFile(this.infoPath, \"utf-8\");\n return SyncStatusSchema.parse(JSON.parse(info));\n } catch (error) {\n if (\n error instanceof Error &&\n \"code\" in error &&\n error.code === \"ENOENT\"\n ) {\n return null;\n }\n throw error;\n }\n }\n\n async markSynced(info: SyncStatusInfo) {\n process.stderr.write(`✏️ Marking asset as synced: ${this.basePath}\\n`);\n await fs.writeFile(this.infoPath, JSON.stringify(info, null, 2), \"utf-8\");\n }\n}\n"],"mappings":";;;;AAIA,MAAM,eAAe;AAErB,MAAM,mBAAmB,EAAE,OAAO;CAChC,SAAS,EAAE,QAAQ;CACnB,UAAU,EAAE,SAAS;CACrB,IAAI,EAAE,QAAQ;CACd,KAAK,EAAE,QAAQ;CACf,WAAW,EAAE,QAAQ;CACtB,CAAC;AAIF,IAAa,aAAb,MAAwB;CAGtB,YAAY,AAAQA,UAAkB;EAAlB;kBAFT,GAAG,KAAK,SAAS;;CAI5B,MAAM,WAAW;EACf,MAAM,WAAW,MAAM,KAAK,UAAU;AACtC,MAAI,CAAC,SACH,QAAO;AAET,SAAO,SAAS,YAAY,gBAAgB,SAAS;;CAGvD,MAAM,WAAW;AACf,MAAI;GACF,MAAM,OAAO,MAAM,GAAG,SAAS,KAAK,UAAU,QAAQ;AACtD,UAAO,iBAAiB,MAAM,KAAK,MAAM,KAAK,CAAC;WACxC,OAAO;AACd,OACE,iBAAiB,SACjB,UAAU,SACV,MAAM,SAAS,SAEf,QAAO;AAET,SAAM;;;CAIV,MAAM,WAAW,MAAsB;AACrC,UAAQ,OAAO,MAAM,gCAAgC,KAAK,SAAS,IAAI;AACvE,QAAM,GAAG,UAAU,KAAK,UAAU,KAAK,UAAU,MAAM,MAAM,EAAE,EAAE,QAAQ"}
@@ -6,6 +6,8 @@ import { basename, dirname, join } from "node:path";
 import fs from "node:fs/promises";
 import { createReadStream } from "node:fs";
 import { Probe } from "@editframe/assets";
+
+//#region src/operations/syncAssetsDirectory/SyncTrack.ts
 var SyncTrack = class {
 constructor(path$1, md5) {
 this.path = path$1;
@@ -107,4 +109,7 @@ var SyncTrack = class {
 })]);
 }
 };
+
+//#endregion
 export { SyncTrack };
+//# sourceMappingURL=SyncTrack.js.map
@@ -0,0 +1 @@
{"version":3,"file":"SyncTrack.js","names":["path: string","md5: string","createPayload: CreateISOBMFFTrackPayload"],"sources":["../../../src/operations/syncAssetsDirectory/SyncTrack.ts"],"sourcesContent":["import { createReadStream } from \"node:fs\";\nimport fs from \"node:fs/promises\";\nimport { basename, dirname, join } from \"node:path\";\n\nimport {\n type CreateISOBMFFFileResult,\n type CreateISOBMFFTrackPayload,\n type CreateISOBMFFTrackResult,\n createISOBMFFFile,\n createISOBMFFTrack,\n type LookupISOBMFFFileByMd5Result,\n lookupISOBMFFFileByMd5,\n uploadISOBMFFTrack,\n} from \"@editframe/api\";\nimport { Probe } from \"@editframe/assets\";\n\nimport { createReadableStreamFromReadable } from \"../../utils/createReadableStreamFromReadable.js\";\nimport { getClient } from \"../../utils/index.js\";\nimport type { SubAssetSync } from \"./SubAssetSync.js\";\nimport { SyncStatus } from \"./SyncStatus.js\";\n\nexport class SyncTrack implements SubAssetSync<CreateISOBMFFTrackResult> {\n icon = \"📼\";\n label = \"track\";\n syncStatus = new SyncStatus(this.path);\n fileSyncStatus = new SyncStatus(join(dirname(this.path), \"isobmff\"));\n created: CreateISOBMFFTrackResult | null = null;\n\n constructor(\n public path: string,\n public md5: string,\n ) {}\n\n private _isoFile:\n | CreateISOBMFFFileResult\n | LookupISOBMFFFileByMd5Result\n | null = null;\n\n get isoFile() {\n if (this._isoFile) {\n return this._isoFile;\n }\n throw new Error(\"ISOBMFF file not found. Call prepare() first.\");\n }\n\n async byteSize() {\n return (await fs.stat(this.path)).size;\n }\n\n private _probeResult: Probe | null = null;\n get probeResult() {\n if (this._probeResult) {\n return this._probeResult;\n }\n throw new Error(\"Probe result not found. Call prepare() first.\");\n }\n\n get track() {\n const [track] = this.probeResult.streams;\n if (track) {\n return track;\n }\n throw new Error(`No track found in track: ${this.path}`);\n }\n\n async prepare() {\n const maybeIsoFile = await lookupISOBMFFFileByMd5(getClient(), this.md5);\n if (maybeIsoFile) {\n this._isoFile = maybeIsoFile;\n } else {\n this._isoFile = await createISOBMFFFile(getClient(), {\n md5: this.md5,\n filename: basename(this.path).replace(/\\.track-[\\d]+.mp4$/, \"\"),\n });\n }\n this._probeResult = await Probe.probePath(this.path);\n }\n\n get trackId() {\n const trackId = this.path.match(/track-([\\d]+).mp4/)?.[1];\n if (!trackId) {\n throw new Error(`No track ID found for track: ${this.path}`);\n }\n return trackId;\n }\n\n get trackDuration() {\n const track = this.track;\n if (!track.duration) {\n throw new Error(`No duration found in track: ${this.path}`);\n }\n if (typeof track.duration === \"string\") {\n return Number.parseFloat(track.duration);\n }\n return track.duration;\n }\n\n async validate() {\n this.trackId;\n this.isoFile;\n this.trackDuration;\n }\n\n async create(): Promise<void> {\n const track = this.track;\n const isoFile = this.isoFile;\n\n if (track.codec_type === \"data\") {\n throw new Error(`Unsupported codec type: ${track.codec_type}`);\n }\n const createPayload: CreateISOBMFFTrackPayload =\n track.codec_type === \"audio\"\n ? 
{\n type: track.codec_type,\n file_id: isoFile.id,\n track_id: Number(this.trackId),\n probe_info: track,\n duration_ms: Math.round(this.trackDuration * 1000),\n codec_name: track.codec_name,\n byte_size: await this.byteSize(),\n }\n : {\n type: track.codec_type,\n file_id: isoFile.id,\n track_id: Number(this.trackId),\n probe_info: track,\n duration_ms: Math.round(this.trackDuration * 1000),\n codec_name: track.codec_name,\n byte_size: await this.byteSize(),\n };\n\n this.created = await createISOBMFFTrack(getClient(), createPayload);\n }\n isComplete() {\n return !!this.created?.complete;\n }\n async upload() {\n if (!this.created) {\n throw new Error(\n \"Track not created. Should have been prevented by .isComplete()\",\n );\n }\n await uploadISOBMFFTrack(\n getClient(),\n this.isoFile.id,\n Number(this.trackId),\n createReadableStreamFromReadable(createReadStream(this.path)),\n this.created?.byte_size,\n ).whenUploaded();\n }\n async markSynced() {\n if (!this.created) {\n throw new Error(\n \"Track not created. Should have been prevented by .isComplete()\",\n );\n }\n const byteSize = await this.byteSize();\n await Promise.all([\n this.syncStatus.markSynced({\n version: \"1\",\n complete: true,\n id: `${this.created.file_id}:${this.created.track_id}`,\n md5: this.md5,\n byte_size: byteSize,\n }),\n this.fileSyncStatus.markSynced({\n version: \"1\",\n complete: true,\n id: this.created.file_id,\n md5: this.md5,\n byte_size: byteSize,\n }),\n ]);\n }\n}\n"],"mappings":";;;;;;;;;;AAqBA,IAAa,YAAb,MAAyE;CAOvE,YACE,AAAOA,QACP,AAAOC,KACP;EAFO;EACA;cARF;eACC;oBACK,IAAI,WAAW,KAAK,KAAK;wBACrB,IAAI,WAAW,KAAK,QAAQ,KAAK,KAAK,EAAE,UAAU,CAAC;iBACzB;kBAUhC;sBAa0B;;CAXrC,IAAI,UAAU;AACZ,MAAI,KAAK,SACP,QAAO,KAAK;AAEd,QAAM,IAAI,MAAM,gDAAgD;;CAGlE,MAAM,WAAW;AACf,UAAQ,MAAM,GAAG,KAAK,KAAK,KAAK,EAAE;;CAIpC,IAAI,cAAc;AAChB,MAAI,KAAK,aACP,QAAO,KAAK;AAEd,QAAM,IAAI,MAAM,gDAAgD;;CAGlE,IAAI,QAAQ;EACV,MAAM,CAAC,SAAS,KAAK,YAAY;AACjC,MAAI,MACF,QAAO;AAET,QAAM,IAAI,MAAM,4BAA4B,KAAK,OAAO;;CAG1D,MAAM,UAAU;EACd,MAAM,eAAe,MAAM,uBAAuB,WAAW,EAAE,KAAK,IAAI;AACxE,MAAI,aACF,MAAK,WAAW;MAEhB,MAAK,WAAW,MAAM,kBAAkB,WAAW,EAAE;GACnD,KAAK,KAAK;GACV,UAAU,SAAS,KAAK,KAAK,CAAC,QAAQ,sBAAsB,GAAG;GAChE,CAAC;AAEJ,OAAK,eAAe,MAAM,MAAM,UAAU,KAAK,KAAK;;CAGtD,IAAI,UAAU;EACZ,MAAM,UAAU,KAAK,KAAK,MAAM,oBAAoB,GAAG;AACvD,MAAI,CAAC,QACH,OAAM,IAAI,MAAM,iCAAiC,KAAK,OAAO;AAE/D,SAAO;;CAGT,IAAI,gBAAgB;EAClB,MAAM,QAAQ,KAAK;AACnB,MAAI,CAAC,MAAM,SACT,OAAM,IAAI,MAAM,+BAA+B,KAAK,OAAO;AAE7D,MAAI,OAAO,MAAM,aAAa,SAC5B,QAAO,OAAO,WAAW,MAAM,SAAS;AAE1C,SAAO,MAAM;;CAGf,MAAM,WAAW;AACf,OAAK;AACL,OAAK;AACL,OAAK;;CAGP,MAAM,SAAwB;EAC5B,MAAM,QAAQ,KAAK;EACnB,MAAM,UAAU,KAAK;AAErB,MAAI,MAAM,eAAe,OACvB,OAAM,IAAI,MAAM,2BAA2B,MAAM,aAAa;EAEhE,MAAMC,gBACJ,MAAM,eAAe,UACjB;GACE,MAAM,MAAM;GACZ,SAAS,QAAQ;GACjB,UAAU,OAAO,KAAK,QAAQ;GAC9B,YAAY;GACZ,aAAa,KAAK,MAAM,KAAK,gBAAgB,IAAK;GAClD,YAAY,MAAM;GAClB,WAAW,MAAM,KAAK,UAAU;GACjC,GACD;GACE,MAAM,MAAM;GACZ,SAAS,QAAQ;GACjB,UAAU,OAAO,KAAK,QAAQ;GAC9B,YAAY;GACZ,aAAa,KAAK,MAAM,KAAK,gBAAgB,IAAK;GAClD,YAAY,MAAM;GAClB,WAAW,MAAM,KAAK,UAAU;GACjC;AAEP,OAAK,UAAU,MAAM,mBAAmB,WAAW,EAAE,cAAc;;CAErE,aAAa;AACX,SAAO,CAAC,CAAC,KAAK,SAAS;;CAEzB,MAAM,SAAS;AACb,MAAI,CAAC,KAAK,QACR,OAAM,IAAI,MACR,iEACD;AAEH,QAAM,mBACJ,WAAW,EACX,KAAK,QAAQ,IACb,OAAO,KAAK,QAAQ,EACpB,iCAAiC,iBAAiB,KAAK,KAAK,CAAC,EAC7D,KAAK,SAAS,UACf,CAAC,cAAc;;CAElB,MAAM,aAAa;AACjB,MAAI,CAAC,KAAK,QACR,OAAM,IAAI,MACR,iEACD;EAEH,MAAM,WAAW,MAAM,KAAK,UAAU;AACtC,QAAM,QAAQ,IAAI,CAChB,KAAK,WAAW,WAAW;GACzB,SAAS;GACT,UAAU;GACV,IAAI,GAAG,KAAK,QAAQ,QAAQ,GAAG,KAAK,QAAQ;GAC5C,KAAK,KAAK;GACV,WAAW;GACZ,CAAC,EACF,KAAK,eAAe
,WAAW;GAC7B,SAAS;GACT,UAAU;GACV,IAAI,KAAK,QAAQ;GACjB,KAAK,KAAK;GACV,WAAW;GACZ,CAAC,CACH,CAAC"}
@@ -1,3 +1,4 @@
+//#region src/operations/syncAssetsDirectory/doAssetSync.ts
 const doAssetSync = async function* (assetSync) {
 if (await assetSync.syncStatus.isSynced()) {
 yield {
@@ -40,4 +41,7 @@ const doAssetSync = async function* (assetSync) {
 message: `Synced ${assetSync.label}: ${assetSync.path}`
 };
 };
+
+//#endregion
 export { doAssetSync };
+//# sourceMappingURL=doAssetSync.js.map
@@ -0,0 +1 @@
{"version":3,"file":"doAssetSync.js","names":[],"sources":["../../../src/operations/syncAssetsDirectory/doAssetSync.ts"],"sourcesContent":["import type { SubAssetSync } from \"./SubAssetSync.js\";\n\nexport const doAssetSync = async function* (\n assetSync: SubAssetSync<unknown>,\n): AsyncGenerator<{\n status: \"info\" | \"success\";\n message: string;\n}> {\n if (await assetSync.syncStatus.isSynced()) {\n yield {\n status: \"info\",\n message: `Sub-asset has already been synced: ${assetSync.path}`,\n };\n return;\n }\n\n try {\n await assetSync.prepare();\n await assetSync.validate();\n } catch (error) {\n const message = error instanceof Error ? error.message : \"Unknown error\";\n\n throw new Error(`Error validating ${assetSync.label}: ${message}`);\n }\n\n yield {\n status: \"info\",\n message: `${assetSync.icon} Syncing ${assetSync.label}: ${assetSync.path}`,\n };\n\n try {\n await assetSync.create();\n } catch (error) {\n const message = error instanceof Error ? error.message : \"Unknown error\";\n\n throw new Error(`Error creating ${assetSync.label}: ${message}`);\n }\n\n if (!assetSync.isComplete()) {\n try {\n await assetSync.upload();\n } catch (error) {\n const message = error instanceof Error ? error.message : \"Unknown error\";\n\n throw new Error(`Error uploading ${assetSync.label}: ${message}`);\n }\n }\n\n try {\n await assetSync.markSynced();\n } catch (error) {\n const message = error instanceof Error ? error.message : \"Unknown error\";\n\n throw new Error(`Error marking ${assetSync.label} as synced: ${message}`);\n }\n\n yield {\n status: \"success\",\n message: `Synced ${assetSync.label}: ${assetSync.path}`,\n };\n return;\n};\n"],"mappings":";AAEA,MAAa,cAAc,iBACzB,WAIC;AACD,KAAI,MAAM,UAAU,WAAW,UAAU,EAAE;AACzC,QAAM;GACJ,QAAQ;GACR,SAAS,sCAAsC,UAAU;GAC1D;AACD;;AAGF,KAAI;AACF,QAAM,UAAU,SAAS;AACzB,QAAM,UAAU,UAAU;UACnB,OAAO;EACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AAEzD,QAAM,IAAI,MAAM,oBAAoB,UAAU,MAAM,IAAI,UAAU;;AAGpE,OAAM;EACJ,QAAQ;EACR,SAAS,GAAG,UAAU,KAAK,YAAY,UAAU,MAAM,IAAI,UAAU;EACtE;AAED,KAAI;AACF,QAAM,UAAU,QAAQ;UACjB,OAAO;EACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AAEzD,QAAM,IAAI,MAAM,kBAAkB,UAAU,MAAM,IAAI,UAAU;;AAGlE,KAAI,CAAC,UAAU,YAAY,CACzB,KAAI;AACF,QAAM,UAAU,QAAQ;UACjB,OAAO;EACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AAEzD,QAAM,IAAI,MAAM,mBAAmB,UAAU,MAAM,IAAI,UAAU;;AAIrE,KAAI;AACF,QAAM,UAAU,YAAY;UACrB,OAAO;EACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AAEzD,QAAM,IAAI,MAAM,iBAAiB,UAAU,MAAM,cAAc,UAAU;;AAG3E,OAAM;EACJ,QAAQ;EACR,SAAS,UAAU,UAAU,MAAM,IAAI,UAAU;EAClD"}
@@ -2,6 +2,8 @@ import { doAssetSync } from "./syncAssetsDirectory/doAssetSync.js";
 import { getAssetSync } from "./syncAssetsDirectory/SubAssetSync.js";
 import path from "node:path";
 import fs from "node:fs/promises";
+
+//#region src/operations/syncAssetsDirectory.ts
 const syncAssetDirectory = async (cacheDir) => {
 if (!(await fs.stat(cacheDir).catch((error) => {
 if (error.code === "ENOENT") return;
@@ -54,4 +56,7 @@ const syncAssetDirectory = async (cacheDir) => {
 throw new Error("Failed to sync assets");
 }
 };
+
+//#endregion
 export { syncAssetDirectory };
+//# sourceMappingURL=syncAssetsDirectory.js.map
@@ -0,0 +1 @@
{"version":3,"file":"syncAssetsDirectory.js","names":["errors: Record<string, string[]>","path"],"sources":["../../src/operations/syncAssetsDirectory.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { doAssetSync } from \"./syncAssetsDirectory/doAssetSync.js\";\nimport { getAssetSync } from \"./syncAssetsDirectory/SubAssetSync.js\";\n\nexport const syncAssetDirectory = async (\n /**\n * Project directory will be used as the base to find an assets directory.\n * Assets will be synced from `<projectDirectory>/src/assets`\n */\n cacheDir: string,\n) => {\n const stat = await fs.stat(cacheDir).catch((error) => {\n if (error.code === \"ENOENT\") {\n return;\n }\n throw error;\n });\n\n if (!stat?.isDirectory()) {\n console.error(`No assets cache directory found at ${cacheDir}`);\n return;\n }\n const assets = await fs.readdir(cacheDir);\n\n process.stderr.write(`Syncing asset dir: ${cacheDir}\\n`);\n\n const errors: Record<string, string[]> = {};\n\n const reportError = (path: string, message: string) => {\n errors[path] ||= [];\n errors[path].push(message);\n process.stderr.write(` 🚫 ${message}\\n`);\n };\n\n const reportSuccess = (_path: string, message: string) => {\n process.stderr.write(` ✅ ${message}\\n`);\n };\n\n const reportInfo = (_path: string, message: string) => {\n process.stderr.write(` ${message}\\n`);\n };\n\n for (const assetMd5 of assets) {\n reportInfo(assetMd5, `Syncing asset: ${assetMd5}`);\n const assetDir = path.join(cacheDir, assetMd5);\n const stat = await fs.stat(assetDir);\n if (!stat.isDirectory()) {\n reportError(assetMd5, \"Invalid asset. Did not find asset directory.\");\n return;\n }\n const subAssets = await fs.readdir(assetDir);\n\n for (const subAsset of subAssets) {\n if (subAsset.endsWith(\".info\")) {\n // skip .info files, they are not assets\n continue;\n }\n const subAssetPath = path.join(assetDir, subAsset);\n\n try {\n const assetSync = getAssetSync(subAssetPath, assetMd5);\n for await (const { status, message } of doAssetSync(assetSync)) {\n if (status === \"success\") {\n reportSuccess(subAsset, message);\n } else if (status === \"info\") {\n reportInfo(subAsset, message);\n }\n }\n } catch (error) {\n if (error instanceof Error) {\n reportError(subAsset, error.message);\n } else {\n reportError(subAsset, \"Unknown error\");\n }\n }\n }\n }\n\n if (Object.keys(errors).length) {\n process.stderr.write(\"\\n\\n❌ Encountered errors while syncing assets:\\n\");\n for (const [asset, messages] of Object.entries(errors)) {\n process.stderr.write(`\\n🚫 ${asset}\\n`);\n for (const message of messages) {\n process.stderr.write(` - ${message}\\n`);\n }\n }\n\n throw new Error(\"Failed to sync assets\");\n 
}\n};\n"],"mappings":";;;;;;AAKA,MAAa,qBAAqB,OAKhC,aACG;AAQH,KAAI,EAPS,MAAM,GAAG,KAAK,SAAS,CAAC,OAAO,UAAU;AACpD,MAAI,MAAM,SAAS,SACjB;AAEF,QAAM;GACN,GAES,aAAa,EAAE;AACxB,UAAQ,MAAM,sCAAsC,WAAW;AAC/D;;CAEF,MAAM,SAAS,MAAM,GAAG,QAAQ,SAAS;AAEzC,SAAQ,OAAO,MAAM,sBAAsB,SAAS,IAAI;CAExD,MAAMA,SAAmC,EAAE;CAE3C,MAAM,eAAe,QAAc,YAAoB;AACrD,SAAOC,YAAU,EAAE;AACnB,SAAOA,QAAM,KAAK,QAAQ;AAC1B,UAAQ,OAAO,MAAM,OAAO,QAAQ,IAAI;;CAG1C,MAAM,iBAAiB,OAAe,YAAoB;AACxD,UAAQ,OAAO,MAAM,MAAM,QAAQ,IAAI;;CAGzC,MAAM,cAAc,OAAe,YAAoB;AACrD,UAAQ,OAAO,MAAM,IAAI,QAAQ,IAAI;;AAGvC,MAAK,MAAM,YAAY,QAAQ;AAC7B,aAAW,UAAU,kBAAkB,WAAW;EAClD,MAAM,WAAW,KAAK,KAAK,UAAU,SAAS;AAE9C,MAAI,EADS,MAAM,GAAG,KAAK,SAAS,EAC1B,aAAa,EAAE;AACvB,eAAY,UAAU,+CAA+C;AACrE;;EAEF,MAAM,YAAY,MAAM,GAAG,QAAQ,SAAS;AAE5C,OAAK,MAAM,YAAY,WAAW;AAChC,OAAI,SAAS,SAAS,QAAQ,CAE5B;GAEF,MAAM,eAAe,KAAK,KAAK,UAAU,SAAS;AAElD,OAAI;IACF,MAAM,YAAY,aAAa,cAAc,SAAS;AACtD,eAAW,MAAM,EAAE,QAAQ,aAAa,YAAY,UAAU,CAC5D,KAAI,WAAW,UACb,eAAc,UAAU,QAAQ;aACvB,WAAW,OACpB,YAAW,UAAU,QAAQ;YAG1B,OAAO;AACd,QAAI,iBAAiB,MACnB,aAAY,UAAU,MAAM,QAAQ;QAEpC,aAAY,UAAU,gBAAgB;;;;AAM9C,KAAI,OAAO,KAAK,OAAO,CAAC,QAAQ;AAC9B,UAAQ,OAAO,MAAM,mDAAmD;AACxE,OAAK,MAAM,CAAC,OAAO,aAAa,OAAO,QAAQ,OAAO,EAAE;AACtD,WAAQ,OAAO,MAAM,QAAQ,MAAM,IAAI;AACvC,QAAK,MAAM,WAAW,SACpB,SAAQ,OAAO,MAAM,OAAO,QAAQ,IAAI;;AAI5C,QAAM,IAAI,MAAM,wBAAwB"}
@@ -1,4 +1,6 @@
 import { Stream } from "node:stream";
+
+//#region src/utils/createReadableStreamFromReadable.ts
 const createReadableStreamFromReadable = (source) => {
 const pump = new StreamPump(source);
 return new ReadableStream(pump, pump);
@@ -61,4 +63,7 @@ var StreamPump = class {
 }
 }
 };
+
+//#endregion
 export { createReadableStreamFromReadable };
+//# sourceMappingURL=createReadableStreamFromReadable.js.map
@@ -0,0 +1 @@
{"version":3,"file":"createReadableStreamFromReadable.js","names":["_error: any"],"sources":["../../src/utils/createReadableStreamFromReadable.ts"],"sourcesContent":["import { type Readable, Stream } from \"node:stream\";\n\nexport const createReadableStreamFromReadable = (\n source: Readable & { readableHighWaterMark?: number },\n) => {\n const pump = new StreamPump(source);\n const stream = new ReadableStream(pump, pump);\n return stream;\n};\n\nclass StreamPump {\n public highWaterMark: number;\n public accumalatedSize: number;\n private stream: Stream & {\n readableHighWaterMark?: number;\n readable?: boolean;\n resume?: () => void;\n pause?: () => void;\n destroy?: (error?: Error) => void;\n };\n private controller?: ReadableStreamController<Uint8Array>;\n\n constructor(\n stream: Stream & {\n readableHighWaterMark?: number;\n readable?: boolean;\n resume?: () => void;\n pause?: () => void;\n destroy?: (error?: Error) => void;\n },\n ) {\n this.highWaterMark =\n stream.readableHighWaterMark ||\n new Stream.Readable().readableHighWaterMark;\n this.accumalatedSize = 0;\n this.stream = stream;\n this.enqueue = this.enqueue.bind(this);\n this.error = this.error.bind(this);\n this.close = this.close.bind(this);\n }\n\n size(chunk: Uint8Array) {\n return chunk?.byteLength || 0;\n }\n\n start(controller: ReadableStreamController<Uint8Array>) {\n this.controller = controller;\n this.stream.on(\"data\", this.enqueue);\n this.stream.once(\"error\", this.error);\n this.stream.once(\"end\", this.close);\n this.stream.once(\"close\", this.close);\n }\n\n pull() {\n this.resume();\n }\n\n cancel(reason?: Error) {\n if (this.stream.destroy) {\n this.stream.destroy(reason);\n }\n\n this.stream.off(\"data\", this.enqueue);\n this.stream.off(\"error\", this.error);\n this.stream.off(\"end\", this.close);\n this.stream.off(\"close\", this.close);\n }\n\n enqueue(chunk: Uint8Array | string) {\n if (this.controller) {\n try {\n const available = (this.controller.desiredSize || 0) - chunk.length;\n this.controller.enqueue(chunk as Uint8Array);\n if (available <= 0) {\n this.pause();\n }\n } catch (_error: any) {\n this.controller.error(\n new Error(\n \"Could not create Buffer, chunk must be of type string or an instance of Buffer, ArrayBuffer, or Array or an Array-like Object\",\n ),\n );\n this.cancel();\n }\n }\n }\n\n pause() {\n if (this.stream.pause) {\n this.stream.pause();\n }\n }\n\n resume() {\n if (this.stream.readable && this.stream.resume) {\n this.stream.resume();\n }\n }\n\n close() {\n if (this.controller) {\n this.controller.close();\n delete this.controller;\n }\n }\n\n error(error: Error) {\n if (this.controller) {\n this.controller.error(error);\n delete this.controller;\n }\n 
}\n}\n"],"mappings":";;;AAEA,MAAa,oCACX,WACG;CACH,MAAM,OAAO,IAAI,WAAW,OAAO;AAEnC,QADe,IAAI,eAAe,MAAM,KAAK;;AAI/C,IAAM,aAAN,MAAiB;CAYf,YACE,QAOA;AACA,OAAK,gBACH,OAAO,yBACP,IAAI,OAAO,UAAU,CAAC;AACxB,OAAK,kBAAkB;AACvB,OAAK,SAAS;AACd,OAAK,UAAU,KAAK,QAAQ,KAAK,KAAK;AACtC,OAAK,QAAQ,KAAK,MAAM,KAAK,KAAK;AAClC,OAAK,QAAQ,KAAK,MAAM,KAAK,KAAK;;CAGpC,KAAK,OAAmB;AACtB,SAAO,OAAO,cAAc;;CAG9B,MAAM,YAAkD;AACtD,OAAK,aAAa;AAClB,OAAK,OAAO,GAAG,QAAQ,KAAK,QAAQ;AACpC,OAAK,OAAO,KAAK,SAAS,KAAK,MAAM;AACrC,OAAK,OAAO,KAAK,OAAO,KAAK,MAAM;AACnC,OAAK,OAAO,KAAK,SAAS,KAAK,MAAM;;CAGvC,OAAO;AACL,OAAK,QAAQ;;CAGf,OAAO,QAAgB;AACrB,MAAI,KAAK,OAAO,QACd,MAAK,OAAO,QAAQ,OAAO;AAG7B,OAAK,OAAO,IAAI,QAAQ,KAAK,QAAQ;AACrC,OAAK,OAAO,IAAI,SAAS,KAAK,MAAM;AACpC,OAAK,OAAO,IAAI,OAAO,KAAK,MAAM;AAClC,OAAK,OAAO,IAAI,SAAS,KAAK,MAAM;;CAGtC,QAAQ,OAA4B;AAClC,MAAI,KAAK,WACP,KAAI;GACF,MAAM,aAAa,KAAK,WAAW,eAAe,KAAK,MAAM;AAC7D,QAAK,WAAW,QAAQ,MAAoB;AAC5C,OAAI,aAAa,EACf,MAAK,OAAO;WAEPA,QAAa;AACpB,QAAK,WAAW,sBACd,IAAI,MACF,gIACD,CACF;AACD,QAAK,QAAQ;;;CAKnB,QAAQ;AACN,MAAI,KAAK,OAAO,MACd,MAAK,OAAO,OAAO;;CAIvB,SAAS;AACP,MAAI,KAAK,OAAO,YAAY,KAAK,OAAO,OACtC,MAAK,OAAO,QAAQ;;CAIxB,QAAQ;AACN,MAAI,KAAK,YAAY;AACnB,QAAK,WAAW,OAAO;AACvB,UAAO,KAAK;;;CAIhB,MAAM,OAAc;AAClB,MAAI,KAAK,YAAY;AACnB,QAAK,WAAW,MAAM,MAAM;AAC5B,UAAO,KAAK"}