@vibeframe/mcp-server 0.36.0 → 0.37.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +1223 -135
- package/package.json +3 -3
package/dist/index.js
CHANGED
|
@@ -2280,7 +2280,7 @@ function buildFFmpegArgs(clips, sources, presetSettings, outputPath, options, so
|
|
|
2280
2280
|
return args;
|
|
2281
2281
|
}
|
|
2282
2282
|
function runFFmpegProcess(ffmpegPath, args, onProgress) {
|
|
2283
|
-
return new Promise((
|
|
2283
|
+
return new Promise((resolve20, reject) => {
|
|
2284
2284
|
const ffmpeg = spawn(ffmpegPath, args, {
|
|
2285
2285
|
stdio: ["pipe", "pipe", "pipe"]
|
|
2286
2286
|
});
|
|
@@ -2304,7 +2304,7 @@ function runFFmpegProcess(ffmpegPath, args, onProgress) {
|
|
|
2304
2304
|
});
|
|
2305
2305
|
ffmpeg.on("close", (code) => {
|
|
2306
2306
|
if (code === 0) {
|
|
2307
|
-
|
|
2307
|
+
resolve20();
|
|
2308
2308
|
} else {
|
|
2309
2309
|
const errorMatch = stderr.match(/Error.*$/m);
|
|
2310
2310
|
const errorMsg = errorMatch ? errorMatch[0] : `FFmpeg exited with code ${code}`;
|
|
@@ -3252,7 +3252,7 @@ var GeminiProvider = class {
|
|
|
3252
3252
|
* Sleep helper
|
|
3253
3253
|
*/
|
|
3254
3254
|
sleep(ms) {
|
|
3255
|
-
return new Promise((
|
|
3255
|
+
return new Promise((resolve20) => setTimeout(resolve20, ms));
|
|
3256
3256
|
}
|
|
3257
3257
|
/**
|
|
3258
3258
|
* Extend a previously generated Veo video
|
|
@@ -7116,7 +7116,7 @@ var RunwayProvider = class _RunwayProvider {
|
|
|
7116
7116
|
* Sleep helper
|
|
7117
7117
|
*/
|
|
7118
7118
|
sleep(ms) {
|
|
7119
|
-
return new Promise((
|
|
7119
|
+
return new Promise((resolve20) => setTimeout(resolve20, ms));
|
|
7120
7120
|
}
|
|
7121
7121
|
};
|
|
7122
7122
|
RunwayProvider.API_VERSION = "2024-11-06";
|
|
@@ -7534,7 +7534,7 @@ var KlingProvider = class {
|
|
|
7534
7534
|
* Sleep helper
|
|
7535
7535
|
*/
|
|
7536
7536
|
sleep(ms) {
|
|
7537
|
-
return new Promise((
|
|
7537
|
+
return new Promise((resolve20) => setTimeout(resolve20, ms));
|
|
7538
7538
|
}
|
|
7539
7539
|
};
|
|
7540
7540
|
var klingProvider = new KlingProvider();
|
|
@@ -7829,7 +7829,7 @@ var GrokProvider = class {
|
|
|
7829
7829
|
}
|
|
7830
7830
|
}
|
|
7831
7831
|
sleep(ms) {
|
|
7832
|
-
return new Promise((
|
|
7832
|
+
return new Promise((resolve20) => setTimeout(resolve20, ms));
|
|
7833
7833
|
}
|
|
7834
7834
|
};
|
|
7835
7835
|
var grokProvider = new GrokProvider();
|
|
@@ -8088,7 +8088,7 @@ var ReplicateProvider = class {
|
|
|
8088
8088
|
* Sleep helper
|
|
8089
8089
|
*/
|
|
8090
8090
|
sleep(ms) {
|
|
8091
|
-
return new Promise((
|
|
8091
|
+
return new Promise((resolve20) => setTimeout(resolve20, ms));
|
|
8092
8092
|
}
|
|
8093
8093
|
/**
|
|
8094
8094
|
* Generate music from text prompt using MusicGen
|
|
@@ -8570,7 +8570,7 @@ async function prompt(question, hidden = false) {
|
|
|
8570
8570
|
input: process.stdin,
|
|
8571
8571
|
output: process.stdout
|
|
8572
8572
|
});
|
|
8573
|
-
return new Promise((
|
|
8573
|
+
return new Promise((resolve20) => {
|
|
8574
8574
|
if (hidden && process.stdin.isTTY) {
|
|
8575
8575
|
process.stdout.write(question);
|
|
8576
8576
|
let input = "";
|
|
@@ -8584,7 +8584,7 @@ async function prompt(question, hidden = false) {
|
|
|
8584
8584
|
process.stdin.removeListener("data", onData);
|
|
8585
8585
|
process.stdout.write("\n");
|
|
8586
8586
|
rl.close();
|
|
8587
|
-
|
|
8587
|
+
resolve20(input);
|
|
8588
8588
|
} else if (char === "") {
|
|
8589
8589
|
process.exit(1);
|
|
8590
8590
|
} else if (char === "\x7F" || char === "\b") {
|
|
@@ -8599,7 +8599,7 @@ async function prompt(question, hidden = false) {
|
|
|
8599
8599
|
} else {
|
|
8600
8600
|
rl.question(question, (answer) => {
|
|
8601
8601
|
rl.close();
|
|
8602
|
-
|
|
8602
|
+
resolve20(answer);
|
|
8603
8603
|
});
|
|
8604
8604
|
}
|
|
8605
8605
|
});
|
|
@@ -8769,6 +8769,28 @@ async function extendVideoNaturally(videoPath, targetDuration, outputPath) {
|
|
|
8769
8769
|
}
|
|
8770
8770
|
|
|
8771
8771
|
// ../cli/src/utils/subtitle.ts
|
|
8772
|
+
function detectFormat(outputPath, explicitFormat) {
|
|
8773
|
+
if (explicitFormat) {
|
|
8774
|
+
const fmt = explicitFormat.toLowerCase();
|
|
8775
|
+
if (fmt === "srt" || fmt === "vtt" || fmt === "json") {
|
|
8776
|
+
return fmt;
|
|
8777
|
+
}
|
|
8778
|
+
}
|
|
8779
|
+
const ext = outputPath.toLowerCase().split(".").pop();
|
|
8780
|
+
if (ext === "srt") return "srt";
|
|
8781
|
+
if (ext === "vtt") return "vtt";
|
|
8782
|
+
return "json";
|
|
8783
|
+
}
|
|
8784
|
+
function formatTranscript(result, format) {
|
|
8785
|
+
if (format === "json") {
|
|
8786
|
+
return JSON.stringify(result, null, 2);
|
|
8787
|
+
}
|
|
8788
|
+
const segments = result.segments || [];
|
|
8789
|
+
if (format === "srt") {
|
|
8790
|
+
return formatSRT(segments);
|
|
8791
|
+
}
|
|
8792
|
+
return formatVTT(segments);
|
|
8793
|
+
}
|
|
8772
8794
|
function formatSRT(segments) {
|
|
8773
8795
|
return segments.map((seg, index) => {
|
|
8774
8796
|
const start = formatSRTTime(seg.startTime);
|
|
@@ -8779,6 +8801,18 @@ ${seg.text}
|
|
|
8779
8801
|
`;
|
|
8780
8802
|
}).join("\n");
|
|
8781
8803
|
}
|
|
8804
|
+
function formatVTT(segments) {
|
|
8805
|
+
const header = "WEBVTT\n\n";
|
|
8806
|
+
const cues = segments.map((seg, index) => {
|
|
8807
|
+
const start = formatVTTTime(seg.startTime);
|
|
8808
|
+
const end = formatVTTTime(seg.endTime);
|
|
8809
|
+
return `${index + 1}
|
|
8810
|
+
${start} --> ${end}
|
|
8811
|
+
${seg.text}
|
|
8812
|
+
`;
|
|
8813
|
+
}).join("\n");
|
|
8814
|
+
return header + cues;
|
|
8815
|
+
}
|
|
8782
8816
|
function formatSRTTime(seconds) {
|
|
8783
8817
|
const hours = Math.floor(seconds / 3600);
|
|
8784
8818
|
const mins = Math.floor(seconds % 3600 / 60);
|
|
@@ -8786,6 +8820,13 @@ function formatSRTTime(seconds) {
|
|
|
8786
8820
|
const ms = Math.round(seconds % 1 * 1e3);
|
|
8787
8821
|
return `${pad(hours, 2)}:${pad(mins, 2)}:${pad(secs, 2)},${pad(ms, 3)}`;
|
|
8788
8822
|
}
|
|
8823
|
+
function formatVTTTime(seconds) {
|
|
8824
|
+
const hours = Math.floor(seconds / 3600);
|
|
8825
|
+
const mins = Math.floor(seconds % 3600 / 60);
|
|
8826
|
+
const secs = Math.floor(seconds % 60);
|
|
8827
|
+
const ms = Math.round(seconds % 1 * 1e3);
|
|
8828
|
+
return `${pad(hours, 2)}:${pad(mins, 2)}:${pad(secs, 2)}.${pad(ms, 3)}`;
|
|
8829
|
+
}
|
|
8789
8830
|
function parseSRT(content) {
|
|
8790
8831
|
const segments = [];
|
|
8791
8832
|
const blocks = content.trim().split(/\n\s*\n/);
|
|
@@ -10387,11 +10428,195 @@ Score each category 1-10. For fixable issues, provide an FFmpeg filter in autoFi
|
|
|
10387
10428
|
}
|
|
10388
10429
|
|
|
10389
10430
|
// ../cli/src/commands/ai-image.ts
|
|
10431
|
+
import { resolve as resolve10, dirname as dirname2, basename as basename3, extname as extname3 } from "node:path";
|
|
10390
10432
|
import { readFile as readFile8, writeFile as writeFile6, mkdir as mkdir4 } from "node:fs/promises";
|
|
10391
10433
|
import { existsSync as existsSync5 } from "node:fs";
|
|
10392
10434
|
import chalk6 from "chalk";
|
|
10393
10435
|
import ora5 from "ora";
|
|
10394
10436
|
init_exec_safe();
|
|
10437
|
+
async function executeImageGenerate(options) {
|
|
10438
|
+
const {
|
|
10439
|
+
prompt: prompt3,
|
|
10440
|
+
provider = "gemini",
|
|
10441
|
+
output,
|
|
10442
|
+
size = "1024x1024",
|
|
10443
|
+
ratio = "1:1",
|
|
10444
|
+
quality = "standard",
|
|
10445
|
+
style = "vivid",
|
|
10446
|
+
count = 1,
|
|
10447
|
+
model,
|
|
10448
|
+
apiKey
|
|
10449
|
+
} = options;
|
|
10450
|
+
try {
|
|
10451
|
+
if (provider === "openai") {
|
|
10452
|
+
const key = apiKey || process.env.OPENAI_API_KEY;
|
|
10453
|
+
if (!key) return { success: false, error: "OPENAI_API_KEY required" };
|
|
10454
|
+
const openaiImage = new OpenAIImageProvider();
|
|
10455
|
+
await openaiImage.initialize({ apiKey: key });
|
|
10456
|
+
const result = await openaiImage.generateImage(prompt3, {
|
|
10457
|
+
size,
|
|
10458
|
+
quality,
|
|
10459
|
+
style,
|
|
10460
|
+
n: count
|
|
10461
|
+
});
|
|
10462
|
+
if (!result.success || !result.images) {
|
|
10463
|
+
return { success: false, error: result.error || "Image generation failed" };
|
|
10464
|
+
}
|
|
10465
|
+
let outputPath;
|
|
10466
|
+
if (output && result.images.length > 0) {
|
|
10467
|
+
const img = result.images[0];
|
|
10468
|
+
let buffer;
|
|
10469
|
+
if (img.url) {
|
|
10470
|
+
const response = await fetch(img.url);
|
|
10471
|
+
buffer = Buffer.from(await response.arrayBuffer());
|
|
10472
|
+
} else if (img.base64) {
|
|
10473
|
+
buffer = Buffer.from(img.base64, "base64");
|
|
10474
|
+
} else {
|
|
10475
|
+
return { success: false, error: "No image data available" };
|
|
10476
|
+
}
|
|
10477
|
+
outputPath = resolve10(process.cwd(), output);
|
|
10478
|
+
await mkdir4(dirname2(outputPath), { recursive: true });
|
|
10479
|
+
await writeFile6(outputPath, buffer);
|
|
10480
|
+
}
|
|
10481
|
+
return {
|
|
10482
|
+
success: true,
|
|
10483
|
+
outputPath,
|
|
10484
|
+
images: result.images.map((img) => ({ url: img.url, base64: img.base64, revisedPrompt: img.revisedPrompt })),
|
|
10485
|
+
provider: "openai"
|
|
10486
|
+
};
|
|
10487
|
+
} else if (provider === "gemini") {
|
|
10488
|
+
const key = apiKey || process.env.GOOGLE_API_KEY;
|
|
10489
|
+
if (!key) return { success: false, error: "GOOGLE_API_KEY required" };
|
|
10490
|
+
const gemini = new GeminiProvider();
|
|
10491
|
+
await gemini.initialize({ apiKey: key });
|
|
10492
|
+
const modelMap = { latest: "3.1-flash" };
|
|
10493
|
+
const mappedModel = model ? modelMap[model] || model : void 0;
|
|
10494
|
+
let result = await gemini.generateImage(prompt3, {
|
|
10495
|
+
model: mappedModel,
|
|
10496
|
+
aspectRatio: ratio
|
|
10497
|
+
});
|
|
10498
|
+
const fallbackModels = ["3.1-flash"];
|
|
10499
|
+
if (!result.success && mappedModel && fallbackModels.includes(mappedModel)) {
|
|
10500
|
+
result = await gemini.generateImage(prompt3, {
|
|
10501
|
+
model: "flash",
|
|
10502
|
+
aspectRatio: ratio
|
|
10503
|
+
});
|
|
10504
|
+
}
|
|
10505
|
+
if (!result.success || !result.images) {
|
|
10506
|
+
return { success: false, error: result.error || "Image generation failed" };
|
|
10507
|
+
}
|
|
10508
|
+
let outputPath;
|
|
10509
|
+
if (output && result.images.length > 0) {
|
|
10510
|
+
const img = result.images[0];
|
|
10511
|
+
if (img.base64) {
|
|
10512
|
+
outputPath = resolve10(process.cwd(), output);
|
|
10513
|
+
await mkdir4(dirname2(outputPath), { recursive: true });
|
|
10514
|
+
await writeFile6(outputPath, Buffer.from(img.base64, "base64"));
|
|
10515
|
+
}
|
|
10516
|
+
}
|
|
10517
|
+
return {
|
|
10518
|
+
success: true,
|
|
10519
|
+
outputPath,
|
|
10520
|
+
images: result.images.map((img) => ({ base64: img.base64, mimeType: img.mimeType })),
|
|
10521
|
+
provider: "gemini",
|
|
10522
|
+
model: result.model
|
|
10523
|
+
};
|
|
10524
|
+
} else if (provider === "grok") {
|
|
10525
|
+
const key = apiKey || process.env.XAI_API_KEY;
|
|
10526
|
+
if (!key) return { success: false, error: "XAI_API_KEY required" };
|
|
10527
|
+
const openaiImage = new OpenAIImageProvider();
|
|
10528
|
+
await openaiImage.initialize({ apiKey: key, baseUrl: "https://api.x.ai/v1" });
|
|
10529
|
+
const result = await openaiImage.generateImage(prompt3, {
|
|
10530
|
+
size,
|
|
10531
|
+
n: count
|
|
10532
|
+
});
|
|
10533
|
+
if (!result.success || !result.images) {
|
|
10534
|
+
return { success: false, error: result.error || "Image generation failed" };
|
|
10535
|
+
}
|
|
10536
|
+
let outputPath;
|
|
10537
|
+
if (output && result.images.length > 0) {
|
|
10538
|
+
const img = result.images[0];
|
|
10539
|
+
let buffer;
|
|
10540
|
+
if (img.url) {
|
|
10541
|
+
const response = await fetch(img.url);
|
|
10542
|
+
buffer = Buffer.from(await response.arrayBuffer());
|
|
10543
|
+
} else if (img.base64) {
|
|
10544
|
+
buffer = Buffer.from(img.base64, "base64");
|
|
10545
|
+
} else {
|
|
10546
|
+
return { success: false, error: "No image data available" };
|
|
10547
|
+
}
|
|
10548
|
+
outputPath = resolve10(process.cwd(), output);
|
|
10549
|
+
await mkdir4(dirname2(outputPath), { recursive: true });
|
|
10550
|
+
await writeFile6(outputPath, buffer);
|
|
10551
|
+
}
|
|
10552
|
+
return {
|
|
10553
|
+
success: true,
|
|
10554
|
+
outputPath,
|
|
10555
|
+
images: result.images.map((img) => ({ url: img.url, base64: img.base64, revisedPrompt: img.revisedPrompt })),
|
|
10556
|
+
provider: "grok"
|
|
10557
|
+
};
|
|
10558
|
+
}
|
|
10559
|
+
return { success: false, error: `Unsupported provider: ${provider}` };
|
|
10560
|
+
} catch (error) {
|
|
10561
|
+
return { success: false, error: `Image generation failed: ${error instanceof Error ? error.message : String(error)}` };
|
|
10562
|
+
}
|
|
10563
|
+
}
|
|
10564
|
+
async function executeGeminiEdit(options) {
|
|
10565
|
+
const {
|
|
10566
|
+
imagePaths,
|
|
10567
|
+
prompt: prompt3,
|
|
10568
|
+
output = "edited.png",
|
|
10569
|
+
model = "flash",
|
|
10570
|
+
ratio,
|
|
10571
|
+
resolution,
|
|
10572
|
+
apiKey
|
|
10573
|
+
} = options;
|
|
10574
|
+
try {
|
|
10575
|
+
const key = apiKey || process.env.GOOGLE_API_KEY;
|
|
10576
|
+
if (!key) return { success: false, error: "GOOGLE_API_KEY required" };
|
|
10577
|
+
const imageBuffers = [];
|
|
10578
|
+
for (const imagePath of imagePaths) {
|
|
10579
|
+
const absPath = resolve10(process.cwd(), imagePath);
|
|
10580
|
+
if (!existsSync5(absPath)) {
|
|
10581
|
+
return { success: false, error: `Image not found: ${absPath}` };
|
|
10582
|
+
}
|
|
10583
|
+
const buffer = await readFile8(absPath);
|
|
10584
|
+
imageBuffers.push(buffer);
|
|
10585
|
+
}
|
|
10586
|
+
const gemini = new GeminiProvider();
|
|
10587
|
+
await gemini.initialize({ apiKey: key });
|
|
10588
|
+
let result = await gemini.editImage(imageBuffers, prompt3, {
|
|
10589
|
+
model,
|
|
10590
|
+
aspectRatio: ratio,
|
|
10591
|
+
resolution
|
|
10592
|
+
});
|
|
10593
|
+
const fallbackModels = ["latest", "3.1-flash"];
|
|
10594
|
+
if (!result.success && fallbackModels.includes(model)) {
|
|
10595
|
+
result = await gemini.editImage(imageBuffers, prompt3, {
|
|
10596
|
+
model: "flash",
|
|
10597
|
+
aspectRatio: ratio,
|
|
10598
|
+
resolution
|
|
10599
|
+
});
|
|
10600
|
+
}
|
|
10601
|
+
if (!result.success || !result.images || result.images.length === 0) {
|
|
10602
|
+
return { success: false, error: result.error || "Image editing failed" };
|
|
10603
|
+
}
|
|
10604
|
+
const img = result.images[0];
|
|
10605
|
+
let outputPath;
|
|
10606
|
+
if (img.base64) {
|
|
10607
|
+
outputPath = resolve10(process.cwd(), output);
|
|
10608
|
+
await mkdir4(dirname2(outputPath), { recursive: true });
|
|
10609
|
+
await writeFile6(outputPath, Buffer.from(img.base64, "base64"));
|
|
10610
|
+
}
|
|
10611
|
+
return {
|
|
10612
|
+
success: true,
|
|
10613
|
+
outputPath,
|
|
10614
|
+
model: result.model
|
|
10615
|
+
};
|
|
10616
|
+
} catch (error) {
|
|
10617
|
+
return { success: false, error: `Image editing failed: ${error instanceof Error ? error.message : String(error)}` };
|
|
10618
|
+
}
|
|
10619
|
+
}
|
|
10395
10620
|
async function executeThumbnailBestFrame(options) {
|
|
10396
10621
|
const {
|
|
10397
10622
|
videoPath,
|
|
@@ -10612,7 +10837,7 @@ async function handleAiAnalysisToolCall(name, args) {
|
|
|
10612
10837
|
|
|
10613
10838
|
// ../cli/src/commands/ai-script-pipeline.ts
|
|
10614
10839
|
import { readFile as readFile9, writeFile as writeFile7, mkdir as mkdir5, unlink, rename as rename2 } from "node:fs/promises";
|
|
10615
|
-
import { resolve as
|
|
10840
|
+
import { resolve as resolve11, basename as basename4, extname as extname4 } from "node:path";
|
|
10616
10841
|
import { existsSync as existsSync6 } from "node:fs";
|
|
10617
10842
|
import chalk7 from "chalk";
|
|
10618
10843
|
init_exec_safe();
|
|
@@ -10639,7 +10864,7 @@ function formatTime(seconds) {
|
|
|
10639
10864
|
var DEFAULT_VIDEO_RETRIES = 2;
|
|
10640
10865
|
var RETRY_DELAY_MS = 5e3;
|
|
10641
10866
|
function sleep(ms) {
|
|
10642
|
-
return new Promise((
|
|
10867
|
+
return new Promise((resolve20) => setTimeout(resolve20, ms));
|
|
10643
10868
|
}
|
|
10644
10869
|
async function uploadToImgbb(imageBuffer, apiKey) {
|
|
10645
10870
|
try {
|
|
@@ -10854,7 +11079,7 @@ async function executeScriptToVideo(options) {
|
|
|
10854
11079
|
return { success: false, outputDir, scenes: 0, error: `${generatorInfo.name} API key required (or use imagesOnly option). Run 'vibe setup' or set ${generatorInfo.envVar} in .env` };
|
|
10855
11080
|
}
|
|
10856
11081
|
}
|
|
10857
|
-
const absOutputDir =
|
|
11082
|
+
const absOutputDir = resolve11(process.cwd(), outputDir);
|
|
10858
11083
|
if (!existsSync6(absOutputDir)) {
|
|
10859
11084
|
await mkdir5(absOutputDir, { recursive: true });
|
|
10860
11085
|
}
|
|
@@ -10876,7 +11101,7 @@ async function executeScriptToVideo(options) {
|
|
|
10876
11101
|
if (segments.length === 0) {
|
|
10877
11102
|
return { success: false, outputDir, scenes: 0, error: "Failed to generate storyboard" };
|
|
10878
11103
|
}
|
|
10879
|
-
const storyboardPath =
|
|
11104
|
+
const storyboardPath = resolve11(absOutputDir, "storyboard.json");
|
|
10880
11105
|
await writeFile7(storyboardPath, JSON.stringify(segments, null, 2), "utf-8");
|
|
10881
11106
|
const result = {
|
|
10882
11107
|
success: true,
|
|
@@ -10910,7 +11135,7 @@ async function executeScriptToVideo(options) {
|
|
|
10910
11135
|
voiceId: options.voice
|
|
10911
11136
|
});
|
|
10912
11137
|
if (ttsResult.success && ttsResult.audioBuffer) {
|
|
10913
|
-
const audioPath =
|
|
11138
|
+
const audioPath = resolve11(absOutputDir, `narration-${i + 1}.mp3`);
|
|
10914
11139
|
await writeFile7(audioPath, ttsResult.audioBuffer);
|
|
10915
11140
|
const actualDuration = await getAudioDuration(audioPath);
|
|
10916
11141
|
segment.duration = actualDuration;
|
|
@@ -10998,7 +11223,7 @@ async function executeScriptToVideo(options) {
|
|
|
10998
11223
|
}
|
|
10999
11224
|
}
|
|
11000
11225
|
}
|
|
11001
|
-
const imagePath =
|
|
11226
|
+
const imagePath = resolve11(absOutputDir, `scene-${i + 1}.png`);
|
|
11002
11227
|
if (imageBuffer) {
|
|
11003
11228
|
await writeFile7(imagePath, imageBuffer);
|
|
11004
11229
|
imagePaths.push(imagePath);
|
|
@@ -11033,7 +11258,7 @@ async function executeScriptToVideo(options) {
|
|
|
11033
11258
|
const segment = segments[i];
|
|
11034
11259
|
const videoDuration = Math.min(15, Math.max(1, segment.duration));
|
|
11035
11260
|
const imageBuffer = await readFile9(imagePaths[i]);
|
|
11036
|
-
const ext =
|
|
11261
|
+
const ext = extname4(imagePaths[i]).toLowerCase().slice(1);
|
|
11037
11262
|
const mimeType = ext === "jpg" || ext === "jpeg" ? "image/jpeg" : "image/png";
|
|
11038
11263
|
const referenceImage = `data:${mimeType};base64,${imageBuffer.toString("base64")}`;
|
|
11039
11264
|
const taskResult = await generateVideoWithRetryGrok(
|
|
@@ -11046,13 +11271,13 @@ async function executeScriptToVideo(options) {
|
|
|
11046
11271
|
try {
|
|
11047
11272
|
const waitResult = await grok.waitForCompletion(taskResult.requestId, void 0, 3e5);
|
|
11048
11273
|
if (waitResult.status === "completed" && waitResult.videoUrl) {
|
|
11049
|
-
const videoPath =
|
|
11274
|
+
const videoPath = resolve11(absOutputDir, `scene-${i + 1}.mp4`);
|
|
11050
11275
|
const buffer = await downloadVideo(waitResult.videoUrl, videoApiKey);
|
|
11051
11276
|
await writeFile7(videoPath, buffer);
|
|
11052
11277
|
const targetDuration = segment.duration;
|
|
11053
11278
|
const actualVideoDuration = await getVideoDuration(videoPath);
|
|
11054
11279
|
if (actualVideoDuration < targetDuration - 0.1) {
|
|
11055
|
-
const extendedPath =
|
|
11280
|
+
const extendedPath = resolve11(absOutputDir, `scene-${i + 1}-extended.mp4`);
|
|
11056
11281
|
await extendVideoNaturally(videoPath, targetDuration, extendedPath);
|
|
11057
11282
|
await unlink(videoPath);
|
|
11058
11283
|
await rename2(extendedPath, videoPath);
|
|
@@ -11095,13 +11320,13 @@ async function executeScriptToVideo(options) {
|
|
|
11095
11320
|
try {
|
|
11096
11321
|
const waitResult = await kling.waitForCompletion(taskResult.taskId, taskResult.type, void 0, 6e5);
|
|
11097
11322
|
if (waitResult.status === "completed" && waitResult.videoUrl) {
|
|
11098
|
-
const videoPath =
|
|
11323
|
+
const videoPath = resolve11(absOutputDir, `scene-${i + 1}.mp4`);
|
|
11099
11324
|
const buffer = await downloadVideo(waitResult.videoUrl, videoApiKey);
|
|
11100
11325
|
await writeFile7(videoPath, buffer);
|
|
11101
11326
|
const targetDuration = segment.duration;
|
|
11102
11327
|
const actualVideoDuration = await getVideoDuration(videoPath);
|
|
11103
11328
|
if (actualVideoDuration < targetDuration - 0.1) {
|
|
11104
|
-
const extendedPath =
|
|
11329
|
+
const extendedPath = resolve11(absOutputDir, `scene-${i + 1}-extended.mp4`);
|
|
11105
11330
|
await extendVideoNaturally(videoPath, targetDuration, extendedPath);
|
|
11106
11331
|
await unlink(videoPath);
|
|
11107
11332
|
await rename2(extendedPath, videoPath);
|
|
@@ -11141,13 +11366,13 @@ async function executeScriptToVideo(options) {
|
|
|
11141
11366
|
try {
|
|
11142
11367
|
const waitResult = await veo.waitForVideoCompletion(taskResult.operationName, void 0, 3e5);
|
|
11143
11368
|
if (waitResult.status === "completed" && waitResult.videoUrl) {
|
|
11144
|
-
const videoPath =
|
|
11369
|
+
const videoPath = resolve11(absOutputDir, `scene-${i + 1}.mp4`);
|
|
11145
11370
|
const buffer = await downloadVideo(waitResult.videoUrl, videoApiKey);
|
|
11146
11371
|
await writeFile7(videoPath, buffer);
|
|
11147
11372
|
const targetDuration = segment.duration;
|
|
11148
11373
|
const actualVideoDuration = await getVideoDuration(videoPath);
|
|
11149
11374
|
if (actualVideoDuration < targetDuration - 0.1) {
|
|
11150
|
-
const extendedPath =
|
|
11375
|
+
const extendedPath = resolve11(absOutputDir, `scene-${i + 1}-extended.mp4`);
|
|
11151
11376
|
await extendVideoNaturally(videoPath, targetDuration, extendedPath);
|
|
11152
11377
|
await unlink(videoPath);
|
|
11153
11378
|
await rename2(extendedPath, videoPath);
|
|
@@ -11177,7 +11402,7 @@ async function executeScriptToVideo(options) {
|
|
|
11177
11402
|
}
|
|
11178
11403
|
const segment = segments[i];
|
|
11179
11404
|
const imageBuffer = await readFile9(imagePaths[i]);
|
|
11180
|
-
const ext =
|
|
11405
|
+
const ext = extname4(imagePaths[i]).toLowerCase().slice(1);
|
|
11181
11406
|
const mimeType = ext === "jpg" || ext === "jpeg" ? "image/jpeg" : "image/png";
|
|
11182
11407
|
const referenceImage = `data:${mimeType};base64,${imageBuffer.toString("base64")}`;
|
|
11183
11408
|
const videoDuration = segment.duration > 5 ? 10 : 5;
|
|
@@ -11193,13 +11418,13 @@ async function executeScriptToVideo(options) {
|
|
|
11193
11418
|
try {
|
|
11194
11419
|
const waitResult = await runway.waitForCompletion(taskResult.taskId, void 0, 3e5);
|
|
11195
11420
|
if (waitResult.status === "completed" && waitResult.videoUrl) {
|
|
11196
|
-
const videoPath =
|
|
11421
|
+
const videoPath = resolve11(absOutputDir, `scene-${i + 1}.mp4`);
|
|
11197
11422
|
const buffer = await downloadVideo(waitResult.videoUrl, videoApiKey);
|
|
11198
11423
|
await writeFile7(videoPath, buffer);
|
|
11199
11424
|
const targetDuration = segment.duration;
|
|
11200
11425
|
const actualVideoDuration = await getVideoDuration(videoPath);
|
|
11201
11426
|
if (actualVideoDuration < targetDuration - 0.1) {
|
|
11202
|
-
const extendedPath =
|
|
11427
|
+
const extendedPath = resolve11(absOutputDir, `scene-${i + 1}-extended.mp4`);
|
|
11203
11428
|
await extendVideoNaturally(videoPath, targetDuration, extendedPath);
|
|
11204
11429
|
await unlink(videoPath);
|
|
11205
11430
|
await rename2(extendedPath, videoPath);
|
|
@@ -11312,13 +11537,13 @@ async function executeScriptToVideo(options) {
|
|
|
11312
11537
|
});
|
|
11313
11538
|
currentTime += actualDuration;
|
|
11314
11539
|
}
|
|
11315
|
-
const projectPath =
|
|
11540
|
+
const projectPath = resolve11(absOutputDir, "project.vibe.json");
|
|
11316
11541
|
await writeFile7(projectPath, JSON.stringify(project.toJSON(), null, 2), "utf-8");
|
|
11317
11542
|
result.projectPath = projectPath;
|
|
11318
11543
|
result.totalDuration = currentTime;
|
|
11319
11544
|
if (options.review) {
|
|
11320
11545
|
try {
|
|
11321
|
-
const storyboardFile =
|
|
11546
|
+
const storyboardFile = resolve11(absOutputDir, "storyboard.json");
|
|
11322
11547
|
const reviewTarget = videoPaths.find((p) => p && p !== "") || imagePaths.find((p) => p && p !== "");
|
|
11323
11548
|
if (reviewTarget) {
|
|
11324
11549
|
const reviewResult = await executeReview({
|
|
@@ -11353,8 +11578,8 @@ async function executeRegenerateScene(options) {
|
|
|
11353
11578
|
failedScenes: []
|
|
11354
11579
|
};
|
|
11355
11580
|
try {
|
|
11356
|
-
const outputDir =
|
|
11357
|
-
const storyboardPath =
|
|
11581
|
+
const outputDir = resolve11(process.cwd(), options.projectDir);
|
|
11582
|
+
const storyboardPath = resolve11(outputDir, "storyboard.json");
|
|
11358
11583
|
if (!existsSync6(outputDir)) {
|
|
11359
11584
|
return { ...result, error: `Project directory not found: ${outputDir}` };
|
|
11360
11585
|
}
|
|
@@ -11389,8 +11614,8 @@ async function executeRegenerateScene(options) {
|
|
|
11389
11614
|
}
|
|
11390
11615
|
for (const sceneNum of options.scenes) {
|
|
11391
11616
|
const segment = segments[sceneNum - 1];
|
|
11392
|
-
const imagePath =
|
|
11393
|
-
const videoPath =
|
|
11617
|
+
const imagePath = resolve11(outputDir, `scene-${sceneNum}.png`);
|
|
11618
|
+
const videoPath = resolve11(outputDir, `scene-${sceneNum}.mp4`);
|
|
11394
11619
|
if (regenerateVideo && videoApiKey) {
|
|
11395
11620
|
if (!existsSync6(imagePath)) {
|
|
11396
11621
|
result.failedScenes.push(sceneNum);
|
|
@@ -11433,7 +11658,7 @@ async function executeRegenerateScene(options) {
|
|
|
11433
11658
|
const targetDuration = segment.duration;
|
|
11434
11659
|
const actualVideoDuration = await getVideoDuration(videoPath);
|
|
11435
11660
|
if (actualVideoDuration < targetDuration - 0.1) {
|
|
11436
|
-
const extendedPath =
|
|
11661
|
+
const extendedPath = resolve11(outputDir, `scene-${sceneNum}-extended.mp4`);
|
|
11437
11662
|
await extendVideoNaturally(videoPath, targetDuration, extendedPath);
|
|
11438
11663
|
await unlink(videoPath);
|
|
11439
11664
|
await rename2(extendedPath, videoPath);
|
|
@@ -11451,7 +11676,7 @@ async function executeRegenerateScene(options) {
|
|
|
11451
11676
|
} else {
|
|
11452
11677
|
const runway = new RunwayProvider();
|
|
11453
11678
|
await runway.initialize({ apiKey: videoApiKey });
|
|
11454
|
-
const ext =
|
|
11679
|
+
const ext = extname4(imagePath).toLowerCase().slice(1);
|
|
11455
11680
|
const mimeType = ext === "jpg" || ext === "jpeg" ? "image/jpeg" : "image/png";
|
|
11456
11681
|
const referenceImage = `data:${mimeType};base64,${imageBuffer.toString("base64")}`;
|
|
11457
11682
|
const aspectRatio = options.aspectRatio === "1:1" ? "16:9" : options.aspectRatio || "16:9";
|
|
@@ -11471,7 +11696,7 @@ async function executeRegenerateScene(options) {
|
|
|
11471
11696
|
const targetDuration = segment.duration;
|
|
11472
11697
|
const actualVideoDuration = await getVideoDuration(videoPath);
|
|
11473
11698
|
if (actualVideoDuration < targetDuration - 0.1) {
|
|
11474
|
-
const extendedPath =
|
|
11699
|
+
const extendedPath = resolve11(outputDir, `scene-${sceneNum}-extended.mp4`);
|
|
11475
11700
|
await extendVideoNaturally(videoPath, targetDuration, extendedPath);
|
|
11476
11701
|
await unlink(videoPath);
|
|
11477
11702
|
await rename2(extendedPath, videoPath);
|
|
@@ -11501,7 +11726,7 @@ async function executeRegenerateScene(options) {
|
|
|
11501
11726
|
|
|
11502
11727
|
// ../cli/src/commands/ai-highlights.ts
|
|
11503
11728
|
import { readFile as readFile10, writeFile as writeFile8, mkdir as mkdir6 } from "node:fs/promises";
|
|
11504
|
-
import { resolve as
|
|
11729
|
+
import { resolve as resolve12, dirname as dirname3, basename as basename5, extname as extname5 } from "node:path";
|
|
11505
11730
|
import { existsSync as existsSync7 } from "node:fs";
|
|
11506
11731
|
import chalk8 from "chalk";
|
|
11507
11732
|
import ora6 from "ora";
|
|
@@ -11528,11 +11753,11 @@ function filterHighlights(highlights, options) {
|
|
|
11528
11753
|
}
|
|
11529
11754
|
async function executeHighlights(options) {
|
|
11530
11755
|
try {
|
|
11531
|
-
const absPath =
|
|
11756
|
+
const absPath = resolve12(process.cwd(), options.media);
|
|
11532
11757
|
if (!existsSync7(absPath)) {
|
|
11533
11758
|
return { success: false, highlights: [], totalDuration: 0, totalHighlightDuration: 0, error: `File not found: ${absPath}` };
|
|
11534
11759
|
}
|
|
11535
|
-
const ext =
|
|
11760
|
+
const ext = extname5(absPath).toLowerCase();
|
|
11536
11761
|
const videoExtensions = [".mp4", ".mov", ".avi", ".mkv", ".webm", ".m4v"];
|
|
11537
11762
|
const isVideo = videoExtensions.includes(ext);
|
|
11538
11763
|
const targetDuration = options.duration;
|
|
@@ -11676,7 +11901,7 @@ Analyze both what is SHOWN (visual cues, actions, expressions) and what is SAID
|
|
|
11676
11901
|
totalHighlightDuration
|
|
11677
11902
|
};
|
|
11678
11903
|
if (options.output) {
|
|
11679
|
-
const outputPath =
|
|
11904
|
+
const outputPath = resolve12(process.cwd(), options.output);
|
|
11680
11905
|
await writeFile8(outputPath, JSON.stringify({
|
|
11681
11906
|
sourceFile: absPath,
|
|
11682
11907
|
totalDuration: sourceDuration,
|
|
@@ -11691,7 +11916,7 @@ Analyze both what is SHOWN (visual cues, actions, expressions) and what is SAID
|
|
|
11691
11916
|
if (options.project) {
|
|
11692
11917
|
const project = new Project("Highlight Reel");
|
|
11693
11918
|
const source = project.addSource({
|
|
11694
|
-
name:
|
|
11919
|
+
name: basename5(absPath),
|
|
11695
11920
|
url: absPath,
|
|
11696
11921
|
type: isVideo ? "video" : "audio",
|
|
11697
11922
|
duration: sourceDuration
|
|
@@ -11711,7 +11936,7 @@ Analyze both what is SHOWN (visual cues, actions, expressions) and what is SAID
|
|
|
11711
11936
|
currentTime += highlight.duration;
|
|
11712
11937
|
}
|
|
11713
11938
|
}
|
|
11714
|
-
const projectPath =
|
|
11939
|
+
const projectPath = resolve12(process.cwd(), options.project);
|
|
11715
11940
|
await writeFile8(projectPath, JSON.stringify(project.toJSON(), null, 2), "utf-8");
|
|
11716
11941
|
extractResult.projectPath = projectPath;
|
|
11717
11942
|
}
|
|
@@ -11731,7 +11956,7 @@ async function executeAutoShorts(options) {
|
|
|
11731
11956
|
if (!commandExists("ffmpeg")) {
|
|
11732
11957
|
return { success: false, shorts: [], error: "FFmpeg not found" };
|
|
11733
11958
|
}
|
|
11734
|
-
const absPath =
|
|
11959
|
+
const absPath = resolve12(process.cwd(), options.video);
|
|
11735
11960
|
if (!existsSync7(absPath)) {
|
|
11736
11961
|
return { success: false, shorts: [], error: `File not found: ${absPath}` };
|
|
11737
11962
|
}
|
|
@@ -11859,7 +12084,7 @@ Analyze both VISUALS (expressions, actions, scene changes) and AUDIO (speech, re
|
|
|
11859
12084
|
}))
|
|
11860
12085
|
};
|
|
11861
12086
|
}
|
|
11862
|
-
const outputDir = options.outputDir ?
|
|
12087
|
+
const outputDir = options.outputDir ? resolve12(process.cwd(), options.outputDir) : dirname3(absPath);
|
|
11863
12088
|
if (options.outputDir && !existsSync7(outputDir)) {
|
|
11864
12089
|
await mkdir6(outputDir, { recursive: true });
|
|
11865
12090
|
}
|
|
@@ -11869,8 +12094,8 @@ Analyze both VISUALS (expressions, actions, scene changes) and AUDIO (speech, re
|
|
|
11869
12094
|
};
|
|
11870
12095
|
for (let i = 0; i < selectedHighlights.length; i++) {
|
|
11871
12096
|
const h = selectedHighlights[i];
|
|
11872
|
-
const baseName =
|
|
11873
|
-
const outputPath =
|
|
12097
|
+
const baseName = basename5(absPath, extname5(absPath));
|
|
12098
|
+
const outputPath = resolve12(outputDir, `${baseName}-short-${i + 1}.mp4`);
|
|
11874
12099
|
const { stdout: probeOut } = await execSafe("ffprobe", [
|
|
11875
12100
|
"-v",
|
|
11876
12101
|
"error",
|
|
@@ -12127,7 +12352,7 @@ async function handleAiPipelineToolCall(name, args) {
|
|
|
12127
12352
|
}
|
|
12128
12353
|
|
|
12129
12354
|
// ../cli/src/commands/ai-motion.ts
|
|
12130
|
-
import { resolve as
|
|
12355
|
+
import { resolve as resolve13 } from "node:path";
|
|
12131
12356
|
import { existsSync as existsSync8 } from "node:fs";
|
|
12132
12357
|
import { readFile as readFile11, writeFile as writeFile9 } from "node:fs/promises";
|
|
12133
12358
|
import chalk9 from "chalk";
|
|
@@ -12160,7 +12385,7 @@ async function executeMotion(options) {
|
|
|
12160
12385
|
if (!geminiApiKey) {
|
|
12161
12386
|
return { success: false, error: "GOOGLE_API_KEY required for image analysis (--image). Run 'vibe setup' or set GOOGLE_API_KEY in .env" };
|
|
12162
12387
|
}
|
|
12163
|
-
const imagePath =
|
|
12388
|
+
const imagePath = resolve13(process.cwd(), options.image);
|
|
12164
12389
|
const imageBuffer = await readFile11(imagePath);
|
|
12165
12390
|
const gemini = new GeminiProvider();
|
|
12166
12391
|
await gemini.initialize({ apiKey: geminiApiKey });
|
|
@@ -12183,7 +12408,7 @@ Use this image analysis to inform the color palette, typography placement, and o
|
|
|
12183
12408
|
}
|
|
12184
12409
|
let result;
|
|
12185
12410
|
if (options.fromTsx) {
|
|
12186
|
-
const tsxPath =
|
|
12411
|
+
const tsxPath = resolve13(process.cwd(), options.fromTsx);
|
|
12187
12412
|
if (!existsSync8(tsxPath)) {
|
|
12188
12413
|
return { success: false, error: `TSX file not found: ${tsxPath}` };
|
|
12189
12414
|
}
|
|
@@ -12237,7 +12462,7 @@ Use this image analysis to inform the color palette, typography placement, and o
|
|
|
12237
12462
|
}
|
|
12238
12463
|
const { component } = result;
|
|
12239
12464
|
const defaultOutput = options.video || options.image ? "motion-output.mp4" : options.render ? "motion.webm" : "motion.tsx";
|
|
12240
|
-
const outputPath =
|
|
12465
|
+
const outputPath = resolve13(process.cwd(), options.output || defaultOutput);
|
|
12241
12466
|
const codePath = outputPath.replace(/\.\w+$/, ".tsx");
|
|
12242
12467
|
await writeFile9(codePath, component.code, "utf-8");
|
|
12243
12468
|
const shouldRender = options.render || !!options.video || !!options.image;
|
|
@@ -12256,8 +12481,8 @@ Use this image analysis to inform the color palette, typography placement, and o
|
|
|
12256
12481
|
if (notInstalled) {
|
|
12257
12482
|
return { success: false, codePath, componentName: component.name, error: notInstalled };
|
|
12258
12483
|
}
|
|
12259
|
-
const baseVideo = options.video ?
|
|
12260
|
-
const baseImage = options.image ?
|
|
12484
|
+
const baseVideo = options.video ? resolve13(process.cwd(), options.video) : void 0;
|
|
12485
|
+
const baseImage = options.image ? resolve13(process.cwd(), options.image) : void 0;
|
|
12261
12486
|
if (baseVideo) {
|
|
12262
12487
|
const videoFileName = "source_video.mp4";
|
|
12263
12488
|
const wrapped = wrapComponentWithVideo2(component.code, component.name, videoFileName);
|
|
@@ -12392,7 +12617,7 @@ function registerMotionCommand(aiCommand) {
|
|
|
12392
12617
|
}
|
|
12393
12618
|
|
|
12394
12619
|
// ../cli/src/commands/ai-animated-caption.ts
|
|
12395
|
-
import { resolve as
|
|
12620
|
+
import { resolve as resolve14, dirname as dirname4, basename as basename6 } from "node:path";
|
|
12396
12621
|
import { writeFile as writeFile10, mkdir as mkdir7, rm as rm2 } from "node:fs/promises";
|
|
12397
12622
|
import { existsSync as existsSync9 } from "node:fs";
|
|
12398
12623
|
import { tmpdir as tmpdir2 } from "node:os";
|
|
@@ -12546,9 +12771,9 @@ async function executeAnimatedCaption(options) {
|
|
|
12546
12771
|
} catch {
|
|
12547
12772
|
}
|
|
12548
12773
|
const effectiveFontSize = fontSize ?? Math.round(height * 0.04);
|
|
12549
|
-
const tmpAudioDir =
|
|
12774
|
+
const tmpAudioDir = resolve14(tmpdir2(), `vf-ac-${Date.now()}`);
|
|
12550
12775
|
await mkdir7(tmpAudioDir, { recursive: true });
|
|
12551
|
-
const audioPath =
|
|
12776
|
+
const audioPath = resolve14(tmpAudioDir, "audio.wav");
|
|
12552
12777
|
await execSafe("ffmpeg", [
|
|
12553
12778
|
"-y",
|
|
12554
12779
|
"-i",
|
|
@@ -12575,8 +12800,8 @@ async function executeAnimatedCaption(options) {
|
|
|
12575
12800
|
return { success: false, error: "No words detected in transcription" };
|
|
12576
12801
|
}
|
|
12577
12802
|
const groups = groupWords(transcript.words, { wordsPerGroup, maxChars });
|
|
12578
|
-
const absOutputPath =
|
|
12579
|
-
const outDir =
|
|
12803
|
+
const absOutputPath = resolve14(process.cwd(), outputPath);
|
|
12804
|
+
const outDir = dirname4(absOutputPath);
|
|
12580
12805
|
if (!existsSync9(outDir)) {
|
|
12581
12806
|
await mkdir7(outDir, { recursive: true });
|
|
12582
12807
|
}
|
|
@@ -12586,7 +12811,7 @@ async function executeAnimatedCaption(options) {
|
|
|
12586
12811
|
effectiveStyle,
|
|
12587
12812
|
{ highlightColor, fontSize: effectiveFontSize, position, width, height }
|
|
12588
12813
|
);
|
|
12589
|
-
const assPath =
|
|
12814
|
+
const assPath = resolve14(tmpAudioDir, "captions.ass");
|
|
12590
12815
|
await writeFile10(assPath, assContent, "utf-8");
|
|
12591
12816
|
const escapedAssPath = assPath.replace(/\\/g, "\\\\").replace(/:/g, "\\:");
|
|
12592
12817
|
await execSafe("ffmpeg", [
|
|
@@ -12609,7 +12834,7 @@ async function executeAnimatedCaption(options) {
|
|
|
12609
12834
|
width,
|
|
12610
12835
|
height,
|
|
12611
12836
|
fps: videoFps,
|
|
12612
|
-
videoFileName:
|
|
12837
|
+
videoFileName: basename6(videoPath)
|
|
12613
12838
|
});
|
|
12614
12839
|
const durationInFrames = Math.ceil(duration * videoFps);
|
|
12615
12840
|
const renderResult = await renderWithEmbeddedVideo({
|
|
@@ -12620,7 +12845,7 @@ async function executeAnimatedCaption(options) {
|
|
|
12620
12845
|
fps: videoFps,
|
|
12621
12846
|
durationInFrames,
|
|
12622
12847
|
videoPath,
|
|
12623
|
-
videoFileName:
|
|
12848
|
+
videoFileName: basename6(videoPath),
|
|
12624
12849
|
outputPath: absOutputPath
|
|
12625
12850
|
});
|
|
12626
12851
|
if (!renderResult.success) {
|
|
@@ -12649,7 +12874,7 @@ async function executeAnimatedCaption(options) {
|
|
|
12649
12874
|
|
|
12650
12875
|
// ../cli/src/commands/generate.ts
|
|
12651
12876
|
import { Command as Command2 } from "commander";
|
|
12652
|
-
import { resolve as
|
|
12877
|
+
import { resolve as resolve15, dirname as dirname5, basename as basename7, extname as extname6 } from "node:path";
|
|
12653
12878
|
import { fileURLToPath } from "node:url";
|
|
12654
12879
|
import { readFile as readFile12, writeFile as writeFile11, mkdir as mkdir8 } from "node:fs/promises";
|
|
12655
12880
|
import { existsSync as existsSync10 } from "node:fs";
|
|
@@ -13635,10 +13860,10 @@ async function prompt2(question) {
|
|
|
13635
13860
|
input,
|
|
13636
13861
|
output: process.stdout
|
|
13637
13862
|
});
|
|
13638
|
-
return new Promise((
|
|
13863
|
+
return new Promise((resolve20) => {
|
|
13639
13864
|
rl.question(question, (answer) => {
|
|
13640
13865
|
rl.close();
|
|
13641
|
-
|
|
13866
|
+
resolve20(answer);
|
|
13642
13867
|
});
|
|
13643
13868
|
});
|
|
13644
13869
|
}
|
|
@@ -13809,7 +14034,7 @@ Examples:
|
|
|
13809
14034
|
}
|
|
13810
14035
|
spinner.succeed(chalk10.green(`Generated ${result.images.length} image(s) with OpenAI GPT Image 1.5`));
|
|
13811
14036
|
if (isJsonMode()) {
|
|
13812
|
-
const outputPath = options.output ?
|
|
14037
|
+
const outputPath = options.output ? resolve15(process.cwd(), options.output) : void 0;
|
|
13813
14038
|
if (outputPath && result.images.length > 0) {
|
|
13814
14039
|
const img = result.images[0];
|
|
13815
14040
|
let buffer;
|
|
@@ -13821,7 +14046,7 @@ Examples:
|
|
|
13821
14046
|
} else {
|
|
13822
14047
|
throw new Error("No image data available");
|
|
13823
14048
|
}
|
|
13824
|
-
await mkdir8(
|
|
14049
|
+
await mkdir8(dirname5(outputPath), { recursive: true });
|
|
13825
14050
|
await writeFile11(outputPath, buffer);
|
|
13826
14051
|
}
|
|
13827
14052
|
outputResult({ success: true, provider: "openai", images: result.images.map((img) => ({ url: img.url, revisedPrompt: img.revisedPrompt })), outputPath });
|
|
@@ -13856,8 +14081,8 @@ Examples:
|
|
|
13856
14081
|
} else {
|
|
13857
14082
|
throw new Error("No image data available");
|
|
13858
14083
|
}
|
|
13859
|
-
const outputPath =
|
|
13860
|
-
await mkdir8(
|
|
14084
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
14085
|
+
await mkdir8(dirname5(outputPath), { recursive: true });
|
|
13861
14086
|
await writeFile11(outputPath, buffer);
|
|
13862
14087
|
saveSpinner.succeed(chalk10.green(`Saved to: ${outputPath}`));
|
|
13863
14088
|
} catch (err) {
|
|
@@ -13903,11 +14128,11 @@ Examples:
|
|
|
13903
14128
|
}
|
|
13904
14129
|
spinner.succeed(chalk10.green(`Generated ${result.images.length} image(s) with Gemini (${usedLabel})`));
|
|
13905
14130
|
if (isJsonMode()) {
|
|
13906
|
-
const outputPath = options.output ?
|
|
14131
|
+
const outputPath = options.output ? resolve15(process.cwd(), options.output) : void 0;
|
|
13907
14132
|
if (outputPath && result.images.length > 0) {
|
|
13908
14133
|
const img = result.images[0];
|
|
13909
14134
|
const buffer = Buffer.from(img.base64, "base64");
|
|
13910
|
-
await mkdir8(
|
|
14135
|
+
await mkdir8(dirname5(outputPath), { recursive: true });
|
|
13911
14136
|
await writeFile11(outputPath, buffer);
|
|
13912
14137
|
}
|
|
13913
14138
|
outputResult({ success: true, provider: "gemini", images: result.images.map((img) => ({ mimeType: img.mimeType })), outputPath });
|
|
@@ -13927,8 +14152,8 @@ Examples:
|
|
|
13927
14152
|
try {
|
|
13928
14153
|
const img = result.images[0];
|
|
13929
14154
|
const buffer = Buffer.from(img.base64, "base64");
|
|
13930
|
-
const outputPath =
|
|
13931
|
-
await mkdir8(
|
|
14155
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
14156
|
+
await mkdir8(dirname5(outputPath), { recursive: true });
|
|
13932
14157
|
await writeFile11(outputPath, buffer);
|
|
13933
14158
|
saveSpinner.succeed(chalk10.green(`Saved to: ${outputPath}`));
|
|
13934
14159
|
} catch (err) {
|
|
@@ -13955,7 +14180,7 @@ Examples:
|
|
|
13955
14180
|
}
|
|
13956
14181
|
spinner.succeed(chalk10.green(`Generated ${result.images.length} image(s) with xAI Grok`));
|
|
13957
14182
|
if (isJsonMode()) {
|
|
13958
|
-
const outputPath = options.output ?
|
|
14183
|
+
const outputPath = options.output ? resolve15(process.cwd(), options.output) : void 0;
|
|
13959
14184
|
if (outputPath && result.images.length > 0) {
|
|
13960
14185
|
const img = result.images[0];
|
|
13961
14186
|
let buffer;
|
|
@@ -13967,7 +14192,7 @@ Examples:
|
|
|
13967
14192
|
} else {
|
|
13968
14193
|
throw new Error("No image data available");
|
|
13969
14194
|
}
|
|
13970
|
-
await mkdir8(
|
|
14195
|
+
await mkdir8(dirname5(outputPath), { recursive: true });
|
|
13971
14196
|
await writeFile11(outputPath, buffer);
|
|
13972
14197
|
}
|
|
13973
14198
|
outputResult({ success: true, provider: "grok", images: result.images.map((img) => ({ url: img.url })), outputPath });
|
|
@@ -13999,8 +14224,8 @@ Examples:
|
|
|
13999
14224
|
} else {
|
|
14000
14225
|
throw new Error("No image data available");
|
|
14001
14226
|
}
|
|
14002
|
-
const outputPath =
|
|
14003
|
-
await mkdir8(
|
|
14227
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
14228
|
+
await mkdir8(dirname5(outputPath), { recursive: true });
|
|
14004
14229
|
await writeFile11(outputPath, buffer);
|
|
14005
14230
|
saveSpinner.succeed(chalk10.green(`Saved to: ${outputPath}`));
|
|
14006
14231
|
} catch (err) {
|
|
@@ -14010,13 +14235,13 @@ Examples:
|
|
|
14010
14235
|
} else if (provider === "runway") {
|
|
14011
14236
|
const { spawn: spawn2 } = await import("child_process");
|
|
14012
14237
|
const __filename = fileURLToPath(import.meta.url);
|
|
14013
|
-
const __dirname =
|
|
14014
|
-
const scriptPath =
|
|
14238
|
+
const __dirname = dirname5(__filename);
|
|
14239
|
+
const scriptPath = resolve15(__dirname, "../../../../.claude/skills/runway-video/scripts/image.py");
|
|
14015
14240
|
if (!options.output) {
|
|
14016
14241
|
spinner.fail("Output path required for Runway");
|
|
14017
14242
|
exitWithError(usageError("Output path required for Runway. Use -o option."));
|
|
14018
14243
|
}
|
|
14019
|
-
const outputPath =
|
|
14244
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
14020
14245
|
const args = [scriptPath, prompt3, "-o", outputPath, "-r", options.ratio || "16:9"];
|
|
14021
14246
|
spinner.text = "Generating image with Runway (gemini_2.5_flash)...";
|
|
14022
14247
|
await new Promise((resolvePromise, reject) => {
|
|
@@ -14100,7 +14325,7 @@ Examples:
|
|
|
14100
14325
|
let referenceImage;
|
|
14101
14326
|
let isImageToVideo = false;
|
|
14102
14327
|
if (options.image) {
|
|
14103
|
-
const imagePath =
|
|
14328
|
+
const imagePath = resolve15(process.cwd(), options.image);
|
|
14104
14329
|
const imageBuffer = await readFile12(imagePath);
|
|
14105
14330
|
const ext = options.image.toLowerCase().split(".").pop();
|
|
14106
14331
|
const mimeTypes = {
|
|
@@ -14273,7 +14498,7 @@ Examples:
|
|
|
14273
14498
|
const veoDuration = parseInt(options.duration) <= 6 ? 6 : 8;
|
|
14274
14499
|
let lastFrame;
|
|
14275
14500
|
if (options.lastFrame) {
|
|
14276
|
-
const lastFramePath =
|
|
14501
|
+
const lastFramePath = resolve15(process.cwd(), options.lastFrame);
|
|
14277
14502
|
const lastFrameBuffer = await readFile12(lastFramePath);
|
|
14278
14503
|
const ext = options.lastFrame.toLowerCase().split(".").pop();
|
|
14279
14504
|
const mimeType = ext === "jpg" || ext === "jpeg" ? "image/jpeg" : `image/${ext || "png"}`;
|
|
@@ -14283,7 +14508,7 @@ Examples:
|
|
|
14283
14508
|
if (options.refImages && options.refImages.length > 0) {
|
|
14284
14509
|
refImages = [];
|
|
14285
14510
|
for (const refPath of options.refImages.slice(0, 3)) {
|
|
14286
|
-
const absRefPath =
|
|
14511
|
+
const absRefPath = resolve15(process.cwd(), refPath);
|
|
14287
14512
|
const refBuffer = await readFile12(absRefPath);
|
|
14288
14513
|
const ext = refPath.toLowerCase().split(".").pop();
|
|
14289
14514
|
const mimeType = ext === "jpg" || ext === "jpeg" ? "image/jpeg" : `image/${ext || "png"}`;
|
|
@@ -14370,7 +14595,7 @@ Examples:
|
|
|
14370
14595
|
let outputPath;
|
|
14371
14596
|
if (options.output && finalResult.videoUrl) {
|
|
14372
14597
|
const buffer = await downloadVideo(finalResult.videoUrl, apiKey);
|
|
14373
|
-
outputPath =
|
|
14598
|
+
outputPath = resolve15(process.cwd(), options.output);
|
|
14374
14599
|
await writeFile11(outputPath, buffer);
|
|
14375
14600
|
}
|
|
14376
14601
|
outputResult({ success: true, provider, taskId: result?.id, videoUrl: finalResult.videoUrl, duration: finalResult.duration, outputPath });
|
|
@@ -14388,7 +14613,7 @@ Examples:
|
|
|
14388
14613
|
const downloadSpinner = ora8("Downloading video...").start();
|
|
14389
14614
|
try {
|
|
14390
14615
|
const buffer = await downloadVideo(finalResult.videoUrl, apiKey);
|
|
14391
|
-
const outputPath =
|
|
14616
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
14392
14617
|
await writeFile11(outputPath, buffer);
|
|
14393
14618
|
downloadSpinner.succeed(chalk10.green(`Saved to: ${outputPath}`));
|
|
14394
14619
|
} catch (err) {
|
|
@@ -14449,7 +14674,7 @@ generateCommand.command("speech").alias("tts").description("Generate speech from
|
|
|
14449
14674
|
spinner.fail(result.error || "TTS generation failed");
|
|
14450
14675
|
exitWithError(apiError(result.error || "TTS generation failed", true));
|
|
14451
14676
|
}
|
|
14452
|
-
const outputPath =
|
|
14677
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
14453
14678
|
await writeFile11(outputPath, result.audioBuffer);
|
|
14454
14679
|
spinner.succeed(chalk10.green("Speech generated"));
|
|
14455
14680
|
if (options.fitDuration && options.fitDuration > 0) {
|
|
@@ -14518,7 +14743,7 @@ generateCommand.command("sound-effect").description("Generate sound effect using
|
|
|
14518
14743
|
spinner.fail(result.error || "Sound effect generation failed");
|
|
14519
14744
|
exitWithError(apiError(result.error || "Sound effect generation failed", true));
|
|
14520
14745
|
}
|
|
14521
|
-
const outputPath =
|
|
14746
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
14522
14747
|
await writeFile11(outputPath, result.audioBuffer);
|
|
14523
14748
|
spinner.succeed(chalk10.green("Sound effect generated"));
|
|
14524
14749
|
if (isJsonMode()) {
|
|
@@ -14557,7 +14782,7 @@ generateCommand.command("music").description("Generate background music from a t
|
|
|
14557
14782
|
spinner.fail(result.error || "Music generation failed");
|
|
14558
14783
|
exitWithError(apiError(result.error || "Music generation failed", true));
|
|
14559
14784
|
}
|
|
14560
|
-
const outputPath =
|
|
14785
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
14561
14786
|
await writeFile11(outputPath, result.audioBuffer);
|
|
14562
14787
|
spinner.succeed(chalk10.green("Music generated successfully"));
|
|
14563
14788
|
if (isJsonMode()) {
|
|
@@ -14578,7 +14803,7 @@ generateCommand.command("music").description("Generate background music from a t
|
|
|
14578
14803
|
const duration = Math.max(1, Math.min(30, parseFloat(options.duration)));
|
|
14579
14804
|
if (options.melody) {
|
|
14580
14805
|
spinner.text = "Uploading melody reference...";
|
|
14581
|
-
const absPath =
|
|
14806
|
+
const absPath = resolve15(process.cwd(), options.melody);
|
|
14582
14807
|
if (!existsSync10(absPath)) {
|
|
14583
14808
|
spinner.fail(`Melody file not found: ${options.melody}`);
|
|
14584
14809
|
exitWithError(notFoundError(options.melody));
|
|
@@ -14613,7 +14838,7 @@ generateCommand.command("music").description("Generate background music from a t
|
|
|
14613
14838
|
exitWithError(apiError("Failed to download generated audio", true));
|
|
14614
14839
|
}
|
|
14615
14840
|
const audioBuffer = Buffer.from(await response.arrayBuffer());
|
|
14616
|
-
const outputPath =
|
|
14841
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
14617
14842
|
await writeFile11(outputPath, audioBuffer);
|
|
14618
14843
|
spinner.succeed(chalk10.green("Music generated successfully"));
|
|
14619
14844
|
if (isJsonMode()) {
|
|
@@ -14673,7 +14898,7 @@ generateCommand.command("storyboard").description("Generate video storyboard fro
|
|
|
14673
14898
|
}
|
|
14674
14899
|
let textContent = content;
|
|
14675
14900
|
if (options.file) {
|
|
14676
|
-
const filePath =
|
|
14901
|
+
const filePath = resolve15(process.cwd(), content);
|
|
14677
14902
|
textContent = await readFile12(filePath, "utf-8");
|
|
14678
14903
|
}
|
|
14679
14904
|
if (options.dryRun) {
|
|
@@ -14700,7 +14925,7 @@ generateCommand.command("storyboard").description("Generate video storyboard fro
|
|
|
14700
14925
|
if (seg.visuals) seg.visuals = sanitizeLLMResponse(seg.visuals);
|
|
14701
14926
|
}
|
|
14702
14927
|
if (options.output) {
|
|
14703
|
-
const outputPath =
|
|
14928
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
14704
14929
|
await writeFile11(outputPath, JSON.stringify(segments, null, 2), "utf-8");
|
|
14705
14930
|
if (isJsonMode()) {
|
|
14706
14931
|
outputResult({ success: true, segmentCount: segments.length, segments, outputPath });
|
|
@@ -14727,7 +14952,7 @@ generateCommand.command("storyboard").description("Generate video storyboard fro
|
|
|
14727
14952
|
}
|
|
14728
14953
|
console.log();
|
|
14729
14954
|
if (options.output) {
|
|
14730
|
-
console.log(chalk10.green(`Saved to: ${
|
|
14955
|
+
console.log(chalk10.green(`Saved to: ${resolve15(process.cwd(), options.output)}`));
|
|
14731
14956
|
}
|
|
14732
14957
|
} catch (error) {
|
|
14733
14958
|
const msg = error instanceof Error ? error.message : String(error);
|
|
@@ -14742,7 +14967,7 @@ generateCommand.command("thumbnail").description("Generate video thumbnail (DALL
|
|
|
14742
14967
|
validateOutputPath(options.output);
|
|
14743
14968
|
}
|
|
14744
14969
|
if (options.bestFrame) {
|
|
14745
|
-
const absVideoPath =
|
|
14970
|
+
const absVideoPath = resolve15(process.cwd(), options.bestFrame);
|
|
14746
14971
|
if (!existsSync10(absVideoPath)) {
|
|
14747
14972
|
exitWithError(notFoundError(absVideoPath));
|
|
14748
14973
|
}
|
|
@@ -14750,12 +14975,12 @@ generateCommand.command("thumbnail").description("Generate video thumbnail (DALL
|
|
|
14750
14975
|
exitWithError(generalError("FFmpeg not found", "Install with: brew install ffmpeg (macOS) or apt install ffmpeg (Linux)"));
|
|
14751
14976
|
}
|
|
14752
14977
|
const apiKey2 = await requireApiKey("GOOGLE_API_KEY", "Google", options.apiKey);
|
|
14753
|
-
const name =
|
|
14978
|
+
const name = basename7(options.bestFrame, extname6(options.bestFrame));
|
|
14754
14979
|
const outputPath = options.output || `${name}-thumbnail.png`;
|
|
14755
14980
|
const spinner2 = ora8("Analyzing video for best frame...").start();
|
|
14756
14981
|
const result2 = await executeThumbnailBestFrame({
|
|
14757
14982
|
videoPath: absVideoPath,
|
|
14758
|
-
outputPath:
|
|
14983
|
+
outputPath: resolve15(process.cwd(), outputPath),
|
|
14759
14984
|
prompt: options.prompt,
|
|
14760
14985
|
model: options.model,
|
|
14761
14986
|
apiKey: apiKey2
|
|
@@ -14804,8 +15029,8 @@ generateCommand.command("thumbnail").description("Generate video thumbnail (DALL
|
|
|
14804
15029
|
} else {
|
|
14805
15030
|
throw new Error("No image data available");
|
|
14806
15031
|
}
|
|
14807
|
-
outputPath =
|
|
14808
|
-
await mkdir8(
|
|
15032
|
+
outputPath = resolve15(process.cwd(), options.output);
|
|
15033
|
+
await mkdir8(dirname5(outputPath), { recursive: true });
|
|
14809
15034
|
await writeFile11(outputPath, buffer);
|
|
14810
15035
|
}
|
|
14811
15036
|
outputResult({ success: true, imageUrl: img.url, outputPath });
|
|
@@ -14831,8 +15056,8 @@ generateCommand.command("thumbnail").description("Generate video thumbnail (DALL
|
|
|
14831
15056
|
} else {
|
|
14832
15057
|
throw new Error("No image data available");
|
|
14833
15058
|
}
|
|
14834
|
-
const outputPath =
|
|
14835
|
-
await mkdir8(
|
|
15059
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
15060
|
+
await mkdir8(dirname5(outputPath), { recursive: true });
|
|
14836
15061
|
await writeFile11(outputPath, buffer);
|
|
14837
15062
|
saveSpinner.succeed(chalk10.green(`Saved to: ${outputPath}`));
|
|
14838
15063
|
} catch (err) {
|
|
@@ -14877,8 +15102,8 @@ generateCommand.command("background").description("Generate video background usi
|
|
|
14877
15102
|
} else {
|
|
14878
15103
|
throw new Error("No image data available");
|
|
14879
15104
|
}
|
|
14880
|
-
outputPath =
|
|
14881
|
-
await mkdir8(
|
|
15105
|
+
outputPath = resolve15(process.cwd(), options.output);
|
|
15106
|
+
await mkdir8(dirname5(outputPath), { recursive: true });
|
|
14882
15107
|
await writeFile11(outputPath, buffer);
|
|
14883
15108
|
}
|
|
14884
15109
|
outputResult({ success: true, imageUrl: img.url, outputPath });
|
|
@@ -14904,8 +15129,8 @@ generateCommand.command("background").description("Generate video background usi
|
|
|
14904
15129
|
} else {
|
|
14905
15130
|
throw new Error("No image data available");
|
|
14906
15131
|
}
|
|
14907
|
-
const outputPath =
|
|
14908
|
-
await mkdir8(
|
|
15132
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
15133
|
+
await mkdir8(dirname5(outputPath), { recursive: true });
|
|
14909
15134
|
await writeFile11(outputPath, buffer);
|
|
14910
15135
|
saveSpinner.succeed(chalk10.green(`Saved to: ${outputPath}`));
|
|
14911
15136
|
} catch (err) {
|
|
@@ -14940,7 +15165,7 @@ generateCommand.command("video-status", { hidden: true }).description("Check vid
|
|
|
14940
15165
|
let outputPath;
|
|
14941
15166
|
if (options.output && result.videoUrl) {
|
|
14942
15167
|
const buffer = await downloadVideo(result.videoUrl);
|
|
14943
|
-
outputPath =
|
|
15168
|
+
outputPath = resolve15(process.cwd(), options.output);
|
|
14944
15169
|
await writeFile11(outputPath, buffer);
|
|
14945
15170
|
}
|
|
14946
15171
|
outputResult({ success: true, taskId, provider: "grok", status: result.status, videoUrl: result.videoUrl, error: result.error, outputPath });
|
|
@@ -14963,7 +15188,7 @@ generateCommand.command("video-status", { hidden: true }).description("Check vid
|
|
|
14963
15188
|
const downloadSpinner = ora8("Downloading video...").start();
|
|
14964
15189
|
try {
|
|
14965
15190
|
const buffer = await downloadVideo(result.videoUrl);
|
|
14966
|
-
const outputPath =
|
|
15191
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
14967
15192
|
await writeFile11(outputPath, buffer);
|
|
14968
15193
|
downloadSpinner.succeed(chalk10.green(`Saved to: ${outputPath}`));
|
|
14969
15194
|
} catch (err) {
|
|
@@ -14992,7 +15217,7 @@ generateCommand.command("video-status", { hidden: true }).description("Check vid
|
|
|
14992
15217
|
let outputPath;
|
|
14993
15218
|
if (options.output && result.videoUrl) {
|
|
14994
15219
|
const buffer = await downloadVideo(result.videoUrl, apiKey);
|
|
14995
|
-
outputPath =
|
|
15220
|
+
outputPath = resolve15(process.cwd(), options.output);
|
|
14996
15221
|
await writeFile11(outputPath, buffer);
|
|
14997
15222
|
}
|
|
14998
15223
|
outputResult({ success: true, taskId, provider: "runway", status: result.status, videoUrl: result.videoUrl, progress: result.progress, error: result.error, outputPath });
|
|
@@ -15018,7 +15243,7 @@ generateCommand.command("video-status", { hidden: true }).description("Check vid
|
|
|
15018
15243
|
const downloadSpinner = ora8("Downloading video...").start();
|
|
15019
15244
|
try {
|
|
15020
15245
|
const buffer = await downloadVideo(result.videoUrl, apiKey);
|
|
15021
|
-
const outputPath =
|
|
15246
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
15022
15247
|
await writeFile11(outputPath, buffer);
|
|
15023
15248
|
downloadSpinner.succeed(chalk10.green(`Saved to: ${outputPath}`));
|
|
15024
15249
|
} catch (err) {
|
|
@@ -15047,7 +15272,7 @@ generateCommand.command("video-status", { hidden: true }).description("Check vid
|
|
|
15047
15272
|
let outputPath;
|
|
15048
15273
|
if (options.output && result.videoUrl) {
|
|
15049
15274
|
const buffer = await downloadVideo(result.videoUrl, apiKey);
|
|
15050
|
-
outputPath =
|
|
15275
|
+
outputPath = resolve15(process.cwd(), options.output);
|
|
15051
15276
|
await writeFile11(outputPath, buffer);
|
|
15052
15277
|
}
|
|
15053
15278
|
outputResult({ success: true, taskId, provider: "kling", status: result.status, videoUrl: result.videoUrl, duration: result.duration, error: result.error, outputPath });
|
|
@@ -15074,7 +15299,7 @@ generateCommand.command("video-status", { hidden: true }).description("Check vid
|
|
|
15074
15299
|
const downloadSpinner = ora8("Downloading video...").start();
|
|
15075
15300
|
try {
|
|
15076
15301
|
const buffer = await downloadVideo(result.videoUrl, apiKey);
|
|
15077
|
-
const outputPath =
|
|
15302
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
15078
15303
|
await writeFile11(outputPath, buffer);
|
|
15079
15304
|
downloadSpinner.succeed(chalk10.green(`Saved to: ${outputPath}`));
|
|
15080
15305
|
} catch (err) {
|
|
@@ -15192,7 +15417,7 @@ generateCommand.command("video-extend", { hidden: true }).description("Extend vi
|
|
|
15192
15417
|
let outputPath;
|
|
15193
15418
|
if (options.output && finalResult.videoUrl) {
|
|
15194
15419
|
const buffer = await downloadVideo(finalResult.videoUrl, apiKey);
|
|
15195
|
-
outputPath =
|
|
15420
|
+
outputPath = resolve15(process.cwd(), options.output);
|
|
15196
15421
|
await writeFile11(outputPath, buffer);
|
|
15197
15422
|
}
|
|
15198
15423
|
outputResult({ success: true, provider: "kling", taskId: result.id, videoUrl: finalResult.videoUrl, duration: finalResult.duration, outputPath });
|
|
@@ -15210,7 +15435,7 @@ generateCommand.command("video-extend", { hidden: true }).description("Extend vi
|
|
|
15210
15435
|
const downloadSpinner = ora8("Downloading video...").start();
|
|
15211
15436
|
try {
|
|
15212
15437
|
const buffer = await downloadVideo(finalResult.videoUrl, apiKey);
|
|
15213
|
-
const outputPath =
|
|
15438
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
15214
15439
|
await writeFile11(outputPath, buffer);
|
|
15215
15440
|
downloadSpinner.succeed(chalk10.green(`Saved to: ${outputPath}`));
|
|
15216
15441
|
} catch (err) {
|
|
@@ -15267,7 +15492,7 @@ generateCommand.command("video-extend", { hidden: true }).description("Extend vi
|
|
|
15267
15492
|
let outputPath;
|
|
15268
15493
|
if (options.output && finalResult.videoUrl) {
|
|
15269
15494
|
const buffer = await downloadVideo(finalResult.videoUrl, apiKey);
|
|
15270
|
-
outputPath =
|
|
15495
|
+
outputPath = resolve15(process.cwd(), options.output);
|
|
15271
15496
|
await writeFile11(outputPath, buffer);
|
|
15272
15497
|
}
|
|
15273
15498
|
outputResult({ success: true, provider: "veo", taskId: result.id, videoUrl: finalResult.videoUrl, duration: finalResult.duration, outputPath });
|
|
@@ -15282,7 +15507,7 @@ generateCommand.command("video-extend", { hidden: true }).description("Extend vi
|
|
|
15282
15507
|
const downloadSpinner = ora8("Downloading video...").start();
|
|
15283
15508
|
try {
|
|
15284
15509
|
const buffer = await downloadVideo(finalResult.videoUrl, apiKey);
|
|
15285
|
-
const outputPath =
|
|
15510
|
+
const outputPath = resolve15(process.cwd(), options.output);
|
|
15286
15511
|
await writeFile11(outputPath, buffer);
|
|
15287
15512
|
downloadSpinner.succeed(chalk10.green(`Saved to: ${outputPath}`));
|
|
15288
15513
|
} catch (err) {
|
|
@@ -15309,7 +15534,7 @@ async function executeSpeech(options) {
|
|
|
15309
15534
|
if (!result.success || !result.audioBuffer) {
|
|
15310
15535
|
return { success: false, error: result.error || "TTS generation failed" };
|
|
15311
15536
|
}
|
|
15312
|
-
const outputPath =
|
|
15537
|
+
const outputPath = resolve15(process.cwd(), options.output || "output.mp3");
|
|
15313
15538
|
await writeFile11(outputPath, result.audioBuffer);
|
|
15314
15539
|
return { success: true, outputPath, characterCount: result.characterCount };
|
|
15315
15540
|
} catch (error) {
|
|
@@ -15329,7 +15554,7 @@ async function executeSoundEffect(options) {
|
|
|
15329
15554
|
if (!result.success || !result.audioBuffer) {
|
|
15330
15555
|
return { success: false, error: result.error || "Sound effect generation failed" };
|
|
15331
15556
|
}
|
|
15332
|
-
const outputPath =
|
|
15557
|
+
const outputPath = resolve15(process.cwd(), options.output || "sound-effect.mp3");
|
|
15333
15558
|
await writeFile11(outputPath, result.audioBuffer);
|
|
15334
15559
|
return { success: true, outputPath };
|
|
15335
15560
|
} catch (error) {
|
|
@@ -15352,7 +15577,7 @@ async function executeMusic(options) {
|
|
|
15352
15577
|
if (!result2.success || !result2.audioBuffer) {
|
|
15353
15578
|
return { success: false, error: result2.error || "Music generation failed" };
|
|
15354
15579
|
}
|
|
15355
|
-
const outputPath2 =
|
|
15580
|
+
const outputPath2 = resolve15(process.cwd(), options.output || "music.mp3");
|
|
15356
15581
|
await writeFile11(outputPath2, result2.audioBuffer);
|
|
15357
15582
|
return { success: true, outputPath: outputPath2, provider: "elevenlabs", duration: duration2 };
|
|
15358
15583
|
}
|
|
@@ -15372,7 +15597,7 @@ async function executeMusic(options) {
|
|
|
15372
15597
|
const response = await fetch(finalResult.audioUrl);
|
|
15373
15598
|
if (!response.ok) return { success: false, error: "Failed to download generated audio" };
|
|
15374
15599
|
const audioBuffer = Buffer.from(await response.arrayBuffer());
|
|
15375
|
-
const outputPath =
|
|
15600
|
+
const outputPath = resolve15(process.cwd(), options.output || "music.mp3");
|
|
15376
15601
|
await writeFile11(outputPath, audioBuffer);
|
|
15377
15602
|
return { success: true, outputPath, provider: "replicate", duration };
|
|
15378
15603
|
} catch (error) {
|
|
@@ -15516,6 +15741,52 @@ var aiGenerationTools = [
|
|
|
15516
15741
|
},
|
|
15517
15742
|
required: ["prompt"]
|
|
15518
15743
|
}
|
|
15744
|
+
},
|
|
15745
|
+
{
|
|
15746
|
+
name: "generate_image",
|
|
15747
|
+
description: "Generate an image using AI. Supports Gemini (free), OpenAI GPT Image, or Grok Imagine. Requires GOOGLE_API_KEY (Gemini), OPENAI_API_KEY (OpenAI), or XAI_API_KEY (Grok).",
|
|
15748
|
+
inputSchema: {
|
|
15749
|
+
type: "object",
|
|
15750
|
+
properties: {
|
|
15751
|
+
prompt: { type: "string", description: "Image description prompt" },
|
|
15752
|
+
provider: {
|
|
15753
|
+
type: "string",
|
|
15754
|
+
enum: ["gemini", "openai", "grok"],
|
|
15755
|
+
description: "Image provider (default: gemini)"
|
|
15756
|
+
},
|
|
15757
|
+
output: { type: "string", description: "Output file path" },
|
|
15758
|
+
size: { type: "string", description: "Image size for OpenAI (1024x1024, 1536x1024, 1024x1536)" },
|
|
15759
|
+
ratio: { type: "string", description: "Aspect ratio for Gemini (1:1, 16:9, 9:16, 4:3, 3:4, etc.)" },
|
|
15760
|
+
quality: { type: "string", description: "Quality for OpenAI: standard, hd" },
|
|
15761
|
+
count: { type: "number", description: "Number of images (default: 1)" },
|
|
15762
|
+
model: { type: "string", description: "Gemini model: flash, 3.1-flash, latest, pro" }
|
|
15763
|
+
},
|
|
15764
|
+
required: ["prompt"]
|
|
15765
|
+
}
|
|
15766
|
+
},
|
|
15767
|
+
{
|
|
15768
|
+
name: "edit_image",
|
|
15769
|
+
description: "Edit image(s) using Gemini (Nano Banana). Provide image paths and an edit prompt. Requires GOOGLE_API_KEY.",
|
|
15770
|
+
inputSchema: {
|
|
15771
|
+
type: "object",
|
|
15772
|
+
properties: {
|
|
15773
|
+
imagePaths: {
|
|
15774
|
+
type: "array",
|
|
15775
|
+
items: { type: "string" },
|
|
15776
|
+
description: "Input image file path(s)"
|
|
15777
|
+
},
|
|
15778
|
+
prompt: { type: "string", description: "Edit instruction" },
|
|
15779
|
+
output: { type: "string", description: "Output file path (default: edited.png)" },
|
|
15780
|
+
model: {
|
|
15781
|
+
type: "string",
|
|
15782
|
+
enum: ["flash", "3.1-flash", "latest", "pro"],
|
|
15783
|
+
description: "Gemini model (default: flash)"
|
|
15784
|
+
},
|
|
15785
|
+
ratio: { type: "string", description: "Output aspect ratio" },
|
|
15786
|
+
resolution: { type: "string", description: "Resolution: 1K, 2K, 4K (Pro only)" }
|
|
15787
|
+
},
|
|
15788
|
+
required: ["imagePaths", "prompt"]
|
|
15789
|
+
}
|
|
15519
15790
|
}
|
|
15520
15791
|
];
|
|
15521
15792
|
async function handleAiGenerationToolCall(name, args) {
|
|
@@ -15615,6 +15886,32 @@ async function handleAiGenerationToolCall(name, args) {
|
|
|
15615
15886
|
if (!result.success) return `Music generation failed: ${result.error}`;
|
|
15616
15887
|
return JSON.stringify({ success: true, outputPath: result.outputPath, provider: result.provider, duration: result.duration });
|
|
15617
15888
|
}
|
|
15889
|
+
case "generate_image": {
|
|
15890
|
+
const result = await executeImageGenerate({
|
|
15891
|
+
prompt: args.prompt,
|
|
15892
|
+
provider: args.provider,
|
|
15893
|
+
output: args.output,
|
|
15894
|
+
size: args.size,
|
|
15895
|
+
ratio: args.ratio,
|
|
15896
|
+
quality: args.quality,
|
|
15897
|
+
count: args.count,
|
|
15898
|
+
model: args.model
|
|
15899
|
+
});
|
|
15900
|
+
if (!result.success) return `Image generation failed: ${result.error}`;
|
|
15901
|
+
return JSON.stringify({ success: true, outputPath: result.outputPath, provider: result.provider, model: result.model, imageCount: result.images?.length });
|
|
15902
|
+
}
|
|
15903
|
+
case "edit_image": {
|
|
15904
|
+
const result = await executeGeminiEdit({
|
|
15905
|
+
imagePaths: args.imagePaths,
|
|
15906
|
+
prompt: args.prompt,
|
|
15907
|
+
output: args.output,
|
|
15908
|
+
model: args.model,
|
|
15909
|
+
ratio: args.ratio,
|
|
15910
|
+
resolution: args.resolution
|
|
15911
|
+
});
|
|
15912
|
+
if (!result.success) return `Image editing failed: ${result.error}`;
|
|
15913
|
+
return JSON.stringify({ success: true, outputPath: result.outputPath, model: result.model });
|
|
15914
|
+
}
|
|
15618
15915
|
default:
|
|
15619
15916
|
throw new Error(`Unknown AI generation tool: ${name}`);
|
|
15620
15917
|
}
|
|
@@ -15623,7 +15920,7 @@ async function handleAiGenerationToolCall(name, args) {
|
|
|
15623
15920
|
// ../cli/src/commands/detect.ts
|
|
15624
15921
|
import { Command as Command3 } from "commander";
|
|
15625
15922
|
import { readFile as readFile13, writeFile as writeFile12 } from "node:fs/promises";
|
|
15626
|
-
import { resolve as
|
|
15923
|
+
import { resolve as resolve16, basename as basename8 } from "node:path";
|
|
15627
15924
|
import chalk11 from "chalk";
|
|
15628
15925
|
import ora9 from "ora";
|
|
15629
15926
|
init_exec_safe();
|
|
@@ -15632,7 +15929,7 @@ async function executeDetectScenes(options) {
|
|
|
15632
15929
|
if (!commandExists("ffmpeg")) {
|
|
15633
15930
|
return { success: false, error: "FFmpeg not found. Install with: brew install ffmpeg" };
|
|
15634
15931
|
}
|
|
15635
|
-
const absPath =
|
|
15932
|
+
const absPath = resolve16(process.cwd(), options.videoPath);
|
|
15636
15933
|
const threshold = options.threshold ?? 0.3;
|
|
15637
15934
|
const { stdout: sceneStdout, stderr: sceneStderr } = await execSafe("ffmpeg", [
|
|
15638
15935
|
"-i",
|
|
@@ -15663,7 +15960,7 @@ async function executeDetectScenes(options) {
|
|
|
15663
15960
|
duration: (i < scenes.length - 1 ? scenes[i + 1].timestamp : totalDuration) - s.timestamp
|
|
15664
15961
|
}));
|
|
15665
15962
|
if (options.outputPath) {
|
|
15666
|
-
const outputPath =
|
|
15963
|
+
const outputPath = resolve16(process.cwd(), options.outputPath);
|
|
15667
15964
|
await writeFile12(outputPath, JSON.stringify({ source: absPath, totalDuration, threshold, scenes: result }, null, 2), "utf-8");
|
|
15668
15965
|
}
|
|
15669
15966
|
return { success: true, scenes: result, totalDuration };
|
|
@@ -15676,7 +15973,7 @@ async function executeDetectSilence(options) {
|
|
|
15676
15973
|
if (!commandExists("ffmpeg")) {
|
|
15677
15974
|
return { success: false, error: "FFmpeg not found. Install with: brew install ffmpeg" };
|
|
15678
15975
|
}
|
|
15679
|
-
const absPath =
|
|
15976
|
+
const absPath = resolve16(process.cwd(), options.mediaPath);
|
|
15680
15977
|
const noise = options.noise ?? "-30";
|
|
15681
15978
|
const duration = options.duration ?? "0.5";
|
|
15682
15979
|
const { stdout: silStdout, stderr: silStderr } = await execSafe("ffmpeg", [
|
|
@@ -15710,7 +16007,7 @@ async function executeDetectSilence(options) {
|
|
|
15710
16007
|
}
|
|
15711
16008
|
}
|
|
15712
16009
|
if (options.outputPath) {
|
|
15713
|
-
const outputPath =
|
|
16010
|
+
const outputPath = resolve16(process.cwd(), options.outputPath);
|
|
15714
16011
|
await writeFile12(outputPath, JSON.stringify({ source: absPath, silences }, null, 2), "utf-8");
|
|
15715
16012
|
}
|
|
15716
16013
|
return { success: true, silences };
|
|
@@ -15723,7 +16020,7 @@ async function executeDetectBeats(options) {
|
|
|
15723
16020
|
if (!commandExists("ffmpeg")) {
|
|
15724
16021
|
return { success: false, error: "FFmpeg not found. Install with: brew install ffmpeg" };
|
|
15725
16022
|
}
|
|
15726
|
-
const absPath =
|
|
16023
|
+
const absPath = resolve16(process.cwd(), options.audioPath);
|
|
15727
16024
|
const { stdout: beatStdout, stderr: beatStderr } = await execSafe("ffmpeg", [
|
|
15728
16025
|
"-i",
|
|
15729
16026
|
absPath,
|
|
@@ -15759,7 +16056,7 @@ async function executeDetectBeats(options) {
|
|
|
15759
16056
|
}
|
|
15760
16057
|
}
|
|
15761
16058
|
if (options.outputPath) {
|
|
15762
|
-
const outputPath =
|
|
16059
|
+
const outputPath = resolve16(process.cwd(), options.outputPath);
|
|
15763
16060
|
await writeFile12(outputPath, JSON.stringify({ source: absPath, beatCount: beats.length, beats }, null, 2), "utf-8");
|
|
15764
16061
|
}
|
|
15765
16062
|
return { success: true, beats, beatCount: beats.length };
|
|
@@ -15791,7 +16088,7 @@ detectCommand.command("scenes").description("Detect scene changes in video").arg
|
|
|
15791
16088
|
spinner.fail("FFmpeg not found");
|
|
15792
16089
|
exitWithError(generalError("FFmpeg not found", "Install with: brew install ffmpeg (macOS) or apt install ffmpeg (Linux)"));
|
|
15793
16090
|
}
|
|
15794
|
-
const absPath =
|
|
16091
|
+
const absPath = resolve16(process.cwd(), videoPath);
|
|
15795
16092
|
const threshold = parseFloat(options.threshold);
|
|
15796
16093
|
spinner.text = "Analyzing video...";
|
|
15797
16094
|
const { stdout: sceneStdout, stderr: sceneStderr } = await execSafe("ffmpeg", [
|
|
@@ -15832,7 +16129,7 @@ detectCommand.command("scenes").description("Detect scene changes in video").arg
|
|
|
15832
16129
|
}
|
|
15833
16130
|
console.log();
|
|
15834
16131
|
if (options.output) {
|
|
15835
|
-
const outputPath =
|
|
16132
|
+
const outputPath = resolve16(process.cwd(), options.output);
|
|
15836
16133
|
const result = {
|
|
15837
16134
|
source: absPath,
|
|
15838
16135
|
totalDuration,
|
|
@@ -15848,14 +16145,14 @@ detectCommand.command("scenes").description("Detect scene changes in video").arg
|
|
|
15848
16145
|
console.log(chalk11.green(`Saved to: ${outputPath}`));
|
|
15849
16146
|
}
|
|
15850
16147
|
if (options.project) {
|
|
15851
|
-
const projectPath =
|
|
16148
|
+
const projectPath = resolve16(process.cwd(), options.project);
|
|
15852
16149
|
const content = await readFile13(projectPath, "utf-8");
|
|
15853
16150
|
const data = JSON.parse(content);
|
|
15854
16151
|
const project = Project.fromJSON(data);
|
|
15855
16152
|
let source = project.getSources().find((s) => s.url === absPath);
|
|
15856
16153
|
if (!source) {
|
|
15857
16154
|
source = project.addSource({
|
|
15858
|
-
name:
|
|
16155
|
+
name: basename8(absPath),
|
|
15859
16156
|
type: "video",
|
|
15860
16157
|
url: absPath,
|
|
15861
16158
|
duration: totalDuration
|
|
@@ -15902,7 +16199,7 @@ detectCommand.command("silence").description("Detect silence in audio/video").ar
|
|
|
15902
16199
|
});
|
|
15903
16200
|
return;
|
|
15904
16201
|
}
|
|
15905
|
-
const absPath =
|
|
16202
|
+
const absPath = resolve16(process.cwd(), mediaPath);
|
|
15906
16203
|
const noise = options.noise;
|
|
15907
16204
|
const duration = options.duration;
|
|
15908
16205
|
const { stdout: silStdout, stderr: silStderr } = await execSafe("ffmpeg", [
|
|
@@ -15951,7 +16248,7 @@ detectCommand.command("silence").description("Detect silence in audio/video").ar
|
|
|
15951
16248
|
}
|
|
15952
16249
|
console.log();
|
|
15953
16250
|
if (options.output) {
|
|
15954
|
-
const outputPath =
|
|
16251
|
+
const outputPath = resolve16(process.cwd(), options.output);
|
|
15955
16252
|
await writeFile12(
|
|
15956
16253
|
outputPath,
|
|
15957
16254
|
JSON.stringify({ source: absPath, silences }, null, 2),
|
|
@@ -15982,7 +16279,7 @@ detectCommand.command("beats").description("Detect beats in audio (for music syn
|
|
|
15982
16279
|
});
|
|
15983
16280
|
return;
|
|
15984
16281
|
}
|
|
15985
|
-
const absPath =
|
|
16282
|
+
const absPath = resolve16(process.cwd(), audioPath);
|
|
15986
16283
|
const { stdout: beatStdout, stderr: beatStderr } = await execSafe("ffmpeg", [
|
|
15987
16284
|
"-i",
|
|
15988
16285
|
absPath,
|
|
@@ -16032,7 +16329,7 @@ detectCommand.command("beats").description("Detect beats in audio (for music syn
|
|
|
16032
16329
|
}
|
|
16033
16330
|
console.log();
|
|
16034
16331
|
if (options.output) {
|
|
16035
|
-
const outputPath =
|
|
16332
|
+
const outputPath = resolve16(process.cwd(), options.output);
|
|
16036
16333
|
await writeFile12(
|
|
16037
16334
|
outputPath,
|
|
16038
16335
|
JSON.stringify({ source: absPath, beatCount: beats.length, beats }, null, 2),
|
|
@@ -16141,6 +16438,793 @@ async function handleDetectionToolCall(name, args) {
|
|
|
16141
16438
|
}
|
|
16142
16439
|
}
|
|
16143
16440
|
|
|
16441
|
+
// ../cli/src/commands/ai-video.ts
|
|
16442
|
+
import { readFile as readFile14, writeFile as writeFile13 } from "node:fs/promises";
|
|
16443
|
+
import { resolve as resolve17 } from "node:path";
|
|
16444
|
+
import chalk12 from "chalk";
|
|
16445
|
+
import ora10 from "ora";
|
|
16446
|
+
async function executeVideoGenerate(options) {
|
|
16447
|
+
const {
|
|
16448
|
+
prompt: prompt3,
|
|
16449
|
+
provider = "kling",
|
|
16450
|
+
image,
|
|
16451
|
+
duration = 5,
|
|
16452
|
+
ratio = "16:9",
|
|
16453
|
+
seed,
|
|
16454
|
+
mode = "std",
|
|
16455
|
+
negative,
|
|
16456
|
+
resolution,
|
|
16457
|
+
veoModel = "3.1-fast",
|
|
16458
|
+
output,
|
|
16459
|
+
wait = true,
|
|
16460
|
+
apiKey
|
|
16461
|
+
} = options;
|
|
16462
|
+
try {
|
|
16463
|
+
const envKeyMap = { grok: "XAI_API_KEY", runway: "RUNWAY_API_SECRET", kling: "KLING_API_KEY", veo: "GOOGLE_API_KEY" };
|
|
16464
|
+
const key = apiKey || process.env[envKeyMap[provider] || ""];
|
|
16465
|
+
if (!key) return { success: false, error: `${envKeyMap[provider]} required for ${provider}` };
|
|
16466
|
+
let referenceImage;
|
|
16467
|
+
if (image) {
|
|
16468
|
+
const imagePath = resolve17(process.cwd(), image);
|
|
16469
|
+
const imageBuffer = await readFile14(imagePath);
|
|
16470
|
+
const ext = image.toLowerCase().split(".").pop();
|
|
16471
|
+
const mimeTypes = { jpg: "image/jpeg", jpeg: "image/jpeg", png: "image/png", gif: "image/gif", webp: "image/webp" };
|
|
16472
|
+
const mimeType = mimeTypes[ext || "png"] || "image/png";
|
|
16473
|
+
referenceImage = `data:${mimeType};base64,${imageBuffer.toString("base64")}`;
|
|
16474
|
+
}
|
|
16475
|
+
if (provider === "runway") {
|
|
16476
|
+
const runway = new RunwayProvider();
|
|
16477
|
+
await runway.initialize({ apiKey: key });
|
|
16478
|
+
const result = await runway.generateVideo(prompt3, {
|
|
16479
|
+
prompt: prompt3,
|
|
16480
|
+
referenceImage,
|
|
16481
|
+
duration,
|
|
16482
|
+
aspectRatio: ratio,
|
|
16483
|
+
seed
|
|
16484
|
+
});
|
|
16485
|
+
if (result.status === "failed") return { success: false, error: result.error || "Runway generation failed" };
|
|
16486
|
+
if (!wait) return { success: true, taskId: result.id, status: "processing", provider: "runway" };
|
|
16487
|
+
const finalResult = await runway.waitForCompletion(result.id, () => {
|
|
16488
|
+
}, 3e5);
|
|
16489
|
+
if (finalResult.status !== "completed") return { success: false, error: finalResult.error || "Runway generation failed" };
|
|
16490
|
+
let outputPath;
|
|
16491
|
+
if (output && finalResult.videoUrl) {
|
|
16492
|
+
const buffer = await downloadVideo(finalResult.videoUrl, key);
|
|
16493
|
+
outputPath = resolve17(process.cwd(), output);
|
|
16494
|
+
await writeFile13(outputPath, buffer);
|
|
16495
|
+
}
|
|
16496
|
+
return { success: true, taskId: result.id, status: "completed", videoUrl: finalResult.videoUrl, duration: finalResult.duration, outputPath, provider: "runway" };
|
|
16497
|
+
} else if (provider === "kling") {
|
|
16498
|
+
const kling = new KlingProvider();
|
|
16499
|
+
await kling.initialize({ apiKey: key });
|
|
16500
|
+
if (!kling.isConfigured()) return { success: false, error: "Invalid Kling API key format" };
|
|
16501
|
+
let klingImage = referenceImage;
|
|
16502
|
+
if (klingImage && klingImage.startsWith("data:")) {
|
|
16503
|
+
const imgbbKey = process.env.IMGBB_API_KEY;
|
|
16504
|
+
if (!imgbbKey) return { success: false, error: "IMGBB_API_KEY required for Kling image-to-video" };
|
|
16505
|
+
const base64Data = klingImage.split(",")[1];
|
|
16506
|
+
const uploadResult = await uploadToImgbb(Buffer.from(base64Data, "base64"), imgbbKey);
|
|
16507
|
+
if (!uploadResult.success || !uploadResult.url) return { success: false, error: `ImgBB upload failed: ${uploadResult.error}` };
|
|
16508
|
+
klingImage = uploadResult.url;
|
|
16509
|
+
}
|
|
16510
|
+
const result = await kling.generateVideo(prompt3, {
|
|
16511
|
+
prompt: prompt3,
|
|
16512
|
+
referenceImage: klingImage,
|
|
16513
|
+
duration,
|
|
16514
|
+
aspectRatio: ratio,
|
|
16515
|
+
negativePrompt: negative,
|
|
16516
|
+
mode
|
|
16517
|
+
});
|
|
16518
|
+
if (result.status === "failed") return { success: false, error: result.error || "Kling generation failed" };
|
|
16519
|
+
const taskType = referenceImage ? "image2video" : "text2video";
|
|
16520
|
+
if (!wait) return { success: true, taskId: result.id, status: "processing", provider: "kling" };
|
|
16521
|
+
const finalResult = await kling.waitForCompletion(result.id, taskType, () => {
|
|
16522
|
+
}, 6e5);
|
|
16523
|
+
if (finalResult.status !== "completed") return { success: false, error: finalResult.error || "Kling generation failed" };
|
|
16524
|
+
let outputPath;
|
|
16525
|
+
if (output && finalResult.videoUrl) {
|
|
16526
|
+
const buffer = await downloadVideo(finalResult.videoUrl, key);
|
|
16527
|
+
outputPath = resolve17(process.cwd(), output);
|
|
16528
|
+
await writeFile13(outputPath, buffer);
|
|
16529
|
+
}
|
|
16530
|
+
return { success: true, taskId: result.id, status: "completed", videoUrl: finalResult.videoUrl, duration: finalResult.duration, outputPath, provider: "kling" };
|
|
16531
|
+
} else if (provider === "veo") {
|
|
16532
|
+
const gemini = new GeminiProvider();
|
|
16533
|
+
await gemini.initialize({ apiKey: key });
|
|
16534
|
+
const veoModelMap = { "3.0": "veo-3.0-generate-preview", "3.1": "veo-3.1-generate-preview", "3.1-fast": "veo-3.1-fast-generate-preview" };
|
|
16535
|
+
const model = veoModelMap[veoModel] || "veo-3.1-fast-generate-preview";
|
|
16536
|
+
const veoDuration = duration <= 6 ? 6 : 8;
|
|
16537
|
+
const result = await gemini.generateVideo(prompt3, {
|
|
16538
|
+
prompt: prompt3,
|
|
16539
|
+
referenceImage,
|
|
16540
|
+
duration: veoDuration,
|
|
16541
|
+
aspectRatio: ratio,
|
|
16542
|
+
model,
|
|
16543
|
+
negativePrompt: negative,
|
|
16544
|
+
resolution
|
|
16545
|
+
});
|
|
16546
|
+
if (result.status === "failed") return { success: false, error: result.error || "Veo generation failed" };
|
|
16547
|
+
if (!wait) return { success: true, taskId: result.id, status: "processing", provider: "veo" };
|
|
16548
|
+
const finalResult = await gemini.waitForVideoCompletion(result.id, () => {
|
|
16549
|
+
}, 3e5);
|
|
16550
|
+
if (finalResult.status !== "completed") return { success: false, error: finalResult.error || "Veo generation failed" };
|
|
16551
|
+
let outputPath;
|
|
16552
|
+
if (output && finalResult.videoUrl) {
|
|
16553
|
+
const buffer = await downloadVideo(finalResult.videoUrl, key);
|
|
16554
|
+
outputPath = resolve17(process.cwd(), output);
|
|
16555
|
+
await writeFile13(outputPath, buffer);
|
|
16556
|
+
}
|
|
16557
|
+
return { success: true, taskId: result.id, status: "completed", videoUrl: finalResult.videoUrl, outputPath, provider: "veo" };
|
|
16558
|
+
} else if (provider === "grok") {
|
|
16559
|
+
const grok = new GrokProvider();
|
|
16560
|
+
await grok.initialize({ apiKey: key });
|
|
16561
|
+
const result = await grok.generateVideo(prompt3, {
|
|
16562
|
+
prompt: prompt3,
|
|
16563
|
+
referenceImage,
|
|
16564
|
+
duration,
|
|
16565
|
+
aspectRatio: ratio
|
|
16566
|
+
});
|
|
16567
|
+
if (result.status === "failed") return { success: false, error: result.error || "Grok generation failed" };
|
|
16568
|
+
if (!wait) return { success: true, taskId: result.id, status: "processing", provider: "grok" };
|
|
16569
|
+
const finalResult = await grok.waitForCompletion(result.id, () => {
|
|
16570
|
+
}, 3e5);
|
|
16571
|
+
if (finalResult.status !== "completed") return { success: false, error: finalResult.error || "Grok generation failed" };
|
|
16572
|
+
let outputPath;
|
|
16573
|
+
if (output && finalResult.videoUrl) {
|
|
16574
|
+
const buffer = await downloadVideo(finalResult.videoUrl, key);
|
|
16575
|
+
outputPath = resolve17(process.cwd(), output);
|
|
16576
|
+
await writeFile13(outputPath, buffer);
|
|
16577
|
+
}
|
|
16578
|
+
return { success: true, taskId: result.id, status: "completed", videoUrl: finalResult.videoUrl, duration: finalResult.duration, outputPath, provider: "grok" };
|
|
16579
|
+
}
|
|
16580
|
+
return { success: false, error: `Unsupported provider: ${provider}` };
|
|
16581
|
+
} catch (error) {
|
|
16582
|
+
return { success: false, error: `Video generation failed: ${error instanceof Error ? error.message : String(error)}` };
|
|
16583
|
+
}
|
|
16584
|
+
}
|
|
16585
|
+
async function executeVideoStatus(options) {
|
|
16586
|
+
const { taskId, provider = "runway", taskType = "text2video", wait = false, output, apiKey } = options;
|
|
16587
|
+
try {
|
|
16588
|
+
const envKeyMap = { runway: "RUNWAY_API_SECRET", kling: "KLING_API_KEY" };
|
|
16589
|
+
const key = apiKey || process.env[envKeyMap[provider] || ""];
|
|
16590
|
+
if (!key) return { success: false, error: `${envKeyMap[provider]} required` };
|
|
16591
|
+
if (provider === "runway") {
|
|
16592
|
+
const runway = new RunwayProvider();
|
|
16593
|
+
await runway.initialize({ apiKey: key });
|
|
16594
|
+
let result = await runway.getGenerationStatus(taskId);
|
|
16595
|
+
if (wait && result.status !== "completed" && result.status !== "failed" && result.status !== "cancelled") {
|
|
16596
|
+
result = await runway.waitForCompletion(taskId, () => {
|
|
16597
|
+
});
|
|
16598
|
+
}
|
|
16599
|
+
let outputPath;
|
|
16600
|
+
if (output && result.videoUrl) {
|
|
16601
|
+
const buffer = await downloadVideo(result.videoUrl, key);
|
|
16602
|
+
outputPath = resolve17(process.cwd(), output);
|
|
16603
|
+
await writeFile13(outputPath, buffer);
|
|
16604
|
+
}
|
|
16605
|
+
return { success: true, taskId, status: result.status, progress: result.progress, videoUrl: result.videoUrl, outputPath };
|
|
16606
|
+
} else if (provider === "kling") {
|
|
16607
|
+
const kling = new KlingProvider();
|
|
16608
|
+
await kling.initialize({ apiKey: key });
|
|
16609
|
+
let result = await kling.getGenerationStatus(taskId, taskType);
|
|
16610
|
+
if (wait && result.status !== "completed" && result.status !== "failed" && result.status !== "cancelled") {
|
|
16611
|
+
result = await kling.waitForCompletion(taskId, taskType, () => {
|
|
16612
|
+
});
|
|
16613
|
+
}
|
|
16614
|
+
let outputPath;
|
|
16615
|
+
if (output && result.videoUrl) {
|
|
16616
|
+
const buffer = await downloadVideo(result.videoUrl, key);
|
|
16617
|
+
outputPath = resolve17(process.cwd(), output);
|
|
16618
|
+
await writeFile13(outputPath, buffer);
|
|
16619
|
+
}
|
|
16620
|
+
return { success: true, taskId, status: result.status, videoUrl: result.videoUrl, duration: result.duration, outputPath };
|
|
16621
|
+
}
|
|
16622
|
+
return { success: false, error: `Unsupported provider: ${provider}` };
|
|
16623
|
+
} catch (error) {
|
|
16624
|
+
return { success: false, error: `Status check failed: ${error instanceof Error ? error.message : String(error)}` };
|
|
16625
|
+
}
|
|
16626
|
+
}
|
|
16627
|
+
async function executeVideoCancel(options) {
|
|
16628
|
+
const { taskId, apiKey } = options;
|
|
16629
|
+
try {
|
|
16630
|
+
const key = apiKey || process.env.RUNWAY_API_SECRET;
|
|
16631
|
+
if (!key) return { success: false, error: "RUNWAY_API_SECRET required" };
|
|
16632
|
+
const runway = new RunwayProvider();
|
|
16633
|
+
await runway.initialize({ apiKey: key });
|
|
16634
|
+
const success = await runway.cancelGeneration(taskId);
|
|
16635
|
+
return { success };
|
|
16636
|
+
} catch (error) {
|
|
16637
|
+
return { success: false, error: `Cancel failed: ${error instanceof Error ? error.message : String(error)}` };
|
|
16638
|
+
}
|
|
16639
|
+
}
|
|
16640
|
+
async function executeVideoExtend(options) {
|
|
16641
|
+
const { videoId, provider = "kling", prompt: prompt3, duration = 5, negative, veoModel = "3.1", output, wait = true, apiKey } = options;
|
|
16642
|
+
try {
|
|
16643
|
+
if (provider === "kling") {
|
|
16644
|
+
const key = apiKey || process.env.KLING_API_KEY;
|
|
16645
|
+
if (!key) return { success: false, error: "KLING_API_KEY required" };
|
|
16646
|
+
const kling = new KlingProvider();
|
|
16647
|
+
await kling.initialize({ apiKey: key });
|
|
16648
|
+
if (!kling.isConfigured()) return { success: false, error: "Invalid Kling API key format" };
|
|
16649
|
+
const result = await kling.extendVideo(videoId, {
|
|
16650
|
+
prompt: prompt3,
|
|
16651
|
+
negativePrompt: negative,
|
|
16652
|
+
duration: String(duration)
|
|
16653
|
+
});
|
|
16654
|
+
if (result.status === "failed") return { success: false, error: result.error || "Kling extension failed" };
|
|
16655
|
+
if (!wait) return { success: true, taskId: result.id, status: "processing" };
|
|
16656
|
+
const finalResult = await kling.waitForExtendCompletion(result.id, () => {
|
|
16657
|
+
}, 6e5);
|
|
16658
|
+
if (finalResult.status !== "completed") return { success: false, error: finalResult.error || "Kling extension failed" };
|
|
16659
|
+
let outputPath;
|
|
16660
|
+
if (output && finalResult.videoUrl) {
|
|
16661
|
+
const buffer = await downloadVideo(finalResult.videoUrl, key);
|
|
16662
|
+
outputPath = resolve17(process.cwd(), output);
|
|
16663
|
+
await writeFile13(outputPath, buffer);
|
|
16664
|
+
}
|
|
16665
|
+
return { success: true, taskId: result.id, status: "completed", videoUrl: finalResult.videoUrl, duration: finalResult.duration, outputPath };
|
|
16666
|
+
} else if (provider === "veo") {
|
|
16667
|
+
const key = apiKey || process.env.GOOGLE_API_KEY;
|
|
16668
|
+
if (!key) return { success: false, error: "GOOGLE_API_KEY required" };
|
|
16669
|
+
const gemini = new GeminiProvider();
|
|
16670
|
+
await gemini.initialize({ apiKey: key });
|
|
16671
|
+
const veoModelMap = { "3.0": "veo-3.0-generate-preview", "3.1": "veo-3.1-generate-preview", "3.1-fast": "veo-3.1-fast-generate-preview" };
|
|
16672
|
+
const model = veoModelMap[veoModel] || "veo-3.1-generate-preview";
|
|
16673
|
+
const result = await gemini.extendVideo(videoId, prompt3, {
|
|
16674
|
+
duration,
|
|
16675
|
+
model
|
|
16676
|
+
});
|
|
16677
|
+
if (result.status === "failed") return { success: false, error: result.error || "Veo extension failed" };
|
|
16678
|
+
if (!wait) return { success: true, taskId: result.id, status: "processing" };
|
|
16679
|
+
const finalResult = await gemini.waitForVideoCompletion(result.id, () => {
|
|
16680
|
+
}, 3e5);
|
|
16681
|
+
if (finalResult.status !== "completed") return { success: false, error: finalResult.error || "Veo extension failed" };
|
|
16682
|
+
let outputPath;
|
|
16683
|
+
if (output && finalResult.videoUrl) {
|
|
16684
|
+
const buffer = await downloadVideo(finalResult.videoUrl, key);
|
|
16685
|
+
outputPath = resolve17(process.cwd(), output);
|
|
16686
|
+
await writeFile13(outputPath, buffer);
|
|
16687
|
+
}
|
|
16688
|
+
return { success: true, taskId: result.id, status: "completed", videoUrl: finalResult.videoUrl, outputPath };
|
|
16689
|
+
}
|
|
16690
|
+
return { success: false, error: `Unsupported provider: ${provider}` };
|
|
16691
|
+
} catch (error) {
|
|
16692
|
+
return { success: false, error: `Video extension failed: ${error instanceof Error ? error.message : String(error)}` };
|
|
16693
|
+
}
|
|
16694
|
+
}
|
|
16695
|
+
|
|
16696
|
+
// src/tools/ai-video.ts
|
|
16697
|
+
var aiVideoTools = [
|
|
16698
|
+
{
|
|
16699
|
+
name: "generate_video",
|
|
16700
|
+
description: "Generate video using AI. Supports Grok (default, free with audio), Kling, Runway, and Veo. Requires provider-specific API key.",
|
|
16701
|
+
inputSchema: {
|
|
16702
|
+
type: "object",
|
|
16703
|
+
properties: {
|
|
16704
|
+
prompt: { type: "string", description: "Text prompt describing the video" },
|
|
16705
|
+
provider: {
|
|
16706
|
+
type: "string",
|
|
16707
|
+
enum: ["grok", "kling", "runway", "veo"],
|
|
16708
|
+
description: "Video provider (default: kling)"
|
|
16709
|
+
},
|
|
16710
|
+
image: { type: "string", description: "Reference image path for image-to-video" },
|
|
16711
|
+
duration: { type: "number", description: "Duration in seconds (default: 5)" },
|
|
16712
|
+
ratio: { type: "string", description: "Aspect ratio: 16:9, 9:16, 1:1 (default: 16:9)" },
|
|
16713
|
+
mode: { type: "string", description: "Kling mode: std or pro" },
|
|
16714
|
+
negative: { type: "string", description: "Negative prompt (Kling/Veo)" },
|
|
16715
|
+
resolution: { type: "string", description: "Resolution: 720p, 1080p, 4k (Veo only)" },
|
|
16716
|
+
veoModel: { type: "string", description: "Veo model: 3.0, 3.1, 3.1-fast" },
|
|
16717
|
+
runwayModel: { type: "string", description: "Runway model: gen4.5, gen4_turbo" },
|
|
16718
|
+
output: { type: "string", description: "Output file path (downloads video)" },
|
|
16719
|
+
wait: { type: "boolean", description: "Wait for completion (default: true)" }
|
|
16720
|
+
},
|
|
16721
|
+
required: ["prompt"]
|
|
16722
|
+
}
|
|
16723
|
+
},
|
|
16724
|
+
{
|
|
16725
|
+
name: "generate_video_status",
|
|
16726
|
+
description: "Check video generation status for Runway or Kling tasks.",
|
|
16727
|
+
inputSchema: {
|
|
16728
|
+
type: "object",
|
|
16729
|
+
properties: {
|
|
16730
|
+
taskId: { type: "string", description: "Task ID from video generation" },
|
|
16731
|
+
provider: {
|
|
16732
|
+
type: "string",
|
|
16733
|
+
enum: ["runway", "kling"],
|
|
16734
|
+
description: "Provider (default: runway)"
|
|
16735
|
+
},
|
|
16736
|
+
taskType: {
|
|
16737
|
+
type: "string",
|
|
16738
|
+
enum: ["text2video", "image2video"],
|
|
16739
|
+
description: "Kling task type (default: text2video)"
|
|
16740
|
+
},
|
|
16741
|
+
wait: { type: "boolean", description: "Wait for completion" },
|
|
16742
|
+
output: { type: "string", description: "Download video when complete" }
|
|
16743
|
+
},
|
|
16744
|
+
required: ["taskId"]
|
|
16745
|
+
}
|
|
16746
|
+
},
|
|
16747
|
+
{
|
|
16748
|
+
name: "generate_video_cancel",
|
|
16749
|
+
description: "Cancel a Runway video generation task.",
|
|
16750
|
+
inputSchema: {
|
|
16751
|
+
type: "object",
|
|
16752
|
+
properties: {
|
|
16753
|
+
taskId: { type: "string", description: "Task ID to cancel" }
|
|
16754
|
+
},
|
|
16755
|
+
required: ["taskId"]
|
|
16756
|
+
}
|
|
16757
|
+
},
|
|
16758
|
+
{
|
|
16759
|
+
name: "generate_video_extend",
|
|
16760
|
+
description: "Extend video duration using Kling or Veo. Requires the video/operation ID from a previous generation.",
|
|
16761
|
+
inputSchema: {
|
|
16762
|
+
type: "object",
|
|
16763
|
+
properties: {
|
|
16764
|
+
videoId: { type: "string", description: "Video ID (Kling) or operation name (Veo)" },
|
|
16765
|
+
provider: {
|
|
16766
|
+
type: "string",
|
|
16767
|
+
enum: ["kling", "veo"],
|
|
16768
|
+
description: "Provider (default: kling)"
|
|
16769
|
+
},
|
|
16770
|
+
prompt: { type: "string", description: "Continuation prompt" },
|
|
16771
|
+
duration: { type: "number", description: "Duration in seconds" },
|
|
16772
|
+
negative: { type: "string", description: "Negative prompt (Kling)" },
|
|
16773
|
+
veoModel: { type: "string", description: "Veo model: 3.0, 3.1, 3.1-fast" },
|
|
16774
|
+
output: { type: "string", description: "Output file path" },
|
|
16775
|
+
wait: { type: "boolean", description: "Wait for completion (default: true)" }
|
|
16776
|
+
},
|
|
16777
|
+
required: ["videoId"]
|
|
16778
|
+
}
|
|
16779
|
+
}
|
|
16780
|
+
];
|
|
16781
|
+
/**
 * Route an AI video MCP tool call to the matching CLI executor.
 *
 * Known tools: generate_video, generate_video_status,
 * generate_video_cancel, generate_video_extend.
 *
 * @param {string} name - MCP tool name.
 * @param {object} args - Raw tool arguments from the MCP request.
 * @returns {Promise<string>} JSON string on success, or a plain
 *   human-readable error string when the executor reports failure.
 * @throws {Error} If `name` is not a known AI video tool.
 */
async function handleAiVideoToolCall(name, args) {
  if (name === "generate_video") {
    const r = await executeVideoGenerate({
      prompt: args.prompt,
      provider: args.provider,
      image: args.image,
      duration: args.duration,
      ratio: args.ratio,
      mode: args.mode,
      negative: args.negative,
      resolution: args.resolution,
      veoModel: args.veoModel,
      runwayModel: args.runwayModel,
      output: args.output,
      wait: args.wait
    });
    if (!r.success) return `Video generation failed: ${r.error}`;
    return JSON.stringify({ success: true, taskId: r.taskId, status: r.status, videoUrl: r.videoUrl, duration: r.duration, outputPath: r.outputPath, provider: r.provider });
  }
  if (name === "generate_video_status") {
    const r = await executeVideoStatus({
      taskId: args.taskId,
      provider: args.provider,
      taskType: args.taskType,
      wait: args.wait,
      output: args.output
    });
    if (!r.success) return `Status check failed: ${r.error}`;
    return JSON.stringify({ success: true, taskId: r.taskId, status: r.status, progress: r.progress, videoUrl: r.videoUrl, outputPath: r.outputPath });
  }
  if (name === "generate_video_cancel") {
    const r = await executeVideoCancel({ taskId: args.taskId });
    if (!r.success) return `Cancel failed: ${r.error}`;
    return JSON.stringify({ success: true });
  }
  if (name === "generate_video_extend") {
    const r = await executeVideoExtend({
      videoId: args.videoId,
      provider: args.provider,
      prompt: args.prompt,
      duration: args.duration,
      negative: args.negative,
      veoModel: args.veoModel,
      output: args.output,
      wait: args.wait
    });
    if (!r.success) return `Video extension failed: ${r.error}`;
    return JSON.stringify({ success: true, taskId: r.taskId, status: r.status, videoUrl: r.videoUrl, duration: r.duration, outputPath: r.outputPath });
  }
  throw new Error(`Unknown AI video tool: ${name}`);
}
|
|
16835
|
+
|
|
16836
|
+
// ../cli/src/commands/ai-audio.ts
|
|
16837
|
+
import { resolve as resolve18, dirname as dirname6, basename as basename9, extname as extname7 } from "node:path";
|
|
16838
|
+
import { readFile as readFile15, writeFile as writeFile14 } from "node:fs/promises";
|
|
16839
|
+
import { existsSync as existsSync11 } from "node:fs";
|
|
16840
|
+
import chalk13 from "chalk";
|
|
16841
|
+
import ora11 from "ora";
|
|
16842
|
+
init_exec_safe();
|
|
16843
|
+
/**
 * Transcribe an audio file with OpenAI Whisper.
 *
 * @param {object} options
 * @param {string} options.audioPath - Input audio file path (relative to cwd allowed).
 * @param {string} [options.language] - Source language hint passed to Whisper.
 * @param {string} [options.output] - If set, the transcript is also written here.
 * @param {string} [options.format] - Output format override (json/srt/vtt).
 * @param {string} [options.apiKey] - Overrides OPENAI_API_KEY from the environment.
 * @returns {Promise<object>} `{ success, text?, segments?, detectedLanguage?, outputPath?, error? }`;
 *   never throws — failures are reported via `success: false`.
 */
async function executeTranscribe(options) {
  const { audioPath, language, output, format, apiKey } = options;
  try {
    // Explicit key takes precedence over the environment.
    const resolvedKey = apiKey || process.env.OPENAI_API_KEY;
    if (!resolvedKey) return { success: false, error: "OPENAI_API_KEY required" };
    const sourcePath = resolve18(process.cwd(), audioPath);
    if (!existsSync11(sourcePath)) {
      return { success: false, error: `File not found: ${sourcePath}` };
    }
    const provider = new WhisperProvider();
    await provider.initialize({ apiKey: resolvedKey });
    // Whisper's API consumes a Blob, so wrap the raw file bytes.
    const blob = new Blob([await readFile15(sourcePath)]);
    const transcript = await provider.transcribe(blob, language);
    if (transcript.status === "failed") {
      return { success: false, error: transcript.error || "Transcription failed" };
    }
    let outputPath;
    if (output) {
      // Format is inferred from the output extension unless overridden.
      outputPath = resolve18(process.cwd(), output);
      const rendered = formatTranscript(transcript, detectFormat(output, format));
      await writeFile14(outputPath, rendered, "utf-8");
    }
    return {
      success: true,
      text: transcript.fullText,
      segments: transcript.segments?.map((s) => ({ startTime: s.startTime, endTime: s.endTime, text: s.text })),
      detectedLanguage: transcript.detectedLanguage,
      outputPath
    };
  } catch (error) {
    return { success: false, error: `Transcription failed: ${error instanceof Error ? error.message : String(error)}` };
  }
}
|
|
16876
|
+
/**
 * Isolate vocals from an audio file via ElevenLabs audio isolation.
 *
 * @param {object} options
 * @param {string} options.audioPath - Input audio file path.
 * @param {string} [options.output="vocals.mp3"] - Destination for the isolated vocals.
 * @param {string} [options.apiKey] - Overrides ELEVENLABS_API_KEY from the environment.
 * @returns {Promise<object>} `{ success, outputPath?, error? }`; never throws.
 */
async function executeIsolate(options) {
  const { audioPath, output = "vocals.mp3", apiKey } = options;
  try {
    const resolvedKey = apiKey || process.env.ELEVENLABS_API_KEY;
    if (!resolvedKey) return { success: false, error: "ELEVENLABS_API_KEY required" };
    const sourcePath = resolve18(process.cwd(), audioPath);
    if (!existsSync11(sourcePath)) {
      return { success: false, error: `File not found: ${sourcePath}` };
    }
    const inputBuffer = await readFile15(sourcePath);
    const provider = new ElevenLabsProvider();
    await provider.initialize({ apiKey: resolvedKey });
    const isolation = await provider.isolateVocals(inputBuffer);
    if (!isolation.success || !isolation.audioBuffer) {
      return { success: false, error: isolation.error || "Audio isolation failed" };
    }
    const outputPath = resolve18(process.cwd(), output);
    await writeFile14(outputPath, isolation.audioBuffer);
    return { success: true, outputPath };
  } catch (error) {
    return { success: false, error: `Audio isolation failed: ${error instanceof Error ? error.message : String(error)}` };
  }
}
|
|
16897
|
+
/**
 * Clone a voice from one or more audio samples via ElevenLabs.
 *
 * @param {object} options
 * @param {string[]} options.samplePaths - Paths to sample audio files (at least one).
 * @param {string} options.name - Name for the cloned voice.
 * @param {string} [options.description] - Optional voice description.
 * @param {object} [options.labels] - Optional ElevenLabs voice labels.
 * @param {boolean} [options.removeNoise] - Remove background noise from samples.
 * @param {string} [options.apiKey] - Overrides ELEVENLABS_API_KEY from the environment.
 * @returns {Promise<object>} `{ success, voiceId?, name?, error? }`; never throws.
 */
async function executeVoiceClone(options) {
  const { samplePaths, name, description, labels, removeNoise, apiKey } = options;
  try {
    const resolvedKey = apiKey || process.env.ELEVENLABS_API_KEY;
    if (!resolvedKey) return { success: false, error: "ELEVENLABS_API_KEY required" };
    if (!samplePaths || samplePaths.length === 0) {
      return { success: false, error: "At least one audio sample is required" };
    }
    // Read every sample up front so a missing file fails fast before any API call.
    const sampleBuffers = [];
    for (const samplePath of samplePaths) {
      const absSample = resolve18(process.cwd(), samplePath);
      if (!existsSync11(absSample)) return { success: false, error: `File not found: ${samplePath}` };
      sampleBuffers.push(await readFile15(absSample));
    }
    const provider = new ElevenLabsProvider();
    await provider.initialize({ apiKey: resolvedKey });
    const cloneResult = await provider.cloneVoice(sampleBuffers, {
      name,
      description,
      labels,
      removeBackgroundNoise: removeNoise
    });
    if (!cloneResult.success) return { success: false, error: cloneResult.error || "Voice cloning failed" };
    return { success: true, voiceId: cloneResult.voiceId, name };
  } catch (error) {
    return { success: false, error: `Voice cloning failed: ${error instanceof Error ? error.message : String(error)}` };
  }
}
|
|
16926
|
+
/**
 * Dub an audio or video file into another language.
 *
 * Pipeline: (1) if the input is a video, extract its audio with ffmpeg;
 * (2) transcribe with OpenAI Whisper; (3) translate the segment texts with
 * Claude; (4) unless `analyzeOnly`, synthesize each translated segment with
 * ElevenLabs TTS and concatenate the results into one output file.
 *
 * @param {object} options
 * @param {string} options.mediaPath - Input media file (video or audio).
 * @param {string} options.language - Target language code (e.g. "es", "ko").
 * @param {string} [options.source] - Source language code; auto-detected when omitted.
 * @param {string} [options.voice] - ElevenLabs voice ID for the generated speech.
 * @param {boolean} [options.analyzeOnly] - Stop after translation; optionally write timing JSON.
 * @param {string} [options.output] - Output file path (timing JSON or dubbed audio).
 * @returns {Promise<object>} Result object with `success` plus language/segment
 *   metadata; never throws — all failures surface as `success: false`.
 */
async function executeDub(options) {
  const { mediaPath, language, source, voice, analyzeOnly, output } = options;
  try {
    const absPath = resolve18(process.cwd(), mediaPath);
    if (!existsSync11(absPath)) return { success: false, error: `File not found: ${absPath}` };
    const openaiKey = process.env.OPENAI_API_KEY;
    const anthropicKey = process.env.ANTHROPIC_API_KEY;
    const elevenlabsKey = process.env.ELEVENLABS_API_KEY;
    if (!openaiKey) return { success: false, error: "OPENAI_API_KEY required for Whisper transcription" };
    if (!anthropicKey) return { success: false, error: "ANTHROPIC_API_KEY required for Claude translation" };
    // TTS is only needed when we actually synthesize audio.
    if (!analyzeOnly && !elevenlabsKey) return { success: false, error: "ELEVENLABS_API_KEY required for TTS" };
    // Video inputs: pull the audio track out to a temp mp3 next to the source.
    const ext = extname7(absPath).toLowerCase();
    const isVideo = [".mp4", ".mov", ".avi", ".mkv", ".webm"].includes(ext);
    let audioPath = absPath;
    if (isVideo) {
      const tempAudioPath = resolve18(dirname6(absPath), `temp-audio-${Date.now()}.mp3`);
      execSafeSync("ffmpeg", ["-i", absPath, "-vn", "-acodec", "mp3", "-y", tempAudioPath]);
      audioPath = tempAudioPath;
    }
    // Step 1: transcription.
    const whisper = new WhisperProvider();
    await whisper.initialize({ apiKey: openaiKey });
    const audioBuffer = await readFile15(audioPath);
    const audioBlob = new Blob([audioBuffer]);
    const transcriptResult = await whisper.transcribe(audioBlob, source);
    if (transcriptResult.status === "failed" || !transcriptResult.segments) {
      return { success: false, error: `Transcription failed: ${transcriptResult.error}` };
    }
    const segments = transcriptResult.segments;
    // Step 2: translation via Claude. analyzeContent is a generic
    // storyboard-style helper defined elsewhere in this bundle;
    // NOTE(review): it is repurposed here with a TRANSLATE prompt and the
    // translated text is read back from each item's `description` — confirm
    // this contract against ClaudeProvider.
    const claude = new ClaudeProvider();
    await claude.initialize({ apiKey: anthropicKey });
    // Human-readable names for common language codes; unknown codes are
    // passed through verbatim to the prompt.
    const languageNames = {
      en: "English",
      es: "Spanish",
      fr: "French",
      de: "German",
      it: "Italian",
      pt: "Portuguese",
      ja: "Japanese",
      ko: "Korean",
      zh: "Chinese",
      ar: "Arabic",
      ru: "Russian",
      hi: "Hindi"
    };
    const targetLangName = languageNames[language] || language;
    const segmentTexts = segments.map((s, i) => `[${i}] ${s.text}`).join("\n");
    let translatedSegments = [];
    try {
      const storyboard = await claude.analyzeContent(
        `TRANSLATE to ${targetLangName}. Return the translated text only, preserving segment numbers:

${segmentTexts}`,
        segments[segments.length - 1]?.endTime || 60
      );
      translatedSegments = segments.map((s, i) => ({
        text: storyboard[i]?.description || s.text,
        startTime: s.startTime,
        endTime: s.endTime
      }));
    } catch {
      // Best-effort: if translation fails, fall back to the original text so
      // the rest of the pipeline (and analyzeOnly output) still works.
      translatedSegments = segments.map((s) => ({ text: s.text, startTime: s.startTime, endTime: s.endTime }));
    }
    // analyzeOnly: report (and optionally persist) the timing/translation
    // table without synthesizing any audio.
    if (analyzeOnly) {
      if (output) {
        const timingPath = resolve18(process.cwd(), output);
        const timingData = {
          sourcePath: absPath,
          sourceLanguage: transcriptResult.detectedLanguage || source || "auto",
          targetLanguage: language,
          segments: segments.map((s, i) => ({
            startTime: s.startTime,
            endTime: s.endTime,
            original: s.text,
            translated: translatedSegments[i]?.text || s.text
          }))
        };
        await writeFile14(timingPath, JSON.stringify(timingData, null, 2));
      }
      return {
        success: true,
        sourceLanguage: transcriptResult.detectedLanguage || source || "auto",
        targetLanguage: language,
        segmentCount: segments.length,
        outputPath: output ? resolve18(process.cwd(), output) : void 0
      };
    }
    // Step 3: synthesize each translated segment and concatenate.
    const elevenlabs = new ElevenLabsProvider();
    await elevenlabs.initialize({ apiKey: elevenlabsKey });
    const dubbedBuffers = [];
    for (const seg of translatedSegments) {
      const ttsResult = await elevenlabs.textToSpeech(seg.text, { voiceId: voice });
      if (ttsResult.success && ttsResult.audioBuffer) {
        // NOTE(review): failed segments are silently dropped; if every TTS
        // call fails this still writes an empty file and reports success.
        dubbedBuffers.push(ttsResult.audioBuffer);
      }
    }
    // NOTE(review): buffers are concatenated back-to-back without honoring
    // segment start/end times, so the dubbed track's timing may drift from
    // the source video — confirm this is intended.
    const combinedBuffer = Buffer.concat(dubbedBuffers);
    const outputExt = isVideo ? ".mp3" : extname7(absPath);
    const defaultOutputPath = resolve18(dirname6(absPath), `${basename9(absPath, extname7(absPath))}-${language}${outputExt}`);
    const finalOutputPath = resolve18(process.cwd(), output || defaultOutputPath);
    await writeFile14(finalOutputPath, combinedBuffer);
    // Clean up the temp audio extracted from a video input; best-effort only.
    if (isVideo && audioPath !== absPath) {
      try {
        const { unlink: unlink2 } = await import("node:fs/promises");
        await unlink2(audioPath);
      } catch {
      }
    }
    return {
      success: true,
      outputPath: finalOutputPath,
      sourceLanguage: transcriptResult.detectedLanguage || source || "auto",
      targetLanguage: language,
      segmentCount: segments.length
    };
  } catch (error) {
    return { success: false, error: `Dubbing failed: ${error instanceof Error ? error.message : String(error)}` };
  }
}
|
|
17044
|
+
/**
 * Duck background music under a voice track using FFmpeg sidechain
 * compression. Runs entirely locally — no API key required.
 *
 * @param {object} options
 * @param {string} options.musicPath - Background music file.
 * @param {string} options.voicePath - Voice/narration file driving the sidechain.
 * @param {string} [options.output] - Output path; defaults to "<music>-ducked.<ext>" next to the music file.
 * @param {string} [options.threshold="-30"] - Sidechain threshold in dB.
 * @param {string} [options.ratio="3"] - Compression ratio.
 * @param {string} [options.attack="20"] - Attack time (ms).
 * @param {string} [options.release="200"] - Release time (ms).
 * @returns {Promise<object>} `{ success, outputPath?, error? }`; never throws.
 */
async function executeDuck(options) {
  const {
    musicPath,
    voicePath,
    output,
    threshold = "-30",
    ratio = "3",
    attack = "20",
    release = "200"
  } = options;
  try {
    // Everything below shells out to ffmpeg, so bail early if it is absent.
    if (!commandExists("ffmpeg")) return { success: false, error: "FFmpeg not found" };
    const musicAbs = resolve18(process.cwd(), musicPath);
    const voiceAbs = resolve18(process.cwd(), voicePath);
    if (!existsSync11(musicAbs)) return { success: false, error: `Music file not found: ${musicAbs}` };
    if (!existsSync11(voiceAbs)) return { success: false, error: `Voice file not found: ${voiceAbs}` };
    const fallbackOutput = resolve18(dirname6(musicAbs), `${basename9(musicAbs, extname7(musicAbs))}-ducked${extname7(musicAbs)}`);
    const outputPath = resolve18(process.cwd(), output || fallbackOutput);
    // Split the voice into a sidechain copy and a mix copy, compress the
    // music against the sidechain, then mix the ducked music with the voice.
    const duckFilter = `[1:a]asplit=2[sc][mix];[0:a][sc]sidechaincompress=threshold=${threshold}dB:ratio=${ratio}:attack=${attack}:release=${release}[ducked];[ducked][mix]amix=inputs=2:duration=longest`;
    const ffmpegArgs = [
      "-i",
      musicAbs,
      "-i",
      voiceAbs,
      "-filter_complex",
      duckFilter,
      "-y",
      outputPath
    ];
    await execSafe("ffmpeg", ffmpegArgs, { timeout: 12e4, maxBuffer: 50 * 1024 * 1024 });
    if (!existsSync11(outputPath)) return { success: false, error: "FFmpeg failed to create output" };
    return { success: true, outputPath };
  } catch (error) {
    return { success: false, error: `Audio ducking failed: ${error instanceof Error ? error.message : String(error)}` };
  }
}
|
|
17079
|
+
|
|
17080
|
+
// src/tools/ai-audio.ts
|
|
17081
|
+
// MCP tool definitions for the AI audio command group. Each entry follows
// the MCP tool shape ({ name, description, inputSchema }) with a
// JSON-Schema object describing its arguments; dispatch happens in
// handleAiAudioToolCall.
var aiAudioTools = [
  // Whisper speech-to-text.
  {
    name: "audio_transcribe",
    description: "Transcribe audio using OpenAI Whisper. Outputs text, SRT, or VTT. Requires OPENAI_API_KEY.",
    inputSchema: {
      type: "object",
      properties: {
        audioPath: { type: "string", description: "Input audio file path" },
        language: { type: "string", description: "Language code (e.g., en, ko)" },
        output: { type: "string", description: "Output file path (format auto-detected from extension: .json, .srt, .vtt)" },
        format: { type: "string", description: "Output format override: json, srt, vtt" }
      },
      required: ["audioPath"]
    }
  },
  // ElevenLabs vocal isolation.
  {
    name: "audio_isolate",
    description: "Isolate vocals from audio using ElevenLabs. Requires ELEVENLABS_API_KEY.",
    inputSchema: {
      type: "object",
      properties: {
        audioPath: { type: "string", description: "Input audio file path" },
        output: { type: "string", description: "Output audio file path (default: vocals.mp3)" }
      },
      required: ["audioPath"]
    }
  },
  // ElevenLabs voice cloning from sample recordings.
  {
    name: "audio_voice_clone",
    description: "Clone a voice from audio samples using ElevenLabs. Requires ELEVENLABS_API_KEY.",
    inputSchema: {
      type: "object",
      properties: {
        samplePaths: {
          type: "array",
          items: { type: "string" },
          description: "Audio sample file paths (1-25 files)"
        },
        name: { type: "string", description: "Voice name" },
        description: { type: "string", description: "Voice description" },
        removeNoise: { type: "boolean", description: "Remove background noise from samples" }
      },
      required: ["samplePaths", "name"]
    }
  },
  // Full dubbing pipeline: Whisper -> Claude -> ElevenLabs TTS.
  {
    name: "audio_dub",
    description: "Dub audio/video to another language (transcribe + translate + TTS). Requires OPENAI_API_KEY, ANTHROPIC_API_KEY, ELEVENLABS_API_KEY.",
    inputSchema: {
      type: "object",
      properties: {
        mediaPath: { type: "string", description: "Input media file (video or audio)" },
        language: { type: "string", description: "Target language code (e.g., es, ko, ja)" },
        source: { type: "string", description: "Source language code (default: auto-detect)" },
        voice: { type: "string", description: "ElevenLabs voice ID for output" },
        analyzeOnly: { type: "boolean", description: "Only analyze timing, don't generate audio" },
        output: { type: "string", description: "Output file path" }
      },
      required: ["mediaPath", "language"]
    }
  },
  // Local FFmpeg sidechain ducking; the only tool here with no API key.
  // NOTE(review): executeDuck also accepts attack/release options that this
  // schema does not expose — confirm whether that is intentional.
  {
    name: "audio_duck",
    description: "Auto-duck background music when voice is present using FFmpeg sidechain compression. Free, no API key needed.",
    inputSchema: {
      type: "object",
      properties: {
        musicPath: { type: "string", description: "Background music file path" },
        voicePath: { type: "string", description: "Voice/narration track path" },
        output: { type: "string", description: "Output audio file path" },
        threshold: { type: "string", description: "Sidechain threshold in dB (default: -30)" },
        ratio: { type: "string", description: "Compression ratio (default: 3)" }
      },
      required: ["musicPath", "voicePath"]
    }
  }
];
|
|
17158
|
+
/**
 * Route an AI audio MCP tool call to the matching CLI executor.
 *
 * Known tools: audio_transcribe, audio_isolate, audio_voice_clone,
 * audio_dub, audio_duck.
 *
 * @param {string} name - MCP tool name.
 * @param {object} args - Raw tool arguments from the MCP request.
 * @returns {Promise<string>} JSON string on success, or a plain
 *   human-readable error string when the executor reports failure.
 * @throws {Error} If `name` is not a known AI audio tool.
 */
async function handleAiAudioToolCall(name, args) {
  if (name === "audio_transcribe") {
    const r = await executeTranscribe({
      audioPath: args.audioPath,
      language: args.language,
      output: args.output,
      format: args.format
    });
    if (!r.success) return `Transcription failed: ${r.error}`;
    // Truncate the preview so huge transcripts don't flood the MCP response.
    return JSON.stringify({
      success: true,
      text: r.text?.slice(0, 500),
      segmentCount: r.segments?.length,
      detectedLanguage: r.detectedLanguage,
      outputPath: r.outputPath
    });
  }
  if (name === "audio_isolate") {
    const r = await executeIsolate({
      audioPath: args.audioPath,
      output: args.output
    });
    if (!r.success) return `Audio isolation failed: ${r.error}`;
    return JSON.stringify({ success: true, outputPath: r.outputPath });
  }
  if (name === "audio_voice_clone") {
    const r = await executeVoiceClone({
      samplePaths: args.samplePaths,
      name: args.name,
      description: args.description,
      removeNoise: args.removeNoise
    });
    if (!r.success) return `Voice cloning failed: ${r.error}`;
    return JSON.stringify({ success: true, voiceId: r.voiceId, name: r.name });
  }
  if (name === "audio_dub") {
    const r = await executeDub({
      mediaPath: args.mediaPath,
      language: args.language,
      source: args.source,
      voice: args.voice,
      analyzeOnly: args.analyzeOnly,
      output: args.output
    });
    if (!r.success) return `Dubbing failed: ${r.error}`;
    return JSON.stringify({
      success: true,
      outputPath: r.outputPath,
      sourceLanguage: r.sourceLanguage,
      targetLanguage: r.targetLanguage,
      segmentCount: r.segmentCount
    });
  }
  if (name === "audio_duck") {
    const r = await executeDuck({
      musicPath: args.musicPath,
      voicePath: args.voicePath,
      output: args.output,
      threshold: args.threshold,
      ratio: args.ratio
    });
    if (!r.success) return `Audio ducking failed: ${r.error}`;
    return JSON.stringify({ success: true, outputPath: r.outputPath });
  }
  throw new Error(`Unknown AI audio tool: ${name}`);
}
|
|
17227
|
+
|
|
16144
17228
|
// src/tools/index.ts
|
|
16145
17229
|
var tools = [
|
|
16146
17230
|
...projectTools,
|
|
@@ -16150,7 +17234,9 @@ var tools = [
|
|
|
16150
17234
|
...aiAnalysisTools,
|
|
16151
17235
|
...aiPipelineTools,
|
|
16152
17236
|
...aiGenerationTools,
|
|
16153
|
-
...detectionTools
|
|
17237
|
+
...detectionTools,
|
|
17238
|
+
...aiVideoTools,
|
|
17239
|
+
...aiAudioTools
|
|
16154
17240
|
];
|
|
16155
17241
|
var handlers2 = {};
|
|
16156
17242
|
for (const t of projectTools) handlers2[t.name] = handleProjectToolCall;
|
|
@@ -16161,6 +17247,8 @@ for (const t of aiAnalysisTools) handlers2[t.name] = handleAiAnalysisToolCall;
|
|
|
16161
17247
|
for (const t of aiPipelineTools) handlers2[t.name] = handleAiPipelineToolCall;
|
|
16162
17248
|
for (const t of aiGenerationTools) handlers2[t.name] = handleAiGenerationToolCall;
|
|
16163
17249
|
for (const t of detectionTools) handlers2[t.name] = handleDetectionToolCall;
|
|
17250
|
+
for (const t of aiVideoTools) handlers2[t.name] = handleAiVideoToolCall;
|
|
17251
|
+
for (const t of aiAudioTools) handlers2[t.name] = handleAiAudioToolCall;
|
|
16164
17252
|
async function handleToolCall(name, args) {
|
|
16165
17253
|
try {
|
|
16166
17254
|
const handler = handlers2[name];
|
|
@@ -16180,8 +17268,8 @@ async function handleToolCall(name, args) {
|
|
|
16180
17268
|
}
|
|
16181
17269
|
|
|
16182
17270
|
// src/resources/index.ts
|
|
16183
|
-
import { readFile as
|
|
16184
|
-
import { resolve as
|
|
17271
|
+
import { readFile as readFile16 } from "node:fs/promises";
|
|
17272
|
+
import { resolve as resolve19 } from "node:path";
|
|
16185
17273
|
var resources = [
|
|
16186
17274
|
{
|
|
16187
17275
|
uri: "vibe://project/current",
|
|
@@ -16216,8 +17304,8 @@ var resources = [
|
|
|
16216
17304
|
];
|
|
16217
17305
|
var currentProjectPath = process.env.VIBE_PROJECT_PATH || null;
|
|
16218
17306
|
// Load a project file from disk and hydrate it into a Project instance.
// `projectPath` may be relative; it is resolved against the current working
// directory. Propagates errors from missing files or invalid JSON to the
// caller rather than catching them here.
async function loadProject2(projectPath) {
  const absPath = resolve19(process.cwd(), projectPath);
  const content = await readFile16(absPath, "utf-8");
  const data = JSON.parse(content);
  // Project.fromJSON is defined elsewhere in this bundle; presumably it
  // validates/normalizes the parsed structure — confirm before relying on it.
  return Project.fromJSON(data);
}
|