@vibeframe/cli 0.27.0 → 0.29.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109)
  1. package/LICENSE +21 -0
  2. package/dist/agent/adapters/index.d.ts +1 -0
  3. package/dist/agent/adapters/index.d.ts.map +1 -1
  4. package/dist/agent/adapters/index.js +5 -0
  5. package/dist/agent/adapters/index.js.map +1 -1
  6. package/dist/agent/adapters/openrouter.d.ts +16 -0
  7. package/dist/agent/adapters/openrouter.d.ts.map +1 -0
  8. package/dist/agent/adapters/openrouter.js +100 -0
  9. package/dist/agent/adapters/openrouter.js.map +1 -0
  10. package/dist/agent/types.d.ts +1 -1
  11. package/dist/agent/types.d.ts.map +1 -1
  12. package/dist/commands/agent.d.ts.map +1 -1
  13. package/dist/commands/agent.js +3 -1
  14. package/dist/commands/agent.js.map +1 -1
  15. package/dist/commands/setup.js +5 -2
  16. package/dist/commands/setup.js.map +1 -1
  17. package/dist/config/schema.d.ts +2 -1
  18. package/dist/config/schema.d.ts.map +1 -1
  19. package/dist/config/schema.js +2 -0
  20. package/dist/config/schema.js.map +1 -1
  21. package/dist/index.js +0 -0
  22. package/package.json +16 -12
  23. package/.turbo/turbo-build.log +0 -4
  24. package/.turbo/turbo-lint.log +0 -21
  25. package/.turbo/turbo-test.log +0 -689
  26. package/src/agent/adapters/claude.ts +0 -143
  27. package/src/agent/adapters/gemini.ts +0 -159
  28. package/src/agent/adapters/index.ts +0 -61
  29. package/src/agent/adapters/ollama.ts +0 -231
  30. package/src/agent/adapters/openai.ts +0 -116
  31. package/src/agent/adapters/xai.ts +0 -119
  32. package/src/agent/index.ts +0 -251
  33. package/src/agent/memory/index.ts +0 -151
  34. package/src/agent/prompts/system.ts +0 -106
  35. package/src/agent/tools/ai-editing.ts +0 -845
  36. package/src/agent/tools/ai-generation.ts +0 -1073
  37. package/src/agent/tools/ai-pipeline.ts +0 -1055
  38. package/src/agent/tools/ai.ts +0 -21
  39. package/src/agent/tools/batch.ts +0 -429
  40. package/src/agent/tools/e2e.test.ts +0 -545
  41. package/src/agent/tools/export.ts +0 -184
  42. package/src/agent/tools/filesystem.ts +0 -237
  43. package/src/agent/tools/index.ts +0 -150
  44. package/src/agent/tools/integration.test.ts +0 -775
  45. package/src/agent/tools/media.ts +0 -697
  46. package/src/agent/tools/project.ts +0 -313
  47. package/src/agent/tools/timeline.ts +0 -951
  48. package/src/agent/types.ts +0 -68
  49. package/src/commands/agent.ts +0 -340
  50. package/src/commands/ai-analyze.ts +0 -429
  51. package/src/commands/ai-animated-caption.ts +0 -390
  52. package/src/commands/ai-audio.ts +0 -941
  53. package/src/commands/ai-broll.ts +0 -490
  54. package/src/commands/ai-edit-cli.ts +0 -658
  55. package/src/commands/ai-edit.ts +0 -1542
  56. package/src/commands/ai-fill-gaps.ts +0 -566
  57. package/src/commands/ai-helpers.ts +0 -65
  58. package/src/commands/ai-highlights.ts +0 -1303
  59. package/src/commands/ai-image.ts +0 -761
  60. package/src/commands/ai-motion.ts +0 -347
  61. package/src/commands/ai-narrate.ts +0 -451
  62. package/src/commands/ai-review.ts +0 -309
  63. package/src/commands/ai-script-pipeline-cli.ts +0 -1710
  64. package/src/commands/ai-script-pipeline.ts +0 -1365
  65. package/src/commands/ai-suggest-edit.ts +0 -264
  66. package/src/commands/ai-video-fx.ts +0 -445
  67. package/src/commands/ai-video.ts +0 -915
  68. package/src/commands/ai-viral.ts +0 -595
  69. package/src/commands/ai-visual-fx.ts +0 -601
  70. package/src/commands/ai.test.ts +0 -627
  71. package/src/commands/ai.ts +0 -307
  72. package/src/commands/analyze.ts +0 -282
  73. package/src/commands/audio.ts +0 -644
  74. package/src/commands/batch.test.ts +0 -279
  75. package/src/commands/batch.ts +0 -440
  76. package/src/commands/detect.ts +0 -329
  77. package/src/commands/doctor.ts +0 -237
  78. package/src/commands/edit-cmd.ts +0 -1014
  79. package/src/commands/export.ts +0 -918
  80. package/src/commands/generate.ts +0 -2146
  81. package/src/commands/media.ts +0 -177
  82. package/src/commands/output.ts +0 -142
  83. package/src/commands/pipeline.ts +0 -398
  84. package/src/commands/project.test.ts +0 -127
  85. package/src/commands/project.ts +0 -149
  86. package/src/commands/sanitize.ts +0 -60
  87. package/src/commands/schema.ts +0 -130
  88. package/src/commands/setup.ts +0 -509
  89. package/src/commands/timeline.test.ts +0 -499
  90. package/src/commands/timeline.ts +0 -529
  91. package/src/commands/validate.ts +0 -77
  92. package/src/config/config.test.ts +0 -197
  93. package/src/config/index.ts +0 -125
  94. package/src/config/schema.ts +0 -82
  95. package/src/engine/index.ts +0 -2
  96. package/src/engine/project.test.ts +0 -702
  97. package/src/engine/project.ts +0 -439
  98. package/src/index.ts +0 -146
  99. package/src/utils/api-key.test.ts +0 -41
  100. package/src/utils/api-key.ts +0 -247
  101. package/src/utils/audio.ts +0 -83
  102. package/src/utils/exec-safe.ts +0 -75
  103. package/src/utils/first-run.ts +0 -52
  104. package/src/utils/provider-resolver.ts +0 -56
  105. package/src/utils/remotion.ts +0 -951
  106. package/src/utils/subtitle.test.ts +0 -227
  107. package/src/utils/subtitle.ts +0 -169
  108. package/src/utils/tty.ts +0 -196
  109. package/tsconfig.json +0 -20
@@ -1,658 +0,0 @@
1
- /**
2
- * ai-edit-cli.ts — CLI command registrations for video/audio editing commands.
3
- *
4
- * Commands: silence-cut, caption, noise-reduce, fade, translate-srt, jump-cut
5
- *
6
- * This file contains only the Commander.js command definitions (UI layer).
7
- * All execute functions and types live in ai-edit.ts.
8
- *
9
- * Extracted from ai-edit.ts as part of modularisation.
10
- * ai.ts calls registerEditCommands(aiCommand).
11
- */
12
-
13
- import { type Command } from 'commander';
14
- import { resolve, extname, basename } from 'node:path';
15
- import { existsSync } from 'node:fs';
16
- import { commandExists } from '../utils/exec-safe.js';
17
- import chalk from 'chalk';
18
- import ora from 'ora';
19
- import { getApiKey } from '../utils/api-key.js';
20
- import {
21
- executeSilenceCut,
22
- executeCaption,
23
- executeNoiseReduce,
24
- executeFade,
25
- executeTranslateSrt,
26
- executeJumpCut,
27
- type CaptionStyle,
28
- } from './ai-edit.js';
29
- import { isJsonMode, outputResult } from "./output.js";
30
- import { rejectControlChars } from "./validate.js";
31
-
32
- // ── Command registrations ───────────────────────────────────────────────────
33
-
34
- export function registerEditCommands(aiCommand: Command): void {
35
- // ============================================================================
36
-
37
- aiCommand
38
- .command("silence-cut")
39
- .alias("sc")
40
- .description("Remove silent segments from video (FFmpeg default, or Gemini for smart detection)")
41
- .argument("<video>", "Video file path")
42
- .option("-o, --output <path>", "Output file path (default: <name>-cut.<ext>)")
43
- .option("-n, --noise <dB>", "Silence threshold in dB (default: -30)", "-30")
44
- .option("-d, --min-duration <seconds>", "Minimum silence duration to cut (default: 0.5)", "0.5")
45
- .option("--padding <seconds>", "Padding around non-silent segments (default: 0.1)", "0.1")
46
- .option("--analyze-only", "Only detect silence, don't cut")
47
- .option("--use-gemini", "Use Gemini Video Understanding for context-aware silence detection")
48
- .option("-m, --model <model>", "Gemini model (default: flash)")
49
- .option("--low-res", "Low resolution mode for longer videos (Gemini only)")
50
- .option("-k, --api-key <key>", "Google API key override (or set GOOGLE_API_KEY env)")
51
- .option("--dry-run", "Preview parameters without executing")
52
- .action(async (videoPath: string, options) => {
53
- try {
54
- const absVideoPath = resolve(process.cwd(), videoPath);
55
- if (!existsSync(absVideoPath)) {
56
- console.error(chalk.red(`Video not found: ${absVideoPath}`));
57
- process.exit(1);
58
- }
59
-
60
- // Check FFmpeg
61
- if (!commandExists("ffmpeg")) {
62
- console.error(chalk.red("FFmpeg not found. Please install FFmpeg."));
63
- process.exit(1);
64
- }
65
-
66
- const ext = extname(videoPath);
67
- const name = basename(videoPath, ext);
68
- const outputPath = options.output || `${name}-cut${ext}`;
69
-
70
- const useGemini = options.useGemini || false;
71
-
72
- if (options.dryRun) {
73
- outputResult({
74
- dryRun: true,
75
- command: "edit silence-cut",
76
- params: {
77
- videoPath: absVideoPath,
78
- noiseThreshold: parseFloat(options.noise),
79
- minDuration: parseFloat(options.minDuration),
80
- padding: parseFloat(options.padding),
81
- useGemini,
82
- analyzeOnly: options.analyzeOnly || false,
83
- },
84
- });
85
- return;
86
- }
87
-
88
- const spinnerText = useGemini
89
- ? "Analyzing video with Gemini (visual + audio)..."
90
- : "Detecting silence...";
91
- const spinner = ora(spinnerText).start();
92
-
93
- const result = await executeSilenceCut({
94
- videoPath: absVideoPath,
95
- outputPath: resolve(process.cwd(), outputPath),
96
- noiseThreshold: parseFloat(options.noise),
97
- minDuration: parseFloat(options.minDuration),
98
- padding: parseFloat(options.padding),
99
- analyzeOnly: options.analyzeOnly || false,
100
- useGemini,
101
- model: options.model,
102
- lowRes: options.lowRes,
103
- apiKey: options.apiKey,
104
- });
105
-
106
- if (!result.success) {
107
- spinner.fail(chalk.red(result.error || "Silence cut failed"));
108
- process.exit(1);
109
- }
110
-
111
- spinner.succeed(chalk.green("Silence detection complete"));
112
-
113
- if (isJsonMode()) {
114
- outputResult({
115
- success: true,
116
- method: result.method,
117
- totalDuration: result.totalDuration,
118
- silentPeriods: result.silentPeriods,
119
- silentDuration: result.silentDuration,
120
- outputPath: result.outputPath,
121
- });
122
- return;
123
- }
124
-
125
- console.log();
126
- console.log(chalk.bold.cyan("Silence Analysis"));
127
- console.log(chalk.dim("─".repeat(60)));
128
- console.log(`Detection method: ${chalk.bold(result.method === "gemini" ? "Gemini Video Understanding" : "FFmpeg silencedetect")}`);
129
- console.log(`Total duration: ${chalk.bold(result.totalDuration!.toFixed(1))}s`);
130
- console.log(`Silent periods: ${chalk.bold(String(result.silentPeriods!.length))}`);
131
- console.log(`Silent duration: ${chalk.bold(result.silentDuration!.toFixed(1))}s`);
132
- console.log(`Non-silent duration: ${chalk.bold((result.totalDuration! - result.silentDuration!).toFixed(1))}s`);
133
-
134
- if (result.silentPeriods!.length > 0) {
135
- console.log();
136
- console.log(chalk.dim("Silent periods:"));
137
- for (const period of result.silentPeriods!) {
138
- console.log(chalk.dim(` ${period.start.toFixed(2)}s - ${period.end.toFixed(2)}s (${period.duration.toFixed(2)}s)`));
139
- }
140
- }
141
-
142
- if (!options.analyzeOnly && result.outputPath) {
143
- console.log();
144
- console.log(chalk.green(`Output: ${result.outputPath}`));
145
- console.log(chalk.dim(`Removed ${result.silentDuration!.toFixed(1)}s of silence`));
146
- }
147
- console.log();
148
- } catch (error) {
149
- console.error(chalk.red("Silence cut failed"));
150
- console.error(error);
151
- process.exit(1);
152
- }
153
- });
154
-
155
- // ============================================================================
156
- // Caption Command
157
- // ============================================================================
158
-
159
- aiCommand
160
- .command("caption")
161
- .alias("cap")
162
- .description("Transcribe and burn styled captions onto video (Whisper + FFmpeg)")
163
- .argument("<video>", "Video file path")
164
- .option("-o, --output <path>", "Output file path (default: <name>-captioned.<ext>)")
165
- .option("-s, --style <style>", "Caption style: minimal, bold, outline, karaoke (default: bold)", "bold")
166
- .option("--font-size <pixels>", "Override auto-calculated font size")
167
- .option("--color <color>", "Font color (default: white)", "white")
168
- .option("-l, --language <lang>", "Language code for transcription (e.g., en, ko)")
169
- .option("--position <pos>", "Caption position: top, center, bottom (default: bottom)", "bottom")
170
- .option("-k, --api-key <key>", "OpenAI API key (or set OPENAI_API_KEY env)")
171
- .option("--dry-run", "Preview parameters without executing")
172
- .action(async (videoPath: string, options) => {
173
- try {
174
- const absVideoPath = resolve(process.cwd(), videoPath);
175
- if (!existsSync(absVideoPath)) {
176
- console.error(chalk.red(`Video not found: ${absVideoPath}`));
177
- process.exit(1);
178
- }
179
-
180
- // Check FFmpeg
181
- if (!commandExists("ffmpeg")) {
182
- console.error(chalk.red("FFmpeg not found. Please install FFmpeg."));
183
- process.exit(1);
184
- }
185
-
186
- if (options.dryRun) {
187
- outputResult({
188
- dryRun: true,
189
- command: "edit caption",
190
- params: {
191
- videoPath: absVideoPath,
192
- style: options.style,
193
- fontSize: options.fontSize ? parseInt(options.fontSize) : undefined,
194
- fontColor: options.color,
195
- language: options.language,
196
- position: options.position,
197
- },
198
- });
199
- return;
200
- }
201
-
202
- const apiKey = await getApiKey("OPENAI_API_KEY", "OpenAI", options.apiKey);
203
- if (!apiKey) {
204
- console.error(chalk.red("OpenAI API key required for Whisper transcription. Set OPENAI_API_KEY in .env or run: vibe setup"));
205
- console.error(chalk.dim("Use --api-key or set OPENAI_API_KEY"));
206
- process.exit(1);
207
- }
208
-
209
- const ext = extname(videoPath);
210
- const name = basename(videoPath, ext);
211
- const outputPath = options.output || `${name}-captioned${ext}`;
212
-
213
- const spinner = ora("Starting caption process...").start();
214
-
215
- const result = await executeCaption({
216
- videoPath: absVideoPath,
217
- outputPath: resolve(process.cwd(), outputPath),
218
- style: options.style as CaptionStyle,
219
- fontSize: options.fontSize ? parseInt(options.fontSize) : undefined,
220
- fontColor: options.color,
221
- language: options.language,
222
- position: options.position as "top" | "center" | "bottom",
223
- apiKey,
224
- });
225
-
226
- if (!result.success) {
227
- spinner.fail(chalk.red(result.error || "Caption failed"));
228
- process.exit(1);
229
- }
230
-
231
- spinner.succeed(chalk.green("Captions applied"));
232
-
233
- if (isJsonMode()) {
234
- outputResult({
235
- success: true,
236
- segmentCount: result.segmentCount,
237
- style: options.style || "bold",
238
- outputPath: result.outputPath,
239
- srtPath: result.srtPath,
240
- });
241
- return;
242
- }
243
-
244
- console.log();
245
- console.log(chalk.bold.cyan("Caption Result"));
246
- console.log(chalk.dim("─".repeat(60)));
247
- console.log(`Segments transcribed: ${chalk.bold(String(result.segmentCount))}`);
248
- console.log(`Style: ${chalk.bold(options.style || "bold")}`);
249
- console.log(`Output: ${chalk.green(result.outputPath!)}`);
250
- if (result.srtPath) {
251
- console.log(`SRT file: ${chalk.dim(result.srtPath)}`);
252
- }
253
- console.log();
254
- } catch (error) {
255
- console.error(chalk.red("Caption failed"));
256
- console.error(error);
257
- process.exit(1);
258
- }
259
- });
260
-
261
- // ============================================================================
262
- // Noise Reduce Command
263
- // ============================================================================
264
-
265
- aiCommand
266
- .command("noise-reduce")
267
- .description("Remove background noise from audio/video using FFmpeg (no API key needed)")
268
- .argument("<input>", "Audio or video file path")
269
- .option("-o, --output <path>", "Output file path (default: <name>-denoised.<ext>)")
270
- .option("-s, --strength <level>", "Noise reduction strength: low, medium, high (default: medium)", "medium")
271
- .option("-n, --noise-floor <dB>", "Custom noise floor in dB (overrides strength preset)")
272
- .option("--dry-run", "Preview parameters without executing")
273
- .action(async (inputPath: string, options) => {
274
- try {
275
- const absInputPath = resolve(process.cwd(), inputPath);
276
- if (!existsSync(absInputPath)) {
277
- console.error(chalk.red(`File not found: ${absInputPath}`));
278
- process.exit(1);
279
- }
280
-
281
- if (!commandExists("ffmpeg")) {
282
- console.error(chalk.red("FFmpeg not found. Please install FFmpeg."));
283
- process.exit(1);
284
- }
285
-
286
- if (options.dryRun) {
287
- outputResult({
288
- dryRun: true,
289
- command: "edit noise-reduce",
290
- params: {
291
- inputPath: absInputPath,
292
- strength: options.strength,
293
- noiseFloor: options.noiseFloor ? parseFloat(options.noiseFloor) : undefined,
294
- },
295
- });
296
- return;
297
- }
298
-
299
- const ext = extname(inputPath);
300
- const name = basename(inputPath, ext);
301
- const outputPath = options.output || `${name}-denoised${ext}`;
302
-
303
- const spinner = ora("Applying noise reduction...").start();
304
-
305
- const result = await executeNoiseReduce({
306
- inputPath: absInputPath,
307
- outputPath: resolve(process.cwd(), outputPath),
308
- strength: options.strength as "low" | "medium" | "high",
309
- noiseFloor: options.noiseFloor ? parseFloat(options.noiseFloor) : undefined,
310
- });
311
-
312
- if (!result.success) {
313
- spinner.fail(chalk.red(result.error || "Noise reduction failed"));
314
- process.exit(1);
315
- }
316
-
317
- spinner.succeed(chalk.green("Noise reduction complete"));
318
-
319
- if (isJsonMode()) {
320
- outputResult({
321
- success: true,
322
- inputDuration: result.inputDuration,
323
- strength: options.strength || "medium",
324
- outputPath: result.outputPath,
325
- });
326
- return;
327
- }
328
-
329
- console.log();
330
- console.log(chalk.bold.cyan("Noise Reduction Result"));
331
- console.log(chalk.dim("─".repeat(60)));
332
- console.log(`Input duration: ${chalk.bold(result.inputDuration!.toFixed(1))}s`);
333
- console.log(`Strength: ${chalk.bold(options.strength || "medium")}`);
334
- console.log(`Output: ${chalk.green(result.outputPath!)}`);
335
- console.log();
336
- } catch (error) {
337
- console.error(chalk.red("Noise reduction failed"));
338
- console.error(error);
339
- process.exit(1);
340
- }
341
- });
342
-
343
- // ============================================================================
344
- // Fade Command
345
- // ============================================================================
346
-
347
- aiCommand
348
- .command("fade")
349
- .description("Apply fade in/out effects to video (FFmpeg only, no API key needed)")
350
- .argument("<video>", "Video file path")
351
- .option("-o, --output <path>", "Output file path (default: <name>-faded.<ext>)")
352
- .option("--fade-in <seconds>", "Fade-in duration in seconds (default: 1)", "1")
353
- .option("--fade-out <seconds>", "Fade-out duration in seconds (default: 1)", "1")
354
- .option("--audio-only", "Apply fade to audio only (video stream copied)")
355
- .option("--video-only", "Apply fade to video only (audio stream copied)")
356
- .option("--dry-run", "Preview parameters without executing")
357
- .action(async (videoPath: string, options) => {
358
- try {
359
- const absVideoPath = resolve(process.cwd(), videoPath);
360
- if (!existsSync(absVideoPath)) {
361
- console.error(chalk.red(`Video not found: ${absVideoPath}`));
362
- process.exit(1);
363
- }
364
-
365
- if (!commandExists("ffmpeg")) {
366
- console.error(chalk.red("FFmpeg not found. Please install FFmpeg."));
367
- process.exit(1);
368
- }
369
-
370
- if (options.dryRun) {
371
- outputResult({
372
- dryRun: true,
373
- command: "edit fade",
374
- params: {
375
- videoPath: absVideoPath,
376
- fadeIn: parseFloat(options.fadeIn),
377
- fadeOut: parseFloat(options.fadeOut),
378
- audioOnly: options.audioOnly || false,
379
- videoOnly: options.videoOnly || false,
380
- },
381
- });
382
- return;
383
- }
384
-
385
- const ext = extname(videoPath);
386
- const name = basename(videoPath, ext);
387
- const outputPath = options.output || `${name}-faded${ext}`;
388
-
389
- const spinner = ora("Applying fade effects...").start();
390
-
391
- const result = await executeFade({
392
- videoPath: absVideoPath,
393
- outputPath: resolve(process.cwd(), outputPath),
394
- fadeIn: parseFloat(options.fadeIn),
395
- fadeOut: parseFloat(options.fadeOut),
396
- audioOnly: options.audioOnly || false,
397
- videoOnly: options.videoOnly || false,
398
- });
399
-
400
- if (!result.success) {
401
- spinner.fail(chalk.red(result.error || "Fade failed"));
402
- process.exit(1);
403
- }
404
-
405
- spinner.succeed(chalk.green("Fade effects applied"));
406
-
407
- if (isJsonMode()) {
408
- outputResult({
409
- success: true,
410
- totalDuration: result.totalDuration,
411
- fadeInApplied: result.fadeInApplied,
412
- fadeOutApplied: result.fadeOutApplied,
413
- outputPath: result.outputPath,
414
- });
415
- return;
416
- }
417
-
418
- console.log();
419
- console.log(chalk.bold.cyan("Fade Result"));
420
- console.log(chalk.dim("─".repeat(60)));
421
- console.log(`Total duration: ${chalk.bold(result.totalDuration!.toFixed(1))}s`);
422
- if (result.fadeInApplied) console.log(`Fade-in: ${chalk.bold(options.fadeIn)}s`);
423
- if (result.fadeOutApplied) console.log(`Fade-out: ${chalk.bold(options.fadeOut)}s`);
424
- console.log(`Output: ${chalk.green(result.outputPath!)}`);
425
- console.log();
426
- } catch (error) {
427
- console.error(chalk.red("Fade failed"));
428
- console.error(error);
429
- process.exit(1);
430
- }
431
- });
432
-
433
- // ============================================================================
434
- // Translate SRT Command
435
- // ============================================================================
436
-
437
- aiCommand
438
- .command("translate-srt")
439
- .description("Translate SRT subtitle file to another language (Claude/OpenAI)")
440
- .argument("<srt>", "SRT file path")
441
- .option("-t, --target <language>", "Target language (e.g., ko, es, fr, ja, zh)")
442
- .option("-o, --output <path>", "Output file path (default: <name>-<target>.srt)")
443
- .option("-p, --provider <provider>", "Translation provider: claude, openai (default: claude)", "claude")
444
- .option("--source <language>", "Source language (auto-detected if omitted)")
445
- .option("-k, --api-key <key>", "API key (or set ANTHROPIC_API_KEY / OPENAI_API_KEY env)")
446
- .option("--dry-run", "Preview parameters without executing")
447
- .action(async (srtPath: string, options) => {
448
- try {
449
- if (!options.target) {
450
- console.error(chalk.red("Target language required. Use -t or --target"));
451
- process.exit(1);
452
- }
453
-
454
- const absSrtPath = resolve(process.cwd(), srtPath);
455
- if (!existsSync(absSrtPath)) {
456
- console.error(chalk.red(`SRT file not found: ${absSrtPath}`));
457
- process.exit(1);
458
- }
459
-
460
- if (options.dryRun) {
461
- outputResult({
462
- dryRun: true,
463
- command: "edit translate-srt",
464
- params: {
465
- srtPath: absSrtPath,
466
- targetLanguage: options.target,
467
- provider: options.provider || "claude",
468
- sourceLanguage: options.source,
469
- },
470
- });
471
- return;
472
- }
473
-
474
- const provider = options.provider || "claude";
475
- const envKey = provider === "openai" ? "OPENAI_API_KEY" : "ANTHROPIC_API_KEY";
476
- const providerName = provider === "openai" ? "OpenAI" : "Anthropic";
477
-
478
- const apiKey = await getApiKey(envKey, providerName, options.apiKey);
479
- if (!apiKey) {
480
- console.error(chalk.red(`${providerName} API key required for translation. Set ${envKey} in .env or run: vibe setup`));
481
- console.error(chalk.dim(`Use --api-key or set ${envKey}`));
482
- process.exit(1);
483
- }
484
-
485
- const ext = extname(srtPath);
486
- const name = basename(srtPath, ext);
487
- const outputPath = options.output || `${name}-${options.target}${ext}`;
488
-
489
- const spinner = ora(`Translating to ${options.target}...`).start();
490
-
491
- const result = await executeTranslateSrt({
492
- srtPath: absSrtPath,
493
- outputPath: resolve(process.cwd(), outputPath),
494
- targetLanguage: options.target,
495
- provider: provider as "claude" | "openai",
496
- sourceLanguage: options.source,
497
- apiKey,
498
- });
499
-
500
- if (!result.success) {
501
- spinner.fail(chalk.red(result.error || "Translation failed"));
502
- process.exit(1);
503
- }
504
-
505
- spinner.succeed(chalk.green("Translation complete"));
506
-
507
- if (isJsonMode()) {
508
- outputResult({
509
- success: true,
510
- segmentCount: result.segmentCount,
511
- sourceLanguage: result.sourceLanguage,
512
- targetLanguage: result.targetLanguage,
513
- outputPath: result.outputPath,
514
- });
515
- return;
516
- }
517
-
518
- console.log();
519
- console.log(chalk.bold.cyan("Translation Result"));
520
- console.log(chalk.dim("─".repeat(60)));
521
- console.log(`Segments translated: ${chalk.bold(String(result.segmentCount))}`);
522
- if (result.sourceLanguage) console.log(`Source language: ${chalk.bold(result.sourceLanguage)}`);
523
- console.log(`Target language: ${chalk.bold(result.targetLanguage!)}`);
524
- console.log(`Output: ${chalk.green(result.outputPath!)}`);
525
- console.log();
526
- } catch (error) {
527
- console.error(chalk.red("Translation failed"));
528
- console.error(error);
529
- process.exit(1);
530
- }
531
- });
532
-
533
- // ============================================================================
534
- // Jump Cut Command
535
- // ============================================================================
536
-
537
- aiCommand
538
- .command("jump-cut")
539
- .description("Remove filler words (um, uh, like, etc.) from video using Whisper word-level timestamps")
540
- .argument("<video>", "Video file path")
541
- .option("-o, --output <path>", "Output file path (default: <name>-jumpcut.<ext>)")
542
- .option("--fillers <words>", "Comma-separated filler words to detect")
543
- .option("--padding <seconds>", "Padding around cuts in seconds (default: 0.05)", "0.05")
544
- .option("-l, --language <lang>", "Language code for transcription (e.g., en, ko)")
545
- .option("--analyze-only", "Only detect fillers, don't cut")
546
- .option("-k, --api-key <key>", "OpenAI API key (or set OPENAI_API_KEY env)")
547
- .option("--dry-run", "Preview parameters without executing")
548
- .action(async (videoPath: string, options) => {
549
- try {
550
- if (options.fillers) rejectControlChars(options.fillers);
551
-
552
- const absVideoPath = resolve(process.cwd(), videoPath);
553
- if (!existsSync(absVideoPath)) {
554
- console.error(chalk.red(`Video not found: ${absVideoPath}`));
555
- process.exit(1);
556
- }
557
-
558
- // Check FFmpeg
559
- if (!commandExists("ffmpeg")) {
560
- console.error(chalk.red("FFmpeg not found. Please install FFmpeg."));
561
- process.exit(1);
562
- }
563
-
564
- if (options.dryRun) {
565
- const fillers = options.fillers
566
- ? options.fillers.split(",").map((f: string) => f.trim())
567
- : undefined;
568
- outputResult({
569
- dryRun: true,
570
- command: "edit jump-cut",
571
- params: {
572
- videoPath: absVideoPath,
573
- fillers,
574
- padding: parseFloat(options.padding),
575
- language: options.language,
576
- analyzeOnly: options.analyzeOnly || false,
577
- },
578
- });
579
- return;
580
- }
581
-
582
- const apiKey = await getApiKey("OPENAI_API_KEY", "OpenAI", options.apiKey);
583
- if (!apiKey) {
584
- console.error(chalk.red("OpenAI API key required for Whisper transcription. Set OPENAI_API_KEY in .env or run: vibe setup"));
585
- console.error(chalk.dim("Use --api-key or set OPENAI_API_KEY"));
586
- process.exit(1);
587
- }
588
-
589
- const ext = extname(videoPath);
590
- const name = basename(videoPath, ext);
591
- const outputPath = options.output || `${name}-jumpcut${ext}`;
592
-
593
- const fillers = options.fillers
594
- ? options.fillers.split(",").map((f: string) => f.trim())
595
- : undefined;
596
-
597
- const spinner = ora("Transcribing with word-level timestamps...").start();
598
-
599
- const result = await executeJumpCut({
600
- videoPath: absVideoPath,
601
- outputPath: resolve(process.cwd(), outputPath),
602
- fillers,
603
- padding: parseFloat(options.padding),
604
- language: options.language,
605
- analyzeOnly: options.analyzeOnly || false,
606
- apiKey,
607
- });
608
-
609
- if (!result.success) {
610
- spinner.fail(chalk.red(result.error || "Jump cut failed"));
611
- process.exit(1);
612
- }
613
-
614
- spinner.succeed(chalk.green("Filler detection complete"));
615
-
616
- if (isJsonMode()) {
617
- outputResult({
618
- success: true,
619
- totalDuration: result.totalDuration,
620
- fillerCount: result.fillerCount,
621
- fillerDuration: result.fillerDuration,
622
- fillers: result.fillers,
623
- outputPath: result.outputPath,
624
- });
625
- return;
626
- }
627
-
628
- console.log();
629
- console.log(chalk.bold.cyan("Filler Word Analysis"));
630
- console.log(chalk.dim("-".repeat(60)));
631
- console.log(`Total duration: ${chalk.bold(result.totalDuration!.toFixed(1))}s`);
632
- console.log(`Filler words found: ${chalk.bold(String(result.fillerCount))}`);
633
- console.log(`Filler duration: ${chalk.bold(result.fillerDuration!.toFixed(1))}s`);
634
- console.log(`Clean duration: ${chalk.bold((result.totalDuration! - result.fillerDuration!).toFixed(1))}s`);
635
-
636
- if (result.fillers && result.fillers.length > 0) {
637
- console.log();
638
- console.log(chalk.dim("Detected fillers:"));
639
- for (const filler of result.fillers) {
640
- console.log(chalk.dim(` "${filler.word}" at ${filler.start.toFixed(2)}s - ${filler.end.toFixed(2)}s`));
641
- }
642
- }
643
-
644
- if (!options.analyzeOnly && result.outputPath) {
645
- console.log();
646
- console.log(chalk.green(`Output: ${result.outputPath}`));
647
- console.log(chalk.dim(`Removed ${result.fillerDuration!.toFixed(1)}s of filler words`));
648
- }
649
- console.log();
650
- } catch (error) {
651
- console.error(chalk.red("Jump cut failed"));
652
- console.error(error);
653
- process.exit(1);
654
- }
655
- });
656
-
657
-
658
- }