@vibeframe/cli 0.27.0 → 0.30.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (118)
  1. package/LICENSE +21 -0
  2. package/dist/agent/adapters/index.d.ts +1 -0
  3. package/dist/agent/adapters/index.d.ts.map +1 -1
  4. package/dist/agent/adapters/index.js +5 -0
  5. package/dist/agent/adapters/index.js.map +1 -1
  6. package/dist/agent/adapters/openrouter.d.ts +16 -0
  7. package/dist/agent/adapters/openrouter.d.ts.map +1 -0
  8. package/dist/agent/adapters/openrouter.js +100 -0
  9. package/dist/agent/adapters/openrouter.js.map +1 -0
  10. package/dist/agent/types.d.ts +1 -1
  11. package/dist/agent/types.d.ts.map +1 -1
  12. package/dist/commands/agent.d.ts.map +1 -1
  13. package/dist/commands/agent.js +3 -1
  14. package/dist/commands/agent.js.map +1 -1
  15. package/dist/commands/ai-edit-cli.d.ts.map +1 -1
  16. package/dist/commands/ai-edit-cli.js +18 -0
  17. package/dist/commands/ai-edit-cli.js.map +1 -1
  18. package/dist/commands/generate.js +14 -0
  19. package/dist/commands/generate.js.map +1 -1
  20. package/dist/commands/schema.d.ts +1 -0
  21. package/dist/commands/schema.d.ts.map +1 -1
  22. package/dist/commands/schema.js +122 -21
  23. package/dist/commands/schema.js.map +1 -1
  24. package/dist/commands/setup.js +5 -2
  25. package/dist/commands/setup.js.map +1 -1
  26. package/dist/config/schema.d.ts +2 -1
  27. package/dist/config/schema.d.ts.map +1 -1
  28. package/dist/config/schema.js +2 -0
  29. package/dist/config/schema.js.map +1 -1
  30. package/dist/index.js +0 -0
  31. package/package.json +16 -12
  32. package/.turbo/turbo-build.log +0 -4
  33. package/.turbo/turbo-lint.log +0 -21
  34. package/.turbo/turbo-test.log +0 -689
  35. package/src/agent/adapters/claude.ts +0 -143
  36. package/src/agent/adapters/gemini.ts +0 -159
  37. package/src/agent/adapters/index.ts +0 -61
  38. package/src/agent/adapters/ollama.ts +0 -231
  39. package/src/agent/adapters/openai.ts +0 -116
  40. package/src/agent/adapters/xai.ts +0 -119
  41. package/src/agent/index.ts +0 -251
  42. package/src/agent/memory/index.ts +0 -151
  43. package/src/agent/prompts/system.ts +0 -106
  44. package/src/agent/tools/ai-editing.ts +0 -845
  45. package/src/agent/tools/ai-generation.ts +0 -1073
  46. package/src/agent/tools/ai-pipeline.ts +0 -1055
  47. package/src/agent/tools/ai.ts +0 -21
  48. package/src/agent/tools/batch.ts +0 -429
  49. package/src/agent/tools/e2e.test.ts +0 -545
  50. package/src/agent/tools/export.ts +0 -184
  51. package/src/agent/tools/filesystem.ts +0 -237
  52. package/src/agent/tools/index.ts +0 -150
  53. package/src/agent/tools/integration.test.ts +0 -775
  54. package/src/agent/tools/media.ts +0 -697
  55. package/src/agent/tools/project.ts +0 -313
  56. package/src/agent/tools/timeline.ts +0 -951
  57. package/src/agent/types.ts +0 -68
  58. package/src/commands/agent.ts +0 -340
  59. package/src/commands/ai-analyze.ts +0 -429
  60. package/src/commands/ai-animated-caption.ts +0 -390
  61. package/src/commands/ai-audio.ts +0 -941
  62. package/src/commands/ai-broll.ts +0 -490
  63. package/src/commands/ai-edit-cli.ts +0 -658
  64. package/src/commands/ai-edit.ts +0 -1542
  65. package/src/commands/ai-fill-gaps.ts +0 -566
  66. package/src/commands/ai-helpers.ts +0 -65
  67. package/src/commands/ai-highlights.ts +0 -1303
  68. package/src/commands/ai-image.ts +0 -761
  69. package/src/commands/ai-motion.ts +0 -347
  70. package/src/commands/ai-narrate.ts +0 -451
  71. package/src/commands/ai-review.ts +0 -309
  72. package/src/commands/ai-script-pipeline-cli.ts +0 -1710
  73. package/src/commands/ai-script-pipeline.ts +0 -1365
  74. package/src/commands/ai-suggest-edit.ts +0 -264
  75. package/src/commands/ai-video-fx.ts +0 -445
  76. package/src/commands/ai-video.ts +0 -915
  77. package/src/commands/ai-viral.ts +0 -595
  78. package/src/commands/ai-visual-fx.ts +0 -601
  79. package/src/commands/ai.test.ts +0 -627
  80. package/src/commands/ai.ts +0 -307
  81. package/src/commands/analyze.ts +0 -282
  82. package/src/commands/audio.ts +0 -644
  83. package/src/commands/batch.test.ts +0 -279
  84. package/src/commands/batch.ts +0 -440
  85. package/src/commands/detect.ts +0 -329
  86. package/src/commands/doctor.ts +0 -237
  87. package/src/commands/edit-cmd.ts +0 -1014
  88. package/src/commands/export.ts +0 -918
  89. package/src/commands/generate.ts +0 -2146
  90. package/src/commands/media.ts +0 -177
  91. package/src/commands/output.ts +0 -142
  92. package/src/commands/pipeline.ts +0 -398
  93. package/src/commands/project.test.ts +0 -127
  94. package/src/commands/project.ts +0 -149
  95. package/src/commands/sanitize.ts +0 -60
  96. package/src/commands/schema.ts +0 -130
  97. package/src/commands/setup.ts +0 -509
  98. package/src/commands/timeline.test.ts +0 -499
  99. package/src/commands/timeline.ts +0 -529
  100. package/src/commands/validate.ts +0 -77
  101. package/src/config/config.test.ts +0 -197
  102. package/src/config/index.ts +0 -125
  103. package/src/config/schema.ts +0 -82
  104. package/src/engine/index.ts +0 -2
  105. package/src/engine/project.test.ts +0 -702
  106. package/src/engine/project.ts +0 -439
  107. package/src/index.ts +0 -146
  108. package/src/utils/api-key.test.ts +0 -41
  109. package/src/utils/api-key.ts +0 -247
  110. package/src/utils/audio.ts +0 -83
  111. package/src/utils/exec-safe.ts +0 -75
  112. package/src/utils/first-run.ts +0 -52
  113. package/src/utils/provider-resolver.ts +0 -56
  114. package/src/utils/remotion.ts +0 -951
  115. package/src/utils/subtitle.test.ts +0 -227
  116. package/src/utils/subtitle.ts +0 -169
  117. package/src/utils/tty.ts +0 -196
  118. package/tsconfig.json +0 -20
@@ -1,918 +0,0 @@
1
- import { Command } from "commander";
2
- import { readFile, access, stat } from "node:fs/promises";
3
- import { resolve, basename } from "node:path";
4
- import { spawn } from "node:child_process";
5
- import chalk from "chalk";
6
- import ora from "ora";
7
- import { Project, type ProjectFile } from "../engine/index.js";
8
- import { execSafe, ffprobeDuration } from "../utils/exec-safe.js";
9
-
10
- /**
11
- * Resolve project file path - handles both file paths and directory paths
12
- * If path is a directory, looks for project.vibe.json inside
13
- */
14
- async function resolveProjectPath(inputPath: string): Promise<string> {
15
- const filePath = resolve(process.cwd(), inputPath);
16
-
17
- try {
18
- const stats = await stat(filePath);
19
- if (stats.isDirectory()) {
20
- return resolve(filePath, "project.vibe.json");
21
- }
22
- } catch {
23
- // Path doesn't exist or other error - let readFile handle it
24
- }
25
-
26
- return filePath;
27
- }
28
-
29
- /**
30
- * Get the duration of a media file using ffprobe
31
- * For images, returns a default duration since they have no inherent time
32
- */
33
- export async function getMediaDuration(
34
- filePath: string,
35
- mediaType: "video" | "audio" | "image",
36
- defaultImageDuration: number = 5
37
- ): Promise<number> {
38
- if (mediaType === "image") {
39
- return defaultImageDuration;
40
- }
41
-
42
- try {
43
- return await ffprobeDuration(filePath);
44
- } catch {
45
- return defaultImageDuration;
46
- }
47
- }
48
-
49
- /**
50
- * Check if a media file has an audio stream
51
- */
52
- export async function checkHasAudio(filePath: string): Promise<boolean> {
53
- try {
54
- const { stdout } = await execSafe("ffprobe", [
55
- "-v", "error", "-select_streams", "a", "-show_entries", "stream=codec_type", "-of", "default=noprint_wrappers=1:nokey=1", filePath,
56
- ]);
57
- return stdout.trim().length > 0;
58
- } catch {
59
- return false;
60
- }
61
- }
62
-
63
- /**
64
- * Export result for programmatic usage
65
- */
66
- export interface ExportResult {
67
- success: boolean;
68
- message: string;
69
- outputPath?: string;
70
- }
71
-
72
- /**
73
- * Gap filling strategy for timeline gaps
74
- * - "black": Fill gaps with black frames (fallback)
75
- * - "extend": Extend adjacent clips using source media if available
76
- */
77
- export type GapFillStrategy = "black" | "extend";
78
-
79
- /**
80
- * Export options
81
- */
82
- export interface ExportOptions {
83
- preset?: "draft" | "standard" | "high" | "ultra";
84
- format?: "mp4" | "webm" | "mov";
85
- overwrite?: boolean;
86
- gapFill?: GapFillStrategy;
87
- }
88
-
89
- /**
90
- * Reusable export function for programmatic usage
91
- */
92
- export async function runExport(
93
- projectPath: string,
94
- outputPath: string,
95
- options: ExportOptions = {}
96
- ): Promise<ExportResult> {
97
- const { preset = "standard", format = "mp4", overwrite = false, gapFill = "extend" } = options;
98
-
99
- try {
100
- // Check if FFmpeg is installed
101
- const ffmpegPath = await findFFmpeg();
102
- if (!ffmpegPath) {
103
- return {
104
- success: false,
105
- message: "FFmpeg not found. Install with: brew install ffmpeg (macOS) or apt install ffmpeg (Linux)",
106
- };
107
- }
108
-
109
- // Load project
110
- const filePath = await resolveProjectPath(projectPath);
111
- const content = await readFile(filePath, "utf-8");
112
- const data: ProjectFile = JSON.parse(content);
113
- const project = Project.fromJSON(data);
114
-
115
- const summary = project.getSummary();
116
-
117
- if (summary.clipCount === 0) {
118
- return {
119
- success: false,
120
- message: "Project has no clips to export",
121
- };
122
- }
123
-
124
- // Determine output path
125
- const finalOutputPath = resolve(process.cwd(), outputPath);
126
-
127
- // Get preset settings
128
- const presetSettings = getPresetSettings(preset, summary.aspectRatio);
129
-
130
- // Get clips sorted by start time
131
- const clips = project.getClips().sort((a, b) => a.startTime - b.startTime);
132
- const sources = project.getSources();
133
-
134
- // Verify source files exist and check for audio streams
135
- const sourceAudioMap = new Map<string, boolean>();
136
- for (const clip of clips) {
137
- const source = sources.find((s) => s.id === clip.sourceId);
138
- if (source) {
139
- try {
140
- await access(source.url);
141
- // Check if video source has audio
142
- if (source.type === "video" && !sourceAudioMap.has(source.id)) {
143
- sourceAudioMap.set(source.id, await checkHasAudio(source.url));
144
- }
145
- } catch {
146
- return {
147
- success: false,
148
- message: `Source file not found: ${source.url}`,
149
- };
150
- }
151
- }
152
- }
153
-
154
- // Build FFmpeg command
155
- const ffmpegArgs = buildFFmpegArgs(clips, sources, presetSettings, finalOutputPath, { overwrite, format, gapFill }, sourceAudioMap);
156
-
157
- // Run FFmpeg
158
- await runFFmpegProcess(ffmpegPath, ffmpegArgs, () => {});
159
-
160
- return {
161
- success: true,
162
- message: `Exported: ${outputPath}`,
163
- outputPath: finalOutputPath,
164
- };
165
- } catch (error) {
166
- const errorMessage = error instanceof Error ? error.message : String(error);
167
- return {
168
- success: false,
169
- message: `Export failed: ${errorMessage}`,
170
- };
171
- }
172
- }
173
-
174
- export const exportCommand = new Command("export")
175
- .description("Export project to video file")
176
- .argument("<project>", "Project file path")
177
- .option("-o, --output <path>", "Output file path")
178
- .option("-f, --format <format>", "Output format (mp4, webm, mov)", "mp4")
179
- .option(
180
- "-p, --preset <preset>",
181
- "Quality preset (draft, standard, high, ultra)",
182
- "standard"
183
- )
184
- .option("-y, --overwrite", "Overwrite output file if exists", false)
185
- .option("-g, --gap-fill <strategy>", "Gap filling strategy (black, extend)", "extend")
186
- .action(async (projectPath: string, options) => {
187
- const spinner = ora("Checking FFmpeg...").start();
188
-
189
- try {
190
- // Check if FFmpeg is installed
191
- const ffmpegPath = await findFFmpeg();
192
- if (!ffmpegPath) {
193
- spinner.fail(chalk.red("FFmpeg not found"));
194
- console.error();
195
- console.error(chalk.yellow("Please install FFmpeg:"));
196
- console.error(chalk.dim(" macOS: brew install ffmpeg"));
197
- console.error(chalk.dim(" Ubuntu: sudo apt install ffmpeg"));
198
- console.error(chalk.dim(" Windows: winget install ffmpeg"));
199
- process.exit(1);
200
- }
201
-
202
- // Load project
203
- spinner.text = "Loading project...";
204
- const filePath = await resolveProjectPath(projectPath);
205
- const content = await readFile(filePath, "utf-8");
206
- const data: ProjectFile = JSON.parse(content);
207
- const project = Project.fromJSON(data);
208
-
209
- const summary = project.getSummary();
210
-
211
- if (summary.clipCount === 0) {
212
- spinner.fail(chalk.red("Project has no clips to export"));
213
- process.exit(1);
214
- }
215
-
216
- // Determine output path
217
- const outputPath = options.output
218
- ? resolve(process.cwd(), options.output)
219
- : resolve(
220
- process.cwd(),
221
- `${basename(projectPath, ".vibe.json")}.${options.format}`
222
- );
223
-
224
- // Get preset settings
225
- const presetSettings = getPresetSettings(options.preset, summary.aspectRatio);
226
-
227
- // Get clips sorted by start time
228
- const clips = project.getClips().sort((a, b) => a.startTime - b.startTime);
229
- const sources = project.getSources();
230
-
231
- // Verify source files exist and check for audio streams
232
- spinner.text = "Verifying source files...";
233
- const sourceAudioMap = new Map<string, boolean>();
234
- for (const clip of clips) {
235
- const source = sources.find((s) => s.id === clip.sourceId);
236
- if (source) {
237
- try {
238
- await access(source.url);
239
- // Check if video source has audio
240
- if (source.type === "video" && !sourceAudioMap.has(source.id)) {
241
- sourceAudioMap.set(source.id, await checkHasAudio(source.url));
242
- }
243
- } catch {
244
- spinner.fail(chalk.red(`Source file not found: ${source.url}`));
245
- process.exit(1);
246
- }
247
- }
248
- }
249
-
250
- // Build FFmpeg command
251
- spinner.text = "Building export command...";
252
- const gapFillStrategy = (options.gapFill === "black" ? "black" : "extend") as GapFillStrategy;
253
- const ffmpegArgs = buildFFmpegArgs(clips, sources, presetSettings, outputPath, { ...options, gapFill: gapFillStrategy }, sourceAudioMap);
254
-
255
- if (process.env.DEBUG) {
256
- console.log("\nFFmpeg command:");
257
- console.log("ffmpeg", ffmpegArgs.join(" "));
258
- console.log();
259
- }
260
-
261
- // Run FFmpeg
262
- spinner.text = "Encoding...";
263
-
264
- await runFFmpegProcess(ffmpegPath, ffmpegArgs, (progress) => {
265
- spinner.text = `Encoding... ${progress}%`;
266
- });
267
-
268
- spinner.succeed(chalk.green(`Exported: ${outputPath}`));
269
-
270
- console.log();
271
- console.log(chalk.dim(" Duration:"), `${summary.duration.toFixed(1)}s`);
272
- console.log(chalk.dim(" Clips:"), summary.clipCount);
273
- console.log(chalk.dim(" Format:"), options.format);
274
- console.log(chalk.dim(" Preset:"), options.preset);
275
- console.log(chalk.dim(" Resolution:"), presetSettings.resolution);
276
- console.log();
277
- } catch (error) {
278
- spinner.fail(chalk.red("Export failed"));
279
- if (error instanceof Error) {
280
- console.error(chalk.red(error.message));
281
- if (process.env.DEBUG) {
282
- console.error(error.stack);
283
- }
284
- }
285
- process.exit(1);
286
- }
287
- });
288
-
289
- /**
290
- * Find FFmpeg executable
291
- */
292
- async function findFFmpeg(): Promise<string | null> {
293
- try {
294
- const { stdout } = await execSafe("which", ["ffmpeg"]);
295
- return stdout.trim().split("\n")[0];
296
- } catch {
297
- try {
298
- const { stdout } = await execSafe("where", ["ffmpeg"]);
299
- return stdout.trim().split("\n")[0];
300
- } catch {
301
- return null;
302
- }
303
- }
304
- }
305
-
306
- /**
307
- * Detect gaps in timeline between clips
308
- * Returns array of gaps with start and end times
309
- */
310
- function detectTimelineGaps(
311
- clips: Array<{ startTime: number; duration: number }>,
312
- totalDuration?: number
313
- ): Array<{ start: number; end: number }> {
314
- if (clips.length === 0) return [];
315
-
316
- const gaps: Array<{ start: number; end: number }> = [];
317
- const sortedClips = [...clips].sort((a, b) => a.startTime - b.startTime);
318
-
319
- // Check for gap at the start (first clip doesn't start at 0)
320
- if (sortedClips[0].startTime > 0.001) {
321
- gaps.push({ start: 0, end: sortedClips[0].startTime });
322
- }
323
-
324
- // Check for gaps between clips
325
- for (let i = 0; i < sortedClips.length - 1; i++) {
326
- const clipEnd = sortedClips[i].startTime + sortedClips[i].duration;
327
- const nextStart = sortedClips[i + 1].startTime;
328
- // Allow small tolerance for floating point errors
329
- if (nextStart > clipEnd + 0.001) {
330
- gaps.push({ start: clipEnd, end: nextStart });
331
- }
332
- }
333
-
334
- // Check for gap at the end if totalDuration is provided
335
- if (totalDuration !== undefined) {
336
- const lastClip = sortedClips[sortedClips.length - 1];
337
- const lastClipEnd = lastClip.startTime + lastClip.duration;
338
- if (totalDuration > lastClipEnd + 0.001) {
339
- gaps.push({ start: lastClipEnd, end: totalDuration });
340
- }
341
- }
342
-
343
- return gaps;
344
- }
345
-
346
- /**
347
- * Gap fill plan for a single gap
348
- */
349
- interface GapFillPlan {
350
- gap: { start: number; end: number };
351
- fills: Array<{
352
- type: "extend-before" | "extend-after" | "black";
353
- sourceId?: string;
354
- sourceUrl?: string;
355
- start: number;
356
- end: number;
357
- sourceStart?: number;
358
- sourceEnd?: number;
359
- }>;
360
- }
361
-
362
- /**
363
- * Create gap fill plans by extending adjacent clips
364
- * Priority:
365
- * 1. Extend clip AFTER the gap backwards (if sourceStartOffset > 0)
366
- * 2. Extend clip BEFORE the gap forwards (if source has unused duration)
367
- * 3. Fallback to black frames
368
- */
369
- function createGapFillPlans(
370
- gaps: Array<{ start: number; end: number }>,
371
- clips: Array<{ startTime: number; duration: number; sourceId: string; sourceStartOffset: number; sourceEndOffset: number }>,
372
- sources: Array<{ id: string; url: string; type: string; duration: number }>
373
- ): GapFillPlan[] {
374
- const sortedClips = [...clips].sort((a, b) => a.startTime - b.startTime);
375
-
376
- return gaps.map((gap) => {
377
- const fills: GapFillPlan["fills"] = [];
378
- let remainingStart = gap.start;
379
- let remainingEnd = gap.end;
380
-
381
- // Find clip AFTER the gap (for extending backwards)
382
- const clipAfter = sortedClips.find((c) => Math.abs(c.startTime - gap.end) < 0.01);
383
-
384
- // Find clip BEFORE the gap (for extending forwards)
385
- const clipBefore = sortedClips.find((c) => Math.abs((c.startTime + c.duration) - gap.start) < 0.01);
386
-
387
- // Try extending clip after the gap backwards first
388
- if (clipAfter && clipAfter.sourceStartOffset > 0.01) {
389
- const source = sources.find((s) => s.id === clipAfter.sourceId);
390
- if (source && source.type === "video") {
391
- const availableExtension = clipAfter.sourceStartOffset;
392
- const extensionDuration = Math.min(availableExtension, remainingEnd - remainingStart);
393
-
394
- if (extensionDuration > 0.01) {
395
- // Extend from the gap end backwards
396
- const fillStart = remainingEnd - extensionDuration;
397
- const sourceStart = clipAfter.sourceStartOffset - extensionDuration;
398
- const sourceEnd = clipAfter.sourceStartOffset;
399
-
400
- fills.push({
401
- type: "extend-after",
402
- sourceId: source.id,
403
- sourceUrl: source.url,
404
- start: fillStart,
405
- end: remainingEnd,
406
- sourceStart,
407
- sourceEnd,
408
- });
409
-
410
- remainingEnd = fillStart;
411
- }
412
- }
413
- }
414
-
415
- // If there's still a gap, try extending clip before the gap forwards
416
- if (remainingEnd - remainingStart > 0.01 && clipBefore) {
417
- const source = sources.find((s) => s.id === clipBefore.sourceId);
418
- if (source && source.type === "video") {
419
- const usedEndInSource = clipBefore.sourceEndOffset;
420
- const availableExtension = source.duration - usedEndInSource;
421
-
422
- if (availableExtension > 0.01) {
423
- const extensionDuration = Math.min(availableExtension, remainingEnd - remainingStart);
424
-
425
- if (extensionDuration > 0.01) {
426
- const sourceStart = usedEndInSource;
427
- const sourceEnd = usedEndInSource + extensionDuration;
428
-
429
- fills.push({
430
- type: "extend-before",
431
- sourceId: source.id,
432
- sourceUrl: source.url,
433
- start: remainingStart,
434
- end: remainingStart + extensionDuration,
435
- sourceStart,
436
- sourceEnd,
437
- });
438
-
439
- remainingStart = remainingStart + extensionDuration;
440
- }
441
- }
442
- }
443
- }
444
-
445
- // Fill any remaining gap with black
446
- if (remainingEnd - remainingStart > 0.01) {
447
- fills.push({
448
- type: "black",
449
- start: remainingStart,
450
- end: remainingEnd,
451
- });
452
- }
453
-
454
- return { gap, fills };
455
- });
456
- }
457
-
458
- /**
459
- * Build FFmpeg arguments for export
460
- */
461
- function buildFFmpegArgs(
462
- clips: ReturnType<Project["getClips"]>,
463
- sources: ReturnType<Project["getSources"]>,
464
- presetSettings: PresetSettings,
465
- outputPath: string,
466
- options: { overwrite?: boolean; format?: string; gapFill?: GapFillStrategy },
467
- sourceAudioMap: Map<string, boolean> = new Map()
468
- ): string[] {
469
- const args: string[] = [];
470
-
471
- // Overwrite flag first
472
- if (options.overwrite) {
473
- args.push("-y");
474
- }
475
-
476
- // Add input files
477
- const sourceMap = new Map<string, number>();
478
- let inputIndex = 0;
479
-
480
- for (const clip of clips) {
481
- const source = sources.find((s) => s.id === clip.sourceId);
482
- if (source && !sourceMap.has(source.id)) {
483
- // Add -loop 1 before image inputs to create a continuous video stream
484
- if (source.type === "image") {
485
- args.push("-loop", "1");
486
- }
487
- args.push("-i", source.url);
488
- sourceMap.set(source.id, inputIndex);
489
- inputIndex++;
490
- }
491
- }
492
-
493
- // Build filter complex
494
- const filterParts: string[] = [];
495
-
496
- // Separate clips by track type for proper timeline-based export
497
- // Get track info to determine clip types
498
- const videoClips = clips.filter((clip) => {
499
- const source = sources.find((s) => s.id === clip.sourceId);
500
- return source && (source.type === "image" || source.type === "video");
501
- }).sort((a, b) => a.startTime - b.startTime);
502
-
503
- // Include audio clips from:
504
- // 1. Explicit audio sources (narration, music)
505
- // 2. Video sources when there are NO separate audio clips (e.g., highlight reels)
506
- const explicitAudioClips = clips.filter((clip) => {
507
- const source = sources.find((s) => s.id === clip.sourceId);
508
- return source && source.type === "audio";
509
- }).sort((a, b) => a.startTime - b.startTime);
510
-
511
- // If no explicit audio clips, extract audio from video clips
512
- const audioClips = explicitAudioClips.length > 0
513
- ? explicitAudioClips
514
- : clips.filter((clip) => {
515
- const source = sources.find((s) => s.id === clip.sourceId);
516
- return source && source.type === "video";
517
- }).sort((a, b) => a.startTime - b.startTime);
518
-
519
- // Get target resolution for scaling (all clips must match for concat)
520
- const [targetWidth, targetHeight] = presetSettings.resolution.split("x").map(Number);
521
-
522
- // Detect gaps in video timeline
523
- // For totalDuration, use the longest audio clip end time if explicit audio exists
524
- // (audio is usually the reference for timing in b-roll scenarios)
525
- let totalDuration: number | undefined;
526
- if (explicitAudioClips.length > 0) {
527
- const audioEnd = Math.max(...explicitAudioClips.map(c => c.startTime + c.duration));
528
- totalDuration = audioEnd;
529
- }
530
- const videoGaps = detectTimelineGaps(videoClips, totalDuration);
531
-
532
- // Create gap fill plans based on strategy
533
- const gapFillStrategy = options.gapFill || "extend";
534
- const gapFillPlans = gapFillStrategy === "extend"
535
- ? createGapFillPlans(videoGaps, videoClips, sources)
536
- : videoGaps.map((gap) => ({
537
- gap,
538
- fills: [{ type: "black" as const, start: gap.start, end: gap.end }],
539
- }));
540
-
541
- // Build ordered list of video segments (clips and gap fills interleaved)
542
- interface VideoSegment {
543
- type: 'clip' | 'extended' | 'black';
544
- clip?: typeof videoClips[0];
545
- sourceId?: string;
546
- sourceUrl?: string;
547
- startTime: number;
548
- duration?: number;
549
- sourceStart?: number;
550
- sourceEnd?: number;
551
- }
552
- const videoSegments: VideoSegment[] = [];
553
-
554
- // Add video clips as segments
555
- for (const clip of videoClips) {
556
- videoSegments.push({ type: 'clip', clip, startTime: clip.startTime });
557
- }
558
-
559
- // Add gap fills as segments (from gap fill plans)
560
- for (const plan of gapFillPlans) {
561
- for (const fill of plan.fills) {
562
- if (fill.type === "black") {
563
- videoSegments.push({
564
- type: 'black',
565
- startTime: fill.start,
566
- duration: fill.end - fill.start,
567
- });
568
- } else {
569
- // extend-before or extend-after
570
- videoSegments.push({
571
- type: 'extended',
572
- sourceId: fill.sourceId,
573
- sourceUrl: fill.sourceUrl,
574
- startTime: fill.start,
575
- duration: fill.end - fill.start,
576
- sourceStart: fill.sourceStart,
577
- sourceEnd: fill.sourceEnd,
578
- });
579
- }
580
- }
581
- }
582
-
583
- // Sort by start time
584
- videoSegments.sort((a, b) => a.startTime - b.startTime);
585
-
586
- // Process video segments (clips, extended clips, and black frames)
587
- const videoStreams: string[] = [];
588
- let videoStreamIdx = 0;
589
-
590
- for (const segment of videoSegments) {
591
- if (segment.type === 'clip' && segment.clip) {
592
- const clip = segment.clip;
593
- const source = sources.find((s) => s.id === clip.sourceId);
594
- if (!source) continue;
595
-
596
- const srcIdx = sourceMap.get(source.id);
597
- if (srcIdx === undefined) continue;
598
-
599
- // Video filter chain - images need different handling than video
600
- let videoFilter: string;
601
- if (source.type === "image") {
602
- // Images: trim from 0 to clip duration (no source offset since images are looped)
603
- videoFilter = `[${srcIdx}:v]trim=start=0:end=${clip.duration},setpts=PTS-STARTPTS`;
604
- } else {
605
- // Video: use source offsets
606
- const trimStart = clip.sourceStartOffset;
607
- const trimEnd = clip.sourceStartOffset + clip.duration;
608
- videoFilter = `[${srcIdx}:v]trim=start=${trimStart}:end=${trimEnd},setpts=PTS-STARTPTS`;
609
- }
610
-
611
- // Scale to target resolution for concat compatibility (force same size, pad if needed)
612
- videoFilter += `,scale=${targetWidth}:${targetHeight}:force_original_aspect_ratio=decrease,pad=${targetWidth}:${targetHeight}:(ow-iw)/2:(oh-ih)/2,setsar=1`;
613
-
614
- // Apply effects
615
- for (const effect of clip.effects || []) {
616
- if (effect.type === "fadeIn") {
617
- videoFilter += `,fade=t=in:st=0:d=${effect.duration}`;
618
- } else if (effect.type === "fadeOut") {
619
- const fadeStart = clip.duration - effect.duration;
620
- videoFilter += `,fade=t=out:st=${fadeStart}:d=${effect.duration}`;
621
- }
622
- }
623
-
624
- videoFilter += `[v${videoStreamIdx}]`;
625
- filterParts.push(videoFilter);
626
- videoStreams.push(`[v${videoStreamIdx}]`);
627
- videoStreamIdx++;
628
- } else if (segment.type === 'extended' && segment.sourceId) {
629
- // Extended segment - use source video to fill gap
630
- const srcIdx = sourceMap.get(segment.sourceId);
631
- if (srcIdx === undefined) {
632
- // Fallback to black if source not found in input map
633
- const gapFilter = `color=c=black:s=${targetWidth}x${targetHeight}:d=${segment.duration}:r=30,format=yuv420p[v${videoStreamIdx}]`;
634
- filterParts.push(gapFilter);
635
- videoStreams.push(`[v${videoStreamIdx}]`);
636
- videoStreamIdx++;
637
- continue;
638
- }
639
-
640
- const videoFilter = `[${srcIdx}:v]trim=start=${segment.sourceStart}:end=${segment.sourceEnd},setpts=PTS-STARTPTS,scale=${targetWidth}:${targetHeight}:force_original_aspect_ratio=decrease,pad=${targetWidth}:${targetHeight}:(ow-iw)/2:(oh-ih)/2,setsar=1[v${videoStreamIdx}]`;
641
- filterParts.push(videoFilter);
642
- videoStreams.push(`[v${videoStreamIdx}]`);
643
- videoStreamIdx++;
644
- } else if (segment.type === 'black') {
645
- // Generate black frame for the gap duration
646
- const gapFilter = `color=c=black:s=${targetWidth}x${targetHeight}:d=${segment.duration}:r=30,format=yuv420p[v${videoStreamIdx}]`;
647
- filterParts.push(gapFilter);
648
- videoStreams.push(`[v${videoStreamIdx}]`);
649
- videoStreamIdx++;
650
- }
651
- }
652
-
653
- // Detect gaps in audio timeline (use same totalDuration for consistency)
654
- const audioGaps = detectTimelineGaps(audioClips, totalDuration);
655
-
656
- // Build ordered list of audio segments
657
- interface AudioSegment {
658
- type: 'clip' | 'gap';
659
- clip?: typeof audioClips[0];
660
- gap?: { start: number; end: number };
661
- startTime: number;
662
- }
663
- const audioSegments: AudioSegment[] = [];
664
-
665
- // Add audio clips as segments
666
- for (const clip of audioClips) {
667
- audioSegments.push({ type: 'clip', clip, startTime: clip.startTime });
668
- }
669
-
670
- // Add gaps as segments
671
- for (const gap of audioGaps) {
672
- audioSegments.push({ type: 'gap', gap, startTime: gap.start });
673
- }
674
-
675
- // Sort by start time
676
- audioSegments.sort((a, b) => a.startTime - b.startTime);
677
-
678
- // Process audio segments (clips and gaps)
679
- const audioStreams: string[] = [];
680
- let audioStreamIdx = 0;
681
-
682
- for (const segment of audioSegments) {
683
- if (segment.type === 'clip' && segment.clip) {
684
- const clip = segment.clip;
685
- const source = sources.find((s) => s.id === clip.sourceId);
686
- if (!source) continue;
687
-
688
- const srcIdx = sourceMap.get(source.id);
689
- if (srcIdx === undefined) continue;
690
-
691
- // Check if source has audio (audio sources always have audio, video sources need to be checked)
692
- const hasAudio = source.type === "audio" || sourceAudioMap.get(source.id) === true;
693
-
694
- let audioFilter: string;
695
- if (hasAudio) {
696
- const audioTrimStart = clip.sourceStartOffset;
697
- const audioTrimEnd = clip.sourceStartOffset + clip.duration;
698
- const sourceDuration = source.duration || 0;
699
- const clipDuration = clip.duration;
700
-
701
- if (source.type === "audio" && sourceDuration > clipDuration && audioTrimStart === 0) {
702
- // Audio source is longer than clip slot — speed up to fit instead of truncating
703
- const tempo = sourceDuration / clipDuration;
704
- if (tempo <= 2.0) {
705
- // atempo sounds natural up to ~1.3x, acceptable up to 2x
706
- audioFilter = `[${srcIdx}:a]atempo=${tempo.toFixed(4)},asetpts=PTS-STARTPTS`;
707
- } else {
708
- // Too fast would sound bad — fall back to trim
709
- audioFilter = `[${srcIdx}:a]atrim=start=${audioTrimStart}:end=${audioTrimEnd},asetpts=PTS-STARTPTS`;
710
- }
711
- } else {
712
- // Normal trim for video-embedded audio, audio that fits, or offset clips
713
- audioFilter = `[${srcIdx}:a]atrim=start=${audioTrimStart}:end=${audioTrimEnd},asetpts=PTS-STARTPTS`;
714
- }
715
- } else {
716
- // Source has no audio - generate silence for the clip duration
717
- audioFilter = `anullsrc=r=48000:cl=stereo,atrim=0:${clip.duration},asetpts=PTS-STARTPTS`;
718
- }
719
-
720
- // Apply audio effects
721
- for (const effect of clip.effects || []) {
722
- if (effect.type === "fadeIn") {
723
- audioFilter += `,afade=t=in:st=0:d=${effect.duration}`;
724
- } else if (effect.type === "fadeOut") {
725
- const fadeStart = clip.duration - effect.duration;
726
- audioFilter += `,afade=t=out:st=${fadeStart}:d=${effect.duration}`;
727
- }
728
- }
729
-
730
- audioFilter += `[a${audioStreamIdx}]`;
731
- filterParts.push(audioFilter);
732
- audioStreams.push(`[a${audioStreamIdx}]`);
733
- audioStreamIdx++;
734
- } else if (segment.type === 'gap' && segment.gap) {
735
- // Generate silence for the gap duration
736
- const gapDuration = segment.gap.end - segment.gap.start;
737
- const audioGapFilter = `anullsrc=r=48000:cl=stereo,atrim=0:${gapDuration},asetpts=PTS-STARTPTS[a${audioStreamIdx}]`;
738
- filterParts.push(audioGapFilter);
739
- audioStreams.push(`[a${audioStreamIdx}]`);
740
- audioStreamIdx++;
741
- }
742
- }
743
-
744
- // Concatenate video clips
745
- if (videoStreams.length > 1) {
746
- filterParts.push(
747
- `${videoStreams.join("")}concat=n=${videoStreams.length}:v=1:a=0[outv]`
748
- );
749
- } else if (videoStreams.length === 1) {
750
- // Single video clip - just copy
751
- filterParts.push(`${videoStreams[0]}copy[outv]`);
752
- }
753
-
754
- // Concatenate or mix audio clips
755
- if (audioStreams.length > 1) {
756
- filterParts.push(
757
- `${audioStreams.join("")}concat=n=${audioStreams.length}:v=0:a=1[outa]`
758
- );
759
- } else if (audioStreams.length === 1) {
760
- // Single audio clip - just copy
761
- filterParts.push(`${audioStreams[0]}acopy[outa]`);
762
- }
763
-
764
- // Add filter complex
765
- args.push("-filter_complex", filterParts.join(";"));
766
-
767
- // Map outputs
768
- args.push("-map", "[outv]");
769
- if (audioStreams.length > 0) {
770
- args.push("-map", "[outa]");
771
- }
772
-
773
- // Add encoding settings
774
- args.push(...presetSettings.ffmpegArgs);
775
-
776
- // Output file
777
- args.push(outputPath);
778
-
779
- return args;
780
- }
781
-
782
- /**
783
- * Run FFmpeg with progress reporting
784
- */
785
- function runFFmpegProcess(
786
- ffmpegPath: string,
787
- args: string[],
788
- onProgress: (percent: number) => void
789
- ): Promise<void> {
790
- return new Promise((resolve, reject) => {
791
- const ffmpeg = spawn(ffmpegPath, args, {
792
- stdio: ["pipe", "pipe", "pipe"],
793
- });
794
-
795
- let duration = 0;
796
- let stderr = "";
797
-
798
- ffmpeg.stderr?.on("data", (data: Buffer) => {
799
- const output = data.toString();
800
- stderr += output;
801
-
802
- // Parse duration
803
- const durationMatch = output.match(/Duration: (\d+):(\d+):(\d+\.\d+)/);
804
- if (durationMatch) {
805
- const [, hours, minutes, seconds] = durationMatch;
806
- duration =
807
- parseInt(hours) * 3600 +
808
- parseInt(minutes) * 60 +
809
- parseFloat(seconds);
810
- }
811
-
812
- // Parse progress
813
- const timeMatch = output.match(/time=(\d+):(\d+):(\d+\.\d+)/);
814
- if (timeMatch && duration > 0) {
815
- const [, hours, minutes, seconds] = timeMatch;
816
- const currentTime =
817
- parseInt(hours) * 3600 +
818
- parseInt(minutes) * 60 +
819
- parseFloat(seconds);
820
- const percent = Math.min(100, Math.round((currentTime / duration) * 100));
821
- onProgress(percent);
822
- }
823
- });
824
-
825
- ffmpeg.on("close", (code) => {
826
- if (code === 0) {
827
- resolve();
828
- } else {
829
- // Extract error message
830
- const errorMatch = stderr.match(/Error.*$/m);
831
- const errorMsg = errorMatch ? errorMatch[0] : `FFmpeg exited with code ${code}`;
832
- reject(new Error(errorMsg));
833
- }
834
- });
835
-
836
- ffmpeg.on("error", (err) => {
837
- reject(err);
838
- });
839
- });
840
- }
841
-
842
/**
 * Encoder configuration for a single export-quality preset.
 */
interface PresetSettings {
  // Output frame size as "WIDTHxHEIGHT" (landscape by default; swapped or
  // squared by getPresetSettings for 9:16 / 1:1 aspect ratios).
  resolution: string;
  // Nominal video bitrate (e.g. "4M"). Not referenced by ffmpegArgs, which
  // encode with CRF — presumably used elsewhere for display/estimation; TODO confirm.
  videoBitrate: string;
  // Nominal audio bitrate; mirrors the "-b:a" value inside ffmpegArgs.
  audioBitrate: string;
  // Raw FFmpeg encoding arguments (codec, preset, CRF, audio codec/bitrate).
  ffmpegArgs: string[];
}
848
-
849
- function getPresetSettings(
850
- preset: string,
851
- aspectRatio: string
852
- ): PresetSettings {
853
- const presets: Record<string, PresetSettings> = {
854
- draft: {
855
- resolution: "640x360",
856
- videoBitrate: "1M",
857
- audioBitrate: "128k",
858
- ffmpegArgs: [
859
- "-c:v", "libx264",
860
- "-preset", "ultrafast",
861
- "-crf", "28",
862
- "-c:a", "aac",
863
- "-b:a", "128k",
864
- ],
865
- },
866
- standard: {
867
- resolution: "1280x720",
868
- videoBitrate: "4M",
869
- audioBitrate: "192k",
870
- ffmpegArgs: [
871
- "-c:v", "libx264",
872
- "-preset", "medium",
873
- "-crf", "23",
874
- "-c:a", "aac",
875
- "-b:a", "192k",
876
- ],
877
- },
878
- high: {
879
- resolution: "1920x1080",
880
- videoBitrate: "8M",
881
- audioBitrate: "256k",
882
- ffmpegArgs: [
883
- "-c:v", "libx264",
884
- "-preset", "slow",
885
- "-crf", "18",
886
- "-c:a", "aac",
887
- "-b:a", "256k",
888
- ],
889
- },
890
- ultra: {
891
- resolution: "3840x2160",
892
- videoBitrate: "20M",
893
- audioBitrate: "320k",
894
- ffmpegArgs: [
895
- "-c:v", "libx264",
896
- "-preset", "slow",
897
- "-crf", "15",
898
- "-c:a", "aac",
899
- "-b:a", "320k",
900
- ],
901
- },
902
- };
903
-
904
- // Adjust resolution for aspect ratio
905
- const settings = { ...presets[preset] || presets.standard };
906
-
907
- if (aspectRatio === "9:16") {
908
- // Vertical video
909
- const [w, h] = settings.resolution.split("x");
910
- settings.resolution = `${h}x${w}`;
911
- } else if (aspectRatio === "1:1") {
912
- // Square video
913
- const h = settings.resolution.split("x")[1];
914
- settings.resolution = `${h}x${h}`;
915
- }
916
-
917
- return settings;
918
- }