@studiomeyer/mcp-video 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/ISSUE_TEMPLATE/bug_report.md +31 -0
- package/.github/ISSUE_TEMPLATE/feature_request.md +19 -0
- package/.github/workflows/ci.yml +34 -0
- package/CHANGELOG.md +24 -0
- package/CONTRIBUTING.md +75 -0
- package/LICENSE +21 -0
- package/README.md +198 -0
- package/USAGE.md +144 -0
- package/dist/handlers/capcut.d.ts +6 -0
- package/dist/handlers/capcut.js +229 -0
- package/dist/handlers/capcut.js.map +1 -0
- package/dist/handlers/editing.d.ts +6 -0
- package/dist/handlers/editing.js +242 -0
- package/dist/handlers/editing.js.map +1 -0
- package/dist/handlers/index.d.ts +2 -0
- package/dist/handlers/index.js +33 -0
- package/dist/handlers/index.js.map +1 -0
- package/dist/handlers/post-production.d.ts +5 -0
- package/dist/handlers/post-production.js +109 -0
- package/dist/handlers/post-production.js.map +1 -0
- package/dist/handlers/smart-screenshot.d.ts +5 -0
- package/dist/handlers/smart-screenshot.js +83 -0
- package/dist/handlers/smart-screenshot.js.map +1 -0
- package/dist/handlers/tts.d.ts +5 -0
- package/dist/handlers/tts.js +83 -0
- package/dist/handlers/tts.js.map +1 -0
- package/dist/handlers/video.d.ts +5 -0
- package/dist/handlers/video.js +127 -0
- package/dist/handlers/video.js.map +1 -0
- package/dist/lib/dual-transport.d.ts +42 -0
- package/dist/lib/dual-transport.js +208 -0
- package/dist/lib/dual-transport.js.map +1 -0
- package/dist/lib/logger.d.ts +12 -0
- package/dist/lib/logger.js +42 -0
- package/dist/lib/logger.js.map +1 -0
- package/dist/lib/types.d.ts +16 -0
- package/dist/lib/types.js +15 -0
- package/dist/lib/types.js.map +1 -0
- package/dist/schemas/capcut.d.ts +608 -0
- package/dist/schemas/capcut.js +411 -0
- package/dist/schemas/capcut.js.map +1 -0
- package/dist/schemas/editing.d.ts +822 -0
- package/dist/schemas/editing.js +466 -0
- package/dist/schemas/editing.js.map +1 -0
- package/dist/schemas/index.d.ts +2366 -0
- package/dist/schemas/index.js +15 -0
- package/dist/schemas/index.js.map +1 -0
- package/dist/schemas/post-production.d.ts +379 -0
- package/dist/schemas/post-production.js +268 -0
- package/dist/schemas/post-production.js.map +1 -0
- package/dist/schemas/smart-screenshot.d.ts +127 -0
- package/dist/schemas/smart-screenshot.js +122 -0
- package/dist/schemas/smart-screenshot.js.map +1 -0
- package/dist/schemas/tts.d.ts +220 -0
- package/dist/schemas/tts.js +194 -0
- package/dist/schemas/tts.js.map +1 -0
- package/dist/schemas/video.d.ts +236 -0
- package/dist/schemas/video.js +210 -0
- package/dist/schemas/video.js.map +1 -0
- package/dist/server.d.ts +11 -0
- package/dist/server.js +239 -0
- package/dist/server.js.map +1 -0
- package/dist/server.test.d.ts +1 -0
- package/dist/server.test.js +87 -0
- package/dist/server.test.js.map +1 -0
- package/dist/tools/engine/audio-mixer.d.ts +40 -0
- package/dist/tools/engine/audio-mixer.js +169 -0
- package/dist/tools/engine/audio-mixer.js.map +1 -0
- package/dist/tools/engine/audio.d.ts +22 -0
- package/dist/tools/engine/audio.js +73 -0
- package/dist/tools/engine/audio.js.map +1 -0
- package/dist/tools/engine/beat-sync.d.ts +31 -0
- package/dist/tools/engine/beat-sync.js +270 -0
- package/dist/tools/engine/beat-sync.js.map +1 -0
- package/dist/tools/engine/capture.d.ts +12 -0
- package/dist/tools/engine/capture.js +290 -0
- package/dist/tools/engine/capture.js.map +1 -0
- package/dist/tools/engine/chroma-key.d.ts +27 -0
- package/dist/tools/engine/chroma-key.js +154 -0
- package/dist/tools/engine/chroma-key.js.map +1 -0
- package/dist/tools/engine/concat.d.ts +49 -0
- package/dist/tools/engine/concat.js +149 -0
- package/dist/tools/engine/concat.js.map +1 -0
- package/dist/tools/engine/cursor.d.ts +26 -0
- package/dist/tools/engine/cursor.js +185 -0
- package/dist/tools/engine/cursor.js.map +1 -0
- package/dist/tools/engine/easing.d.ts +15 -0
- package/dist/tools/engine/easing.js +100 -0
- package/dist/tools/engine/easing.js.map +1 -0
- package/dist/tools/engine/editing.d.ts +158 -0
- package/dist/tools/engine/editing.js +541 -0
- package/dist/tools/engine/editing.js.map +1 -0
- package/dist/tools/engine/encoder.d.ts +31 -0
- package/dist/tools/engine/encoder.js +154 -0
- package/dist/tools/engine/encoder.js.map +1 -0
- package/dist/tools/engine/index.d.ts +30 -0
- package/dist/tools/engine/index.js +23 -0
- package/dist/tools/engine/index.js.map +1 -0
- package/dist/tools/engine/lut-presets.d.ts +25 -0
- package/dist/tools/engine/lut-presets.js +141 -0
- package/dist/tools/engine/lut-presets.js.map +1 -0
- package/dist/tools/engine/narrated-video.d.ts +63 -0
- package/dist/tools/engine/narrated-video.js +163 -0
- package/dist/tools/engine/narrated-video.js.map +1 -0
- package/dist/tools/engine/scenes.d.ts +17 -0
- package/dist/tools/engine/scenes.js +223 -0
- package/dist/tools/engine/scenes.js.map +1 -0
- package/dist/tools/engine/smart-screenshot.d.ts +80 -0
- package/dist/tools/engine/smart-screenshot.js +744 -0
- package/dist/tools/engine/smart-screenshot.js.map +1 -0
- package/dist/tools/engine/social-format.d.ts +66 -0
- package/dist/tools/engine/social-format.js +107 -0
- package/dist/tools/engine/social-format.js.map +1 -0
- package/dist/tools/engine/template-renderer.d.ts +45 -0
- package/dist/tools/engine/template-renderer.js +233 -0
- package/dist/tools/engine/template-renderer.js.map +1 -0
- package/dist/tools/engine/templates.d.ts +87 -0
- package/dist/tools/engine/templates.js +272 -0
- package/dist/tools/engine/templates.js.map +1 -0
- package/dist/tools/engine/text-animations.d.ts +33 -0
- package/dist/tools/engine/text-animations.js +192 -0
- package/dist/tools/engine/text-animations.js.map +1 -0
- package/dist/tools/engine/text-overlay.d.ts +27 -0
- package/dist/tools/engine/text-overlay.js +84 -0
- package/dist/tools/engine/text-overlay.js.map +1 -0
- package/dist/tools/engine/tts.d.ts +54 -0
- package/dist/tools/engine/tts.js +186 -0
- package/dist/tools/engine/tts.js.map +1 -0
- package/dist/tools/engine/types.d.ts +166 -0
- package/dist/tools/engine/types.js +13 -0
- package/dist/tools/engine/types.js.map +1 -0
- package/dist/tools/engine/voice-effects.d.ts +18 -0
- package/dist/tools/engine/voice-effects.js +215 -0
- package/dist/tools/engine/voice-effects.js.map +1 -0
- package/dist/tools/index.d.ts +32 -0
- package/dist/tools/index.js +23 -0
- package/dist/tools/index.js.map +1 -0
- package/package.json +56 -0
- package/scripts/check-deps.js +39 -0
- package/src/handlers/capcut.ts +245 -0
- package/src/handlers/editing.ts +260 -0
- package/src/handlers/index.ts +34 -0
- package/src/handlers/post-production.ts +136 -0
- package/src/handlers/smart-screenshot.ts +86 -0
- package/src/handlers/tts.ts +103 -0
- package/src/handlers/video.ts +137 -0
- package/src/lib/dual-transport.ts +272 -0
- package/src/lib/logger.ts +59 -0
- package/src/lib/types.ts +25 -0
- package/src/schemas/capcut.ts +418 -0
- package/src/schemas/editing.ts +476 -0
- package/src/schemas/index.ts +15 -0
- package/src/schemas/post-production.ts +273 -0
- package/src/schemas/smart-screenshot.ts +122 -0
- package/src/schemas/tts.ts +197 -0
- package/src/schemas/video.ts +211 -0
- package/src/server.test.ts +99 -0
- package/src/server.ts +289 -0
- package/src/tools/engine/audio-mixer.ts +244 -0
- package/src/tools/engine/audio.ts +115 -0
- package/src/tools/engine/beat-sync.ts +356 -0
- package/src/tools/engine/capture.ts +360 -0
- package/src/tools/engine/chroma-key.ts +202 -0
- package/src/tools/engine/concat.ts +242 -0
- package/src/tools/engine/cursor.ts +222 -0
- package/src/tools/engine/easing.ts +120 -0
- package/src/tools/engine/editing.ts +809 -0
- package/src/tools/engine/encoder.ts +208 -0
- package/src/tools/engine/index.ts +33 -0
- package/src/tools/engine/lut-presets.ts +235 -0
- package/src/tools/engine/narrated-video.ts +267 -0
- package/src/tools/engine/scenes.ts +309 -0
- package/src/tools/engine/smart-screenshot.ts +923 -0
- package/src/tools/engine/social-format.ts +146 -0
- package/src/tools/engine/template-renderer.ts +294 -0
- package/src/tools/engine/templates.ts +370 -0
- package/src/tools/engine/text-animations.ts +282 -0
- package/src/tools/engine/text-overlay.ts +143 -0
- package/src/tools/engine/tts.ts +284 -0
- package/src/tools/engine/types.ts +191 -0
- package/src/tools/engine/voice-effects.ts +258 -0
- package/src/tools/index.ts +67 -0
- package/tsconfig.json +19 -0
- package/vitest.config.ts +7 -0
|
@@ -0,0 +1,809 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Video editing engine — speed, color grading, effects, crop, reverse,
|
|
3
|
+
* audio extraction, subtitles, keyframe animation, picture-in-picture.
|
|
4
|
+
*
|
|
5
|
+
* All processing via ffmpeg (no npm dependencies).
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { execFile } from 'child_process';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import { logger } from '../../lib/logger.js';
import { getMediaDuration } from './audio.js';
|
|
13
|
+
|
|
14
|
+
// ─── Shared ffmpeg runner ────────────────────────────────────────────
|
|
15
|
+
|
|
16
|
+
function runFfmpeg(args: string[], timeoutMs = 300_000): Promise<string> {
|
|
17
|
+
return new Promise((resolve, reject) => {
|
|
18
|
+
execFile('ffmpeg', args, { maxBuffer: 100 * 1024 * 1024, timeout: timeoutMs }, (error, stdout, stderr) => {
|
|
19
|
+
if (error) {
|
|
20
|
+
logger.error(`ffmpeg failed: ${stderr}`);
|
|
21
|
+
reject(new Error(`ffmpeg failed: ${stderr || error.message}`));
|
|
22
|
+
return;
|
|
23
|
+
}
|
|
24
|
+
resolve(stdout);
|
|
25
|
+
});
|
|
26
|
+
});
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
function ensureDir(filePath: string): void {
|
|
30
|
+
const dir = path.dirname(filePath);
|
|
31
|
+
if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
function assertExists(filePath: string, label = 'File'): void {
|
|
35
|
+
if (!fs.existsSync(filePath)) throw new Error(`${label} not found: ${filePath}`);
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
/** Check if a media file has an audio stream */
|
|
39
|
+
function hasAudioStream(filePath: string): Promise<boolean> {
|
|
40
|
+
return new Promise((resolve) => {
|
|
41
|
+
execFile(
|
|
42
|
+
'ffprobe',
|
|
43
|
+
['-v', 'quiet', '-select_streams', 'a', '-show_entries', 'stream=codec_type', '-of', 'csv=p=0', filePath],
|
|
44
|
+
(error, stdout) => {
|
|
45
|
+
if (error) { resolve(false); return; }
|
|
46
|
+
resolve(stdout.trim().length > 0);
|
|
47
|
+
}
|
|
48
|
+
);
|
|
49
|
+
});
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
function fileInfo(filePath: string): string {
|
|
53
|
+
const stats = fs.statSync(filePath);
|
|
54
|
+
return `${(stats.size / 1024 / 1024).toFixed(2)} MB`;
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
// ─── 1. Video Speed ──────────────────────────────────────────────────
|
|
58
|
+
|
|
59
|
+
/** Options for adjustVideoSpeed. */
export interface SpeedConfig {
  /** Path to the source video (must exist). */
  inputPath: string;
  /** Destination path; parent directories are created as needed. */
  outputPath: string;
  /** Speed factor: 0.25 (4x slower) to 4.0 (4x faster). 1.0 = original. */
  speed: number;
  /** Audio handling: 'match' retimes audio with atempo (tempo change, pitch preserved), 'mute' drops the track, 'original' stream-copies it unchanged (may desync). Default: match */
  audioMode?: 'match' | 'mute' | 'original';
}
|
|
67
|
+
|
|
68
|
+
export async function adjustVideoSpeed(config: SpeedConfig): Promise<string> {
|
|
69
|
+
const { inputPath, outputPath, speed, audioMode = 'match' } = config;
|
|
70
|
+
assertExists(inputPath, 'Input video');
|
|
71
|
+
ensureDir(outputPath);
|
|
72
|
+
|
|
73
|
+
if (speed < 0.25 || speed > 4.0) throw new Error('Speed must be between 0.25 and 4.0');
|
|
74
|
+
|
|
75
|
+
const pts = (1 / speed).toFixed(6);
|
|
76
|
+
const hasAudio = await hasAudioStream(inputPath);
|
|
77
|
+
logger.info(`Adjusting speed: ${speed}x (PTS: ${pts}, audio: ${audioMode}, hasAudio: ${hasAudio})`);
|
|
78
|
+
|
|
79
|
+
const videoFilter = `setpts=${pts}*PTS`;
|
|
80
|
+
|
|
81
|
+
const args = ['-y', '-i', inputPath];
|
|
82
|
+
|
|
83
|
+
if (!hasAudio || audioMode === 'mute') {
|
|
84
|
+
args.push('-vf', videoFilter, '-an');
|
|
85
|
+
} else if (audioMode === 'match') {
|
|
86
|
+
const atempoChain = buildAtempoChain(speed);
|
|
87
|
+
args.push('-filter_complex', `[0:v]${videoFilter}[v];[0:a]${atempoChain}[a]`);
|
|
88
|
+
args.push('-map', '[v]', '-map', '[a]');
|
|
89
|
+
} else {
|
|
90
|
+
args.push('-vf', videoFilter, '-c:a', 'copy');
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
args.push('-c:v', 'libx264', '-crf', '18', '-preset', 'medium',
|
|
94
|
+
'-pix_fmt', 'yuv420p', '-movflags', '+faststart', outputPath);
|
|
95
|
+
|
|
96
|
+
await runFfmpeg(args);
|
|
97
|
+
logger.info(`Speed adjusted: ${outputPath} (${fileInfo(outputPath)})`);
|
|
98
|
+
return outputPath;
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
/** Build chained atempo filters (each 0.5-2.0 range) */
|
|
102
|
+
function buildAtempoChain(speed: number): string {
|
|
103
|
+
const filters: string[] = [];
|
|
104
|
+
let remaining = speed;
|
|
105
|
+
|
|
106
|
+
while (remaining > 2.0) {
|
|
107
|
+
filters.push('atempo=2.0');
|
|
108
|
+
remaining /= 2.0;
|
|
109
|
+
}
|
|
110
|
+
while (remaining < 0.5) {
|
|
111
|
+
filters.push('atempo=0.5');
|
|
112
|
+
remaining /= 0.5;
|
|
113
|
+
}
|
|
114
|
+
filters.push(`atempo=${remaining.toFixed(4)}`);
|
|
115
|
+
|
|
116
|
+
return filters.join(',');
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
// ─── 2. Color Grading ───────────────────────────────────────────────
|
|
120
|
+
|
|
121
|
+
/** Options for applyColorGrade. All adjustment fields default to their neutral value. */
export interface ColorGradeConfig {
  /** Path to the source video (must exist). */
  inputPath: string;
  /** Destination path; parent directories are created as needed. */
  outputPath: string;
  /** Brightness adjustment: -1.0 to 1.0 (0 = no change) */
  brightness?: number;
  /** Contrast: 0.0 to 3.0 (1.0 = no change) */
  contrast?: number;
  /** Saturation: 0.0 to 3.0 (1.0 = no change, 0 = grayscale) */
  saturation?: number;
  /** Gamma: 0.1 to 10.0 (1.0 = no change) */
  gamma?: number;
  /** Color temperature shift: -1.0 (cool/blue) to 1.0 (warm/orange). 0 = neutral */
  temperature?: number;
}
|
|
135
|
+
|
|
136
|
+
export async function applyColorGrade(config: ColorGradeConfig): Promise<string> {
|
|
137
|
+
const {
|
|
138
|
+
inputPath, outputPath,
|
|
139
|
+
brightness = 0, contrast = 1, saturation = 1,
|
|
140
|
+
gamma = 1, temperature = 0,
|
|
141
|
+
} = config;
|
|
142
|
+
|
|
143
|
+
assertExists(inputPath, 'Input video');
|
|
144
|
+
ensureDir(outputPath);
|
|
145
|
+
|
|
146
|
+
const filters: string[] = [];
|
|
147
|
+
|
|
148
|
+
// eq filter for brightness, contrast, saturation, gamma
|
|
149
|
+
const eqParts: string[] = [];
|
|
150
|
+
if (brightness !== 0) eqParts.push(`brightness=${brightness.toFixed(3)}`);
|
|
151
|
+
if (contrast !== 1) eqParts.push(`contrast=${contrast.toFixed(3)}`);
|
|
152
|
+
if (saturation !== 1) eqParts.push(`saturation=${saturation.toFixed(3)}`);
|
|
153
|
+
if (gamma !== 1) eqParts.push(`gamma=${gamma.toFixed(3)}`);
|
|
154
|
+
|
|
155
|
+
if (eqParts.length > 0) filters.push(`eq=${eqParts.join(':')}`);
|
|
156
|
+
|
|
157
|
+
// Temperature via colortemperature filter (ffmpeg 5.1+, fallback to colorchannelmixer)
|
|
158
|
+
if (temperature !== 0) {
|
|
159
|
+
// Warm = boost red/green, reduce blue. Cool = opposite.
|
|
160
|
+
const t = temperature;
|
|
161
|
+
const rr = (1 + t * 0.15).toFixed(3);
|
|
162
|
+
const gg = (1 + t * 0.05).toFixed(3);
|
|
163
|
+
const bb = (1 - t * 0.2).toFixed(3);
|
|
164
|
+
filters.push(`colorchannelmixer=${rr}:0:0:0:0:${gg}:0:0:0:0:${bb}:0`);
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
if (filters.length === 0) {
|
|
168
|
+
throw new Error('No color adjustments specified. Set at least one of: brightness, contrast, saturation, gamma, temperature.');
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
logger.info(`Applying color grade: ${filters.join(', ')}`);
|
|
172
|
+
|
|
173
|
+
const args = [
|
|
174
|
+
'-y', '-i', inputPath,
|
|
175
|
+
'-vf', filters.join(','),
|
|
176
|
+
'-c:a', 'copy',
|
|
177
|
+
'-c:v', 'libx264', '-crf', '18', '-preset', 'medium',
|
|
178
|
+
'-pix_fmt', 'yuv420p', '-movflags', '+faststart',
|
|
179
|
+
outputPath,
|
|
180
|
+
];
|
|
181
|
+
|
|
182
|
+
await runFfmpeg(args);
|
|
183
|
+
logger.info(`Color graded: ${outputPath} (${fileInfo(outputPath)})`);
|
|
184
|
+
return outputPath;
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
// ─── 3. Video Effects ───────────────────────────────────────────────
|
|
188
|
+
|
|
189
|
+
/** One-shot visual effects supported by applyVideoEffect. */
export type VideoEffect = 'blur' | 'sharpen' | 'vignette' | 'grayscale' | 'sepia' | 'noise' | 'glow';
|
|
190
|
+
|
|
191
|
+
/** Options for applyVideoEffect. */
export interface EffectConfig {
  /** Path to the source video (must exist). */
  inputPath: string;
  /** Destination path; parent directories are created as needed. */
  outputPath: string;
  /** Effect to apply */
  effect: VideoEffect;
  /** Intensity 0.0-1.0; values outside the range are clamped (default: 0.5) */
  intensity?: number;
}
|
|
199
|
+
|
|
200
|
+
export async function applyVideoEffect(config: EffectConfig): Promise<string> {
|
|
201
|
+
const { inputPath, outputPath, effect, intensity = 0.5 } = config;
|
|
202
|
+
assertExists(inputPath, 'Input video');
|
|
203
|
+
ensureDir(outputPath);
|
|
204
|
+
|
|
205
|
+
const i = Math.max(0, Math.min(1, intensity));
|
|
206
|
+
let vf: string;
|
|
207
|
+
|
|
208
|
+
switch (effect) {
|
|
209
|
+
case 'blur': {
|
|
210
|
+
const radius = Math.round(2 + i * 18); // 2-20
|
|
211
|
+
vf = `boxblur=${radius}:${radius}`;
|
|
212
|
+
break;
|
|
213
|
+
}
|
|
214
|
+
case 'sharpen': {
|
|
215
|
+
const amount = (i * 2).toFixed(2); // 0-2
|
|
216
|
+
vf = `unsharp=5:5:${amount}:5:5:0`;
|
|
217
|
+
break;
|
|
218
|
+
}
|
|
219
|
+
case 'vignette': {
|
|
220
|
+
const angle = (0.3 + i * 0.5).toFixed(2); // 0.3-0.8 radians
|
|
221
|
+
vf = `vignette=angle=${angle}`;
|
|
222
|
+
break;
|
|
223
|
+
}
|
|
224
|
+
case 'grayscale': {
|
|
225
|
+
// Blend: original*(1-i) + grayscale*i via saturation
|
|
226
|
+
const sat = (1 - i).toFixed(3);
|
|
227
|
+
vf = `eq=saturation=${sat}`;
|
|
228
|
+
break;
|
|
229
|
+
}
|
|
230
|
+
case 'sepia': {
|
|
231
|
+
// Desaturate + warm tone
|
|
232
|
+
const desat = (1 - i * 0.8).toFixed(3);
|
|
233
|
+
const warm = (1 + i * 0.15).toFixed(3);
|
|
234
|
+
const coolB = (1 - i * 0.2).toFixed(3);
|
|
235
|
+
vf = `eq=saturation=${desat},colorchannelmixer=${warm}:0:0:0:0:1:0:0:0:0:${coolB}:0`;
|
|
236
|
+
break;
|
|
237
|
+
}
|
|
238
|
+
case 'noise': {
|
|
239
|
+
const strength = Math.round(5 + i * 40); // 5-45 (higher values explode file size)
|
|
240
|
+
vf = `noise=alls=${strength}:allf=t`;
|
|
241
|
+
break;
|
|
242
|
+
}
|
|
243
|
+
case 'glow': {
|
|
244
|
+
// Duplicate + blur + blend (soft glow)
|
|
245
|
+
const blurR = Math.round(5 + i * 25);
|
|
246
|
+
vf = `split[a][b];[b]boxblur=${blurR}:${blurR}[blurred];[a][blurred]blend=all_mode=screen:all_opacity=${(i * 0.5).toFixed(2)}`;
|
|
247
|
+
break;
|
|
248
|
+
}
|
|
249
|
+
default:
|
|
250
|
+
throw new Error(`Unknown effect: ${effect}`);
|
|
251
|
+
}
|
|
252
|
+
|
|
253
|
+
logger.info(`Applying ${effect} (intensity: ${i.toFixed(2)})`);
|
|
254
|
+
|
|
255
|
+
// Noise is high-entropy — use higher CRF to keep file size sane
|
|
256
|
+
const crf = effect === 'noise' ? '35' : '18';
|
|
257
|
+
|
|
258
|
+
const args = [
|
|
259
|
+
'-y', '-i', inputPath,
|
|
260
|
+
'-vf', vf,
|
|
261
|
+
'-c:a', 'copy',
|
|
262
|
+
'-c:v', 'libx264', '-crf', crf, '-preset', 'medium',
|
|
263
|
+
'-pix_fmt', 'yuv420p', '-movflags', '+faststart',
|
|
264
|
+
outputPath,
|
|
265
|
+
];
|
|
266
|
+
|
|
267
|
+
// glow uses filter_complex (split+blend) — needs different arg structure
|
|
268
|
+
if (effect === 'glow') {
|
|
269
|
+
args.length = 0;
|
|
270
|
+
args.push(
|
|
271
|
+
'-y', '-i', inputPath,
|
|
272
|
+
'-filter_complex', `[0:v]${vf}[out]`,
|
|
273
|
+
'-map', '[out]', '-map', '0:a?',
|
|
274
|
+
'-c:v', 'libx264', '-crf', crf, '-preset', 'medium',
|
|
275
|
+
'-pix_fmt', 'yuv420p', '-movflags', '+faststart',
|
|
276
|
+
outputPath,
|
|
277
|
+
);
|
|
278
|
+
}
|
|
279
|
+
|
|
280
|
+
await runFfmpeg(args);
|
|
281
|
+
logger.info(`Effect applied: ${outputPath} (${fileInfo(outputPath)})`);
|
|
282
|
+
return outputPath;
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
// ─── 4. Crop Video ──────────────────────────────────────────────────
|
|
286
|
+
|
|
287
|
+
/** Options for cropVideo. */
export interface CropConfig {
  /** Path to the source video (must exist). */
  inputPath: string;
  /** Destination path; parent directories are created as needed. */
  outputPath: string;
  /** X offset in pixels, or 'center' to center horizontally (default: 'center') */
  x?: number | 'center';
  /** Y offset in pixels, or 'center' to center vertically (default: 'center') */
  y?: number | 'center';
  /** Width of crop region in pixels */
  width: number;
  /** Height of crop region in pixels */
  height: number;
}
|
|
299
|
+
|
|
300
|
+
export async function cropVideo(config: CropConfig): Promise<string> {
|
|
301
|
+
const { inputPath, outputPath, width, height, x = 'center', y = 'center' } = config;
|
|
302
|
+
assertExists(inputPath, 'Input video');
|
|
303
|
+
ensureDir(outputPath);
|
|
304
|
+
|
|
305
|
+
const xExpr = x === 'center' ? '(iw-ow)/2' : String(x);
|
|
306
|
+
const yExpr = y === 'center' ? '(ih-oh)/2' : String(y);
|
|
307
|
+
|
|
308
|
+
logger.info(`Cropping to ${width}x${height} at ${xExpr},${yExpr}`);
|
|
309
|
+
|
|
310
|
+
const args = [
|
|
311
|
+
'-y', '-i', inputPath,
|
|
312
|
+
'-vf', `crop=${width}:${height}:${xExpr}:${yExpr}`,
|
|
313
|
+
'-c:a', 'copy',
|
|
314
|
+
'-c:v', 'libx264', '-crf', '18', '-preset', 'medium',
|
|
315
|
+
'-pix_fmt', 'yuv420p', '-movflags', '+faststart',
|
|
316
|
+
outputPath,
|
|
317
|
+
];
|
|
318
|
+
|
|
319
|
+
await runFfmpeg(args);
|
|
320
|
+
logger.info(`Cropped: ${outputPath} (${fileInfo(outputPath)})`);
|
|
321
|
+
return outputPath;
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
// ─── 5. Reverse Clip ────────────────────────────────────────────────
|
|
325
|
+
|
|
326
|
+
/** Options for reverseClip. */
export interface ReverseConfig {
  /** Path to the source video (must exist). */
  inputPath: string;
  /** Destination path; parent directories are created as needed. */
  outputPath: string;
  /** Also reverse audio; when false the forward audio track is stream-copied (default: true) */
  reverseAudio?: boolean;
}
|
|
332
|
+
|
|
333
|
+
export async function reverseClip(config: ReverseConfig): Promise<string> {
|
|
334
|
+
const { inputPath, outputPath, reverseAudio = true } = config;
|
|
335
|
+
assertExists(inputPath, 'Input video');
|
|
336
|
+
ensureDir(outputPath);
|
|
337
|
+
|
|
338
|
+
const hasAudio = await hasAudioStream(inputPath);
|
|
339
|
+
logger.info(`Reversing video (audio: ${reverseAudio}, hasAudio: ${hasAudio})`);
|
|
340
|
+
|
|
341
|
+
const args = ['-y', '-i', inputPath];
|
|
342
|
+
|
|
343
|
+
if (!hasAudio) {
|
|
344
|
+
args.push('-vf', 'reverse', '-an');
|
|
345
|
+
} else if (reverseAudio) {
|
|
346
|
+
args.push('-vf', 'reverse', '-af', 'areverse');
|
|
347
|
+
} else {
|
|
348
|
+
args.push('-vf', 'reverse', '-c:a', 'copy');
|
|
349
|
+
}
|
|
350
|
+
|
|
351
|
+
args.push('-c:v', 'libx264', '-crf', '18', '-preset', 'medium',
|
|
352
|
+
'-pix_fmt', 'yuv420p', '-movflags', '+faststart', outputPath);
|
|
353
|
+
|
|
354
|
+
await runFfmpeg(args);
|
|
355
|
+
logger.info(`Reversed: ${outputPath} (${fileInfo(outputPath)})`);
|
|
356
|
+
return outputPath;
|
|
357
|
+
}
|
|
358
|
+
|
|
359
|
+
// ─── 6. Extract Audio ───────────────────────────────────────────────
|
|
360
|
+
|
|
361
|
+
/** Options for extractAudio. */
export interface ExtractAudioConfig {
  /** Path to the source video (must exist and contain an audio stream). */
  inputPath: string;
  /** Destination path; parent directories are created as needed. */
  outputPath: string;
  /** Output format: mp3, aac, wav, flac (default: mp3) */
  format?: 'mp3' | 'aac' | 'wav' | 'flac';
  /** Audio bitrate for lossy formats (mp3/aac); ignored for wav/flac (default: 192k) */
  bitrate?: string;
}
|
|
369
|
+
|
|
370
|
+
export async function extractAudio(config: ExtractAudioConfig): Promise<string> {
|
|
371
|
+
const { inputPath, outputPath, format = 'mp3', bitrate = '192k' } = config;
|
|
372
|
+
assertExists(inputPath, 'Input video');
|
|
373
|
+
ensureDir(outputPath);
|
|
374
|
+
|
|
375
|
+
const hasAudio = await hasAudioStream(inputPath);
|
|
376
|
+
if (!hasAudio) throw new Error('Input video has no audio stream to extract');
|
|
377
|
+
|
|
378
|
+
logger.info(`Extracting audio as ${format}`);
|
|
379
|
+
|
|
380
|
+
const args = ['-y', '-i', inputPath, '-vn'];
|
|
381
|
+
|
|
382
|
+
switch (format) {
|
|
383
|
+
case 'mp3':
|
|
384
|
+
args.push('-c:a', 'libmp3lame', '-b:a', bitrate);
|
|
385
|
+
break;
|
|
386
|
+
case 'aac':
|
|
387
|
+
args.push('-c:a', 'aac', '-b:a', bitrate);
|
|
388
|
+
break;
|
|
389
|
+
case 'wav':
|
|
390
|
+
args.push('-c:a', 'pcm_s16le');
|
|
391
|
+
break;
|
|
392
|
+
case 'flac':
|
|
393
|
+
args.push('-c:a', 'flac');
|
|
394
|
+
break;
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
args.push(outputPath);
|
|
398
|
+
await runFfmpeg(args);
|
|
399
|
+
logger.info(`Audio extracted: ${outputPath} (${fileInfo(outputPath)})`);
|
|
400
|
+
return outputPath;
|
|
401
|
+
}
|
|
402
|
+
|
|
403
|
+
// ─── 7. Burn Subtitles ──────────────────────────────────────────────
|
|
404
|
+
|
|
405
|
+
/** Options for burnSubtitles. Colors use ASS &HBBGGRR hex notation. */
export interface BurnSubtitlesConfig {
  /** Path to the source video (must exist). */
  inputPath: string;
  /** Destination path; parent directories are created as needed. */
  outputPath: string;
  /** Path to SRT or ASS subtitle file */
  subtitlePath: string;
  /** Font size (default: 24) */
  fontSize?: number;
  /** Font color in ASS notation (default: &Hffffff = white) */
  fontColor?: string;
  /** Outline color in ASS notation (default: &H000000 = black) */
  outlineColor?: string;
  /** Outline width (default: 2) */
  outlineWidth?: number;
  /** Position: bottom, top, center (default: bottom) */
  position?: 'bottom' | 'top' | 'center';
}
|
|
421
|
+
|
|
422
|
+
/**
 * Hard-burn an SRT/ASS subtitle file into the video frames.
 *
 * Styling is injected via libass `force_style` overrides; `position`
 * maps to ASS numpad alignment (2 = bottom-center, 8 = top-center,
 * 5 = middle-center). Audio is stream-copied.
 *
 * NOTE(review): the path escaping below handles backslashes and colons
 * but not single quotes — a subtitle path containing `'` would break the
 * quoted filter string; confirm inputs or extend the escaping.
 */
export async function burnSubtitles(config: BurnSubtitlesConfig): Promise<string> {
  const {
    inputPath, outputPath, subtitlePath,
    fontSize = 24, fontColor = '&Hffffff', outlineColor = '&H000000',
    outlineWidth = 2, position = 'bottom',
  } = config;

  assertExists(inputPath, 'Input video');
  assertExists(subtitlePath, 'Subtitle file');
  ensureDir(outputPath);

  // Determine alignment based on position (ASS alignment values)
  const alignment = position === 'top' ? 8 : position === 'center' ? 5 : 2;
  // Center alignment needs no vertical margin; top/bottom get 30px breathing room.
  const marginV = position === 'center' ? 0 : 30;

  // Escape path for ffmpeg (backslashes and colons)
  const escapedSubPath = subtitlePath.replace(/\\/g, '/').replace(/:/g, '\\:');

  const styleOverride = `FontSize=${fontSize},PrimaryColour=${fontColor},OutlineColour=${outlineColor},Outline=${outlineWidth},Alignment=${alignment},MarginV=${marginV}`;

  logger.info(`Burning subtitles (${position}, size: ${fontSize})`);

  const args = [
    '-y', '-i', inputPath,
    '-vf', `subtitles='${escapedSubPath}':force_style='${styleOverride}'`,
    '-c:a', 'copy',
    '-c:v', 'libx264', '-crf', '18', '-preset', 'medium',
    '-pix_fmt', 'yuv420p', '-movflags', '+faststart',
    outputPath,
  ];

  await runFfmpeg(args);
  logger.info(`Subtitles burned: ${outputPath} (${fileInfo(outputPath)})`);
  return outputPath;
}
|
|
457
|
+
|
|
458
|
+
// ─── 8. Auto Caption (Whisper → SRT → burn) ─────────────────────────
|
|
459
|
+
|
|
460
|
+
/** Options for autoCaption. */
export interface AutoCaptionConfig {
  /** Path to the source video (must exist). */
  inputPath: string;
  /** Destination path for the captioned video; the SRT is written next to it. */
  outputPath: string;
  /** Language code passed to Whisper (default: auto-detect) */
  language?: string;
  /** Font size for captions (default: 28) */
  fontSize?: number;
  /** Caption position (default: bottom) */
  position?: 'bottom' | 'top' | 'center';
  /** Keep the generated .srt file on disk after burning; when false it is deleted (default: true) */
  keepSrt?: boolean;
}
|
|
472
|
+
|
|
473
|
+
/** Result of autoCaption. */
export interface AutoCaptionResult {
  /** Path of the captioned output video. */
  videoPath: string;
  /** Path where the SRT was written (the file is deleted again when keepSrt=false). */
  srtPath: string;
}
|
|
477
|
+
|
|
478
|
+
export async function autoCaption(config: AutoCaptionConfig): Promise<AutoCaptionResult> {
|
|
479
|
+
const {
|
|
480
|
+
inputPath, outputPath,
|
|
481
|
+
language, fontSize = 28, position = 'bottom',
|
|
482
|
+
keepSrt = true,
|
|
483
|
+
} = config;
|
|
484
|
+
|
|
485
|
+
assertExists(inputPath, 'Input video');
|
|
486
|
+
ensureDir(outputPath);
|
|
487
|
+
|
|
488
|
+
// Step 1: Extract audio to temp WAV
|
|
489
|
+
const tempDir = path.join('/tmp', `caption-${Date.now()}`);
|
|
490
|
+
fs.mkdirSync(tempDir, { recursive: true });
|
|
491
|
+
const tempAudio = path.join(tempDir, 'audio.wav');
|
|
492
|
+
const srtPath = outputPath.replace(/\.[^.]+$/, '.srt');
|
|
493
|
+
|
|
494
|
+
logger.info('Step 1/3: Extracting audio for transcription...');
|
|
495
|
+
await extractAudio({
|
|
496
|
+
inputPath,
|
|
497
|
+
outputPath: tempAudio,
|
|
498
|
+
format: 'wav',
|
|
499
|
+
});
|
|
500
|
+
|
|
501
|
+
// Step 2: Transcribe with Whisper API
|
|
502
|
+
logger.info('Step 2/3: Transcribing with Whisper...');
|
|
503
|
+
const srtContent = await transcribeWithWhisper(tempAudio, language);
|
|
504
|
+
fs.writeFileSync(srtPath, srtContent, 'utf-8');
|
|
505
|
+
logger.info(`SRT written: ${srtPath} (${srtContent.split('\n\n').length} segments)`);
|
|
506
|
+
|
|
507
|
+
// Step 3: Burn subtitles
|
|
508
|
+
logger.info('Step 3/3: Burning captions into video...');
|
|
509
|
+
await burnSubtitles({
|
|
510
|
+
inputPath,
|
|
511
|
+
outputPath,
|
|
512
|
+
subtitlePath: srtPath,
|
|
513
|
+
fontSize,
|
|
514
|
+
position,
|
|
515
|
+
});
|
|
516
|
+
|
|
517
|
+
// Cleanup temp
|
|
518
|
+
try { fs.rmSync(tempDir, { recursive: true }); } catch { /* ignore */ }
|
|
519
|
+
if (!keepSrt) try { fs.unlinkSync(srtPath); } catch { /* ignore */ }
|
|
520
|
+
|
|
521
|
+
return { videoPath: outputPath, srtPath };
|
|
522
|
+
}
|
|
523
|
+
|
|
524
|
+
async function transcribeWithWhisper(audioPath: string, language?: string): Promise<string> {
|
|
525
|
+
const apiKey = process.env.OPENAI_API_KEY;
|
|
526
|
+
if (!apiKey) throw new Error('OPENAI_API_KEY required for auto-captioning (Whisper API)');
|
|
527
|
+
|
|
528
|
+
const fileBuffer = fs.readFileSync(audioPath);
|
|
529
|
+
const formData = new FormData();
|
|
530
|
+
formData.append('file', new Blob([fileBuffer], { type: 'audio/wav' }), 'audio.wav');
|
|
531
|
+
formData.append('model', 'whisper-1');
|
|
532
|
+
formData.append('response_format', 'srt');
|
|
533
|
+
if (language) formData.append('language', language);
|
|
534
|
+
|
|
535
|
+
const response = await fetch('https://api.openai.com/v1/audio/transcriptions', {
|
|
536
|
+
method: 'POST',
|
|
537
|
+
headers: { Authorization: `Bearer ${apiKey}` },
|
|
538
|
+
body: formData,
|
|
539
|
+
});
|
|
540
|
+
|
|
541
|
+
if (!response.ok) {
|
|
542
|
+
const error = await response.text();
|
|
543
|
+
throw new Error(`Whisper API failed (${response.status}): ${error}`);
|
|
544
|
+
}
|
|
545
|
+
|
|
546
|
+
return await response.text();
|
|
547
|
+
}
|
|
548
|
+
|
|
549
|
+
// ─── 9. Keyframe Animation ──────────────────────────────────────────
|
|
550
|
+
|
|
551
|
+
/** A single point on the animation timeline; omitted fields fall back to their neutral defaults during interpolation. */
export interface Keyframe {
  /** Time in seconds */
  time: number;
  /** Scale factor (1.0 = original, 2.0 = 2x zoom) */
  scale?: number;
  /** Pan X offset in pixels (0 = center) */
  panX?: number;
  /** Pan Y offset in pixels (0 = center) */
  panY?: number;
  /** Rotation in degrees */
  rotate?: number;
}
|
|
563
|
+
|
|
564
|
+
/** Options for addKeyframeAnimation. At least 2 keyframes are required. */
export interface KeyframeAnimationConfig {
  inputPath: string;
  outputPath: string;
  /** Keyframes defining animation over time (sorted by time before use) */
  keyframes: Keyframe[];
  /** Output width (default: source width) */
  outputWidth?: number;
  /** Output height (default: source height) */
  outputHeight?: number;
}
|
|
574
|
+
|
|
575
|
+
export async function addKeyframeAnimation(config: KeyframeAnimationConfig): Promise<string> {
|
|
576
|
+
const { inputPath, outputPath, keyframes } = config;
|
|
577
|
+
assertExists(inputPath, 'Input video');
|
|
578
|
+
ensureDir(outputPath);
|
|
579
|
+
|
|
580
|
+
if (keyframes.length < 2) throw new Error('Need at least 2 keyframes for animation');
|
|
581
|
+
|
|
582
|
+
// Sort keyframes by time
|
|
583
|
+
const sorted = [...keyframes].sort((a, b) => a.time - b.time);
|
|
584
|
+
|
|
585
|
+
// Get video info for calculating crop/zoom
|
|
586
|
+
const duration = await getMediaDuration(inputPath);
|
|
587
|
+
const videoInfo = await getVideoResolution(inputPath);
|
|
588
|
+
const outW = config.outputWidth ?? videoInfo.width;
|
|
589
|
+
const outH = config.outputHeight ?? videoInfo.height;
|
|
590
|
+
|
|
591
|
+
logger.info(`Keyframe animation: ${sorted.length} keyframes over ${duration.toFixed(1)}s`);
|
|
592
|
+
|
|
593
|
+
// Build zoompan filter expression
|
|
594
|
+
// zoompan requires frame-by-frame zoom/x/y expressions
|
|
595
|
+
// We interpolate between keyframes linearly
|
|
596
|
+
const fps = 60;
|
|
597
|
+
|
|
598
|
+
const zoomExpr = buildInterpolationExpr(sorted, 'scale', 1, fps);
|
|
599
|
+
const panXExpr = buildInterpolationExpr(sorted, 'panX', 0, fps);
|
|
600
|
+
const panYExpr = buildInterpolationExpr(sorted, 'panY', 0, fps);
|
|
601
|
+
|
|
602
|
+
// zoompan: zoom=expr, x=expr, y=expr, d=1 (per-frame), s=output size, fps=fps
|
|
603
|
+
const filter = `zoompan=z='${zoomExpr}':x='${panXExpr}':y='${panYExpr}':d=1:s=${outW}x${outH}:fps=${fps}`;
|
|
604
|
+
|
|
605
|
+
const args = [
|
|
606
|
+
'-y', '-i', inputPath,
|
|
607
|
+
'-vf', filter,
|
|
608
|
+
'-c:v', 'libx264', '-crf', '18', '-preset', 'medium',
|
|
609
|
+
'-pix_fmt', 'yuv420p',
|
|
610
|
+
'-t', String(duration),
|
|
611
|
+
'-c:a', 'copy',
|
|
612
|
+
'-movflags', '+faststart',
|
|
613
|
+
outputPath,
|
|
614
|
+
];
|
|
615
|
+
|
|
616
|
+
await runFfmpeg(args, 600_000); // Longer timeout for keyframe processing
|
|
617
|
+
logger.info(`Keyframe animation applied: ${outputPath} (${fileInfo(outputPath)})`);
|
|
618
|
+
return outputPath;
|
|
619
|
+
}
|
|
620
|
+
|
|
621
|
+
/** Build ffmpeg expression that interpolates between keyframe values per-frame */
|
|
622
|
+
function buildInterpolationExpr(keyframes: Keyframe[], prop: 'scale' | 'panX' | 'panY', defaultVal: number, fps: number): string {
|
|
623
|
+
if (keyframes.length === 0) return String(defaultVal);
|
|
624
|
+
if (keyframes.length === 1) return String(keyframes[0][prop] ?? defaultVal);
|
|
625
|
+
|
|
626
|
+
// Build piecewise linear interpolation using if() expressions
|
|
627
|
+
// Frame number = on (current frame index in zoompan)
|
|
628
|
+
// We use 'on' which is the output frame index
|
|
629
|
+
const parts: string[] = [];
|
|
630
|
+
|
|
631
|
+
for (let i = 0; i < keyframes.length - 1; i++) {
|
|
632
|
+
const kf1 = keyframes[i];
|
|
633
|
+
const kf2 = keyframes[i + 1];
|
|
634
|
+
const v1 = kf1[prop] ?? defaultVal;
|
|
635
|
+
const v2 = kf2[prop] ?? defaultVal;
|
|
636
|
+
const f1 = Math.round(kf1.time * fps);
|
|
637
|
+
const f2 = Math.round(kf2.time * fps);
|
|
638
|
+
const frameRange = f2 - f1;
|
|
639
|
+
|
|
640
|
+
if (frameRange <= 0) continue;
|
|
641
|
+
|
|
642
|
+
// Linear interpolation: v1 + (v2-v1) * (on-f1) / frameRange
|
|
643
|
+
const slope = ((v2 - v1) / frameRange).toFixed(8);
|
|
644
|
+
const interp = `${v1}+${slope}*(on-${f1})`;
|
|
645
|
+
|
|
646
|
+
if (i === 0 && i === keyframes.length - 2) {
|
|
647
|
+
// Only segment — use directly
|
|
648
|
+
parts.push(interp);
|
|
649
|
+
} else if (i === keyframes.length - 2) {
|
|
650
|
+
// Last segment
|
|
651
|
+
parts.push(interp);
|
|
652
|
+
} else {
|
|
653
|
+
parts.push(`if(lt(on,${f2}),${interp},`);
|
|
654
|
+
}
|
|
655
|
+
}
|
|
656
|
+
|
|
657
|
+
// Close all if() statements
|
|
658
|
+
const expr = parts.join('') + ')'.repeat(Math.max(0, parts.length - 1));
|
|
659
|
+
return expr;
|
|
660
|
+
}
|
|
661
|
+
|
|
662
|
+
async function getVideoResolution(filePath: string): Promise<{ width: number; height: number }> {
|
|
663
|
+
return new Promise((resolve, reject) => {
|
|
664
|
+
execFile(
|
|
665
|
+
'ffprobe',
|
|
666
|
+
['-v', 'quiet', '-select_streams', 'v:0', '-show_entries', 'stream=width,height', '-of', 'json', filePath],
|
|
667
|
+
(error, stdout) => {
|
|
668
|
+
if (error) { reject(new Error(`ffprobe failed: ${error.message}`)); return; }
|
|
669
|
+
try {
|
|
670
|
+
const data = JSON.parse(stdout);
|
|
671
|
+
const stream = data.streams?.[0];
|
|
672
|
+
resolve({ width: stream?.width ?? 1920, height: stream?.height ?? 1080 });
|
|
673
|
+
} catch { resolve({ width: 1920, height: 1080 }); }
|
|
674
|
+
}
|
|
675
|
+
);
|
|
676
|
+
});
|
|
677
|
+
}
|
|
678
|
+
|
|
679
|
+
// ─── 10. Picture-in-Picture ─────────────────────────────────────────
|
|
680
|
+
|
|
681
|
+
/** Corner/centre placement of the picture-in-picture overlay. */
export type PipPosition = 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right' | 'center';

/** Options for composePip (overlay one video on top of another). */
export interface PipConfig {
  /** Main (background) video */
  mainVideo: string;
  /** Overlay (PiP) video */
  overlayVideo: string;
  outputPath: string;
  /** Position of PiP (default: bottom-right) */
  position?: PipPosition;
  /** Scale factor applied to the overlay's own dimensions, clamped to 0.1-0.5 (default: 0.3) */
  scale?: number;
  /** PiP start time in seconds (default: 0) */
  startTime?: number;
  /** PiP end time in seconds (default: end of main video) */
  endTime?: number;
  /** Solid border width in px drawn around the PiP via pad (default: 0, no border) */
  borderWidth?: number;
  /** Border color (default: white) */
  borderColor?: string;
}
|
|
702
|
+
|
|
703
|
+
export async function composePip(config: PipConfig): Promise<string> {
|
|
704
|
+
const {
|
|
705
|
+
mainVideo, overlayVideo, outputPath,
|
|
706
|
+
position = 'bottom-right', scale = 0.3,
|
|
707
|
+
startTime = 0, endTime,
|
|
708
|
+
borderWidth = 0, borderColor = 'white',
|
|
709
|
+
} = config;
|
|
710
|
+
|
|
711
|
+
assertExists(mainVideo, 'Main video');
|
|
712
|
+
assertExists(overlayVideo, 'Overlay video');
|
|
713
|
+
ensureDir(outputPath);
|
|
714
|
+
|
|
715
|
+
const s = Math.max(0.1, Math.min(0.5, scale));
|
|
716
|
+
logger.info(`PiP: ${position}, scale: ${s}, border: ${borderWidth}px`);
|
|
717
|
+
|
|
718
|
+
// Position expressions
|
|
719
|
+
const margin = 20;
|
|
720
|
+
let xExpr: string, yExpr: string;
|
|
721
|
+
switch (position) {
|
|
722
|
+
case 'top-left': xExpr = String(margin); yExpr = String(margin); break;
|
|
723
|
+
case 'top-right': xExpr = `W-w-${margin}`; yExpr = String(margin); break;
|
|
724
|
+
case 'bottom-left': xExpr = String(margin); yExpr = `H-h-${margin}`; break;
|
|
725
|
+
case 'center': xExpr = '(W-w)/2'; yExpr = '(H-h)/2'; break;
|
|
726
|
+
case 'bottom-right':
|
|
727
|
+
default: xExpr = `W-w-${margin}`; yExpr = `H-h-${margin}`; break;
|
|
728
|
+
}
|
|
729
|
+
|
|
730
|
+
// Enable expression for time-limited overlay
|
|
731
|
+
const enableExpr = endTime
|
|
732
|
+
? `:enable='between(t,${startTime},${endTime})'`
|
|
733
|
+
: startTime > 0 ? `:enable='gte(t,${startTime})'` : '';
|
|
734
|
+
|
|
735
|
+
let filterComplex: string;
|
|
736
|
+
|
|
737
|
+
if (borderWidth > 0) {
|
|
738
|
+
// Add border pad around overlay
|
|
739
|
+
filterComplex = [
|
|
740
|
+
`[1:v]scale=iw*${s}:ih*${s}[pip_raw]`,
|
|
741
|
+
`[pip_raw]pad=iw+${borderWidth * 2}:ih+${borderWidth * 2}:${borderWidth}:${borderWidth}:color=${borderColor}[pip]`,
|
|
742
|
+
`[0:v][pip]overlay=${xExpr}:${yExpr}${enableExpr}[out]`,
|
|
743
|
+
].join(';');
|
|
744
|
+
} else {
|
|
745
|
+
filterComplex = [
|
|
746
|
+
`[1:v]scale=iw*${s}:ih*${s}[pip]`,
|
|
747
|
+
`[0:v][pip]overlay=${xExpr}:${yExpr}${enableExpr}[out]`,
|
|
748
|
+
].join(';');
|
|
749
|
+
}
|
|
750
|
+
|
|
751
|
+
const args = [
|
|
752
|
+
'-y',
|
|
753
|
+
'-i', mainVideo,
|
|
754
|
+
'-i', overlayVideo,
|
|
755
|
+
'-filter_complex', filterComplex,
|
|
756
|
+
'-map', '[out]', '-map', '0:a?',
|
|
757
|
+
'-c:v', 'libx264', '-crf', '18', '-preset', 'medium',
|
|
758
|
+
'-pix_fmt', 'yuv420p', '-c:a', 'aac', '-b:a', '192k',
|
|
759
|
+
'-shortest',
|
|
760
|
+
'-movflags', '+faststart',
|
|
761
|
+
outputPath,
|
|
762
|
+
];
|
|
763
|
+
|
|
764
|
+
await runFfmpeg(args);
|
|
765
|
+
logger.info(`PiP composed: ${outputPath} (${fileInfo(outputPath)})`);
|
|
766
|
+
return outputPath;
|
|
767
|
+
}
|
|
768
|
+
|
|
769
|
+
// ─── 11. Audio Ducking ──────────────────────────────────────────────
|
|
770
|
+
|
|
771
|
+
/** Options for addAudioDucking. */
export interface AudioDuckingConfig {
  inputPath: string;
  outputPath: string;
  /**
   * Target gain fraction for loud passages: 0.0-1.0 (default: 0.3 = reduce to 30%).
   * Applied via compand dynamic-range compression on the whole track — loud
   * parts above the threshold are attenuated; there is no speech/sidechain
   * detection involved.
   */
  duckLevel?: number;
  /** Compressor attack time in seconds (default: 0.5) */
  attack?: number;
  /** Compressor release time in seconds (default: 1.0) */
  release?: number;
}
|
|
781
|
+
|
|
782
|
+
export async function addAudioDucking(config: AudioDuckingConfig): Promise<string> {
|
|
783
|
+
const {
|
|
784
|
+
inputPath, outputPath,
|
|
785
|
+
duckLevel = 0.3, attack = 0.5, release = 1.0,
|
|
786
|
+
} = config;
|
|
787
|
+
|
|
788
|
+
assertExists(inputPath, 'Input video');
|
|
789
|
+
ensureDir(outputPath);
|
|
790
|
+
|
|
791
|
+
logger.info(`Audio ducking: level=${duckLevel}, attack=${attack}s, release=${release}s`);
|
|
792
|
+
|
|
793
|
+
// Use compand filter to reduce loud parts and normalize quiet
|
|
794
|
+
// This simulates ducking by applying dynamic range compression
|
|
795
|
+
const threshold = -20; // dB threshold
|
|
796
|
+
const ratio = (1 / duckLevel).toFixed(1);
|
|
797
|
+
|
|
798
|
+
const args = [
|
|
799
|
+
'-y', '-i', inputPath,
|
|
800
|
+
'-af', `compand=attacks=${attack}:decays=${release}:points=-80/-80|${threshold}/${threshold}|0/-${Math.round((1 - duckLevel) * 20)}:gain=0`,
|
|
801
|
+
'-c:v', 'copy',
|
|
802
|
+
'-movflags', '+faststart',
|
|
803
|
+
outputPath,
|
|
804
|
+
];
|
|
805
|
+
|
|
806
|
+
await runFfmpeg(args);
|
|
807
|
+
logger.info(`Audio ducking applied: ${outputPath} (${fileInfo(outputPath)})`);
|
|
808
|
+
return outputPath;
|
|
809
|
+
}
|