apexify.js 5.0.3 → 5.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +352 -137
- package/README.md +440 -19
- package/dist/cjs/Canvas/ApexPainter.d.ts +272 -0
- package/dist/cjs/Canvas/ApexPainter.d.ts.map +1 -1
- package/dist/cjs/Canvas/ApexPainter.js +2275 -125
- package/dist/cjs/Canvas/ApexPainter.js.map +1 -1
- package/dist/cjs/Canvas/utils/Custom/advancedLines.d.ts +4 -4
- package/dist/cjs/Canvas/utils/Custom/advancedLines.d.ts.map +1 -1
- package/dist/cjs/Canvas/utils/Custom/advancedLines.js +63 -21
- package/dist/cjs/Canvas/utils/Custom/advancedLines.js.map +1 -1
- package/dist/cjs/Canvas/utils/Custom/customLines.d.ts.map +1 -1
- package/dist/cjs/Canvas/utils/Custom/customLines.js +3 -0
- package/dist/cjs/Canvas/utils/Custom/customLines.js.map +1 -1
- package/dist/cjs/Canvas/utils/types.d.ts +5 -1
- package/dist/cjs/Canvas/utils/types.d.ts.map +1 -1
- package/dist/cjs/Canvas/utils/types.js.map +1 -1
- package/dist/esm/Canvas/ApexPainter.d.ts +272 -0
- package/dist/esm/Canvas/ApexPainter.d.ts.map +1 -1
- package/dist/esm/Canvas/ApexPainter.js +2275 -125
- package/dist/esm/Canvas/ApexPainter.js.map +1 -1
- package/dist/esm/Canvas/utils/Custom/advancedLines.d.ts +4 -4
- package/dist/esm/Canvas/utils/Custom/advancedLines.d.ts.map +1 -1
- package/dist/esm/Canvas/utils/Custom/advancedLines.js +63 -21
- package/dist/esm/Canvas/utils/Custom/advancedLines.js.map +1 -1
- package/dist/esm/Canvas/utils/Custom/customLines.d.ts.map +1 -1
- package/dist/esm/Canvas/utils/Custom/customLines.js +3 -0
- package/dist/esm/Canvas/utils/Custom/customLines.js.map +1 -1
- package/dist/esm/Canvas/utils/types.d.ts +5 -1
- package/dist/esm/Canvas/utils/types.d.ts.map +1 -1
- package/dist/esm/Canvas/utils/types.js.map +1 -1
- package/lib/Canvas/ApexPainter.ts +2973 -136
- package/lib/Canvas/utils/Custom/advancedLines.ts +77 -25
- package/lib/Canvas/utils/Custom/customLines.ts +4 -0
- package/lib/Canvas/utils/types.ts +6 -2
- package/package.json +1 -3
package/dist/cjs/Canvas/ApexPainter.js

@@ -6,11 +6,13 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.ApexPainter = void 0;
 const canvas_1 = require("@napi-rs/canvas");
 const gifencoder_1 = __importDefault(require("gifencoder"));
-const fluent_ffmpeg_1 = __importDefault(require("fluent-ffmpeg"));
 const stream_1 = require("stream");
+const child_process_1 = require("child_process");
+const util_1 = require("util");
 const axios_1 = __importDefault(require("axios"));
 const fs_1 = __importDefault(require("fs"));
 const path_1 = __importDefault(require("path"));
+const execAsync = (0, util_1.promisify)(child_process_1.exec);
 const utils_1 = require("./utils/utils");
 const enhancedTextRenderer_1 = require("./utils/Texts/enhancedTextRenderer");
 const enhancedPatternRenderer_1 = require("./utils/Patterns/enhancedPatternRenderer");
@@ -130,7 +132,7 @@ class ApexPainter {
         // Handle video background inherit sizing
         if (canvas.videoBg) {
             try {
-                const frameBuffer = await this.#extractVideoFrame(canvas.videoBg.source, canvas.videoBg.frame ?? 0);
+                const frameBuffer = await this.#extractVideoFrame(canvas.videoBg.source, canvas.videoBg.frame ?? 0, canvas.videoBg.time, canvas.videoBg.format || 'jpg', canvas.videoBg.quality || 2);
                 if (frameBuffer) {
                     const img = await (0, canvas_1.loadImage)(frameBuffer);
                     if (!canvas.width)
@@ -171,23 +173,60 @@ class ApexPainter {
         if (typeof blendMode === 'string') {
             ctx.globalCompositeOperation = blendMode;
         }
-        // Draw
+        // Draw background - videoBg takes priority, then customBg, then gradientBg, then colorBg
         if (videoBg) {
             try {
-
-
-
-
-
-
+                // For videoBg, always use PNG format to ensure compatibility with loadImage
+                // The rgb24 pixel format for JPEG can cause issues with loadImage
+                const frameBuffer = await this.#extractVideoFrame(videoBg.source, videoBg.frame ?? 0, videoBg.time, 'png', // Force PNG format for videoBg to ensure proper color rendering
+                2);
+                if (frameBuffer && frameBuffer.length > 0) {
+                    // Try loading from buffer first, if that fails, save to temp file and load from file
+                    // This is a workaround for potential buffer compatibility issues with loadImage
+                    let videoImg;
+                    try {
+                        videoImg = await (0, canvas_1.loadImage)(frameBuffer);
+                    }
+                    catch (bufferError) {
+                        // If loading from buffer fails, try saving to temp file and loading from file
+                        const tempFramePath = path_1.default.join(process.cwd(), '.temp-frames', `video-bg-temp-${Date.now()}.png`);
+                        const frameDir = path_1.default.dirname(tempFramePath);
+                        if (!fs_1.default.existsSync(frameDir)) {
+                            fs_1.default.mkdirSync(frameDir, { recursive: true });
+                        }
+                        fs_1.default.writeFileSync(tempFramePath, frameBuffer);
+                        videoImg = await (0, canvas_1.loadImage)(tempFramePath);
+                        // Cleanup temp file after loading
+                        if (fs_1.default.existsSync(tempFramePath)) {
+                            fs_1.default.unlinkSync(tempFramePath);
+                        }
+                    }
+                    if (videoImg && videoImg.width > 0 && videoImg.height > 0) {
+                        ctx.globalAlpha = videoBg.opacity ?? 1;
+                        // Draw the video frame to fill the entire canvas
+                        ctx.drawImage(videoImg, 0, 0, width, height);
+                        ctx.globalAlpha = opacity;
+                    }
+                    else {
+                        throw new Error(`Extracted video frame has invalid dimensions: ${videoImg?.width}x${videoImg?.height}`);
+                    }
+                }
+                else {
+                    throw new Error('Frame extraction returned empty buffer');
                 }
             }
             catch (e) {
-
+                const errorMsg = e instanceof Error ? e.message : 'Unknown error';
+                // Re-throw FFmpeg installation errors so user sees installation guide
+                if (errorMsg.includes('FFMPEG NOT FOUND') || errorMsg.includes('FFmpeg')) {
+                    throw e;
+                }
+                // Re-throw other errors instead of silently failing with black background
+                throw new Error(`createCanvas: videoBg extraction failed: ${errorMsg}`);
             }
         }
-
-
+        else if (customBg) {
+            // Draw custom background with filters and opacity support
             await (0, utils_1.customBackground)(ctx, { ...canvas, blur });
             // Apply filters to background if specified
             if (customBg.filters && customBg.filters.length > 0) {
@@ -215,6 +254,7 @@ class ApexPainter {
             await (0, utils_1.drawBackgroundGradient)(ctx, { ...canvas, blur });
         }
         else {
+            // Default to black background if no background is specified
             await (0, utils_1.drawBackgroundColor)(ctx, { ...canvas, blur, colorBg: colorBg ?? '#000' });
         }
         if (patternBg)
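
For orientation, a minimal usage sketch of the new videoBg background handled in this hunk. It assumes a no-argument ApexPainter constructor and that createCanvas accepts the option names read by the code above (source, frame, time, format, quality, opacity); treat it as an illustration of this diff, not documented API.

    const { ApexPainter } = require("apexify.js");

    const painter = new ApexPainter();
    // Use the frame at t = 2.5 s of a local clip as the canvas background.
    const card = await painter.createCanvas({
        width: 1280,
        height: 720,
        videoBg: {
            source: "./clip.mp4", // local path, URL, or Buffer
            time: 2.5,            // seconds; when set it takes precedence over `frame`
            opacity: 1
        }
    });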
@@ -863,18 +903,21 @@ class ApexPainter {
             bufferStream.on('data', (chunk) => {
                 chunks.push(chunk);
             });
-
-
-
-
-            }
+            // Properly extend the stream object
+            const extendedStream = bufferStream;
+            extendedStream.getBuffer = function () {
+                return Buffer.concat(chunks);
             };
+            extendedStream.chunks = chunks;
+            return extendedStream;
         }
         // Validation is done in #validateGIFOptions
         const canvasWidth = options.width || 1200;
         const canvasHeight = options.height || 1200;
         const encoder = new gifencoder_1.default(canvasWidth, canvasHeight);
-
+        // Use buffer stream for buffer/base64/attachment, file stream only for 'file' format
+        const useBufferStream = options.outputFormat !== "file";
+        const outputStream = useBufferStream ? createBufferStream() : (options.outputFile ? createOutputStream(options.outputFile) : createBufferStream());
         encoder.createReadStream().pipe(outputStream);
         encoder.start();
         encoder.setRepeat(options.repeat || 0);
@@ -901,12 +944,17 @@ class ApexPainter {
             encoder.addFrame(ctx);
         }
         encoder.finish();
-        outputStream.end();
         if (options.outputFormat === "file") {
+            outputStream.end();
             await new Promise((resolve) => outputStream.on("finish", () => resolve()));
         }
         else if (options.outputFormat === "base64") {
-
+            // Wait for stream to finish before getting buffer
+            await new Promise((resolve) => {
+                outputStream.on("end", () => resolve());
+                outputStream.end();
+            });
+            if ('getBuffer' in outputStream && typeof outputStream.getBuffer === 'function') {
                 return outputStream.getBuffer().toString("base64");
             }
             throw new Error("createGIF: Unable to get buffer for base64 output.");
@@ -916,7 +964,12 @@ class ApexPainter {
             return [{ attachment: gifStream, name: "gif.js" }];
         }
         else if (options.outputFormat === "buffer") {
-
+            // Wait for stream to finish before getting buffer
+            await new Promise((resolve) => {
+                outputStream.on("end", () => resolve());
+                outputStream.end();
+            });
+            if ('getBuffer' in outputStream && typeof outputStream.getBuffer === 'function') {
                 return outputStream.getBuffer();
             }
             throw new Error("createGIF: Unable to get buffer for buffer output.");
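
A hedged sketch of how the reworked GIF output path above could be driven (painter as in the earlier sketch). Only the options visible in these hunks are shown (width, height, repeat, outputFormat, outputFile); createGIF's frame-source options are defined elsewhere in the file and are omitted here.

    // outputFormat: "file" | "base64" | "attachment" | "buffer"
    const gifBuffer = await painter.createGIF({
        width: 512,
        height: 512,
        repeat: 0,               // gifencoder semantics: 0 = loop forever
        outputFormat: "buffer"   // buffer/base64/attachment now use the in-memory stream
        // ...frame options omitted (not part of these hunks)
    });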
@@ -1239,24 +1292,160 @@ class ApexPainter {
             throw new Error(`cropImage failed: ${errorMessage}`);
         }
     }
+    _ffmpegAvailable = null;
+    _ffmpegChecked = false;
+    _ffmpegPath = null;
+    /**
+     * Gets comprehensive FFmpeg installation instructions based on OS
+     * @private
+     * @returns Detailed installation instructions
+     */
+    #getFFmpegInstallInstructions() {
+        const os = process.platform;
+        let instructions = '\n\n📹 FFMPEG INSTALLATION GUIDE\n';
+        instructions += '═'.repeat(50) + '\n\n';
+        if (os === 'win32') {
+            instructions += '🪟 WINDOWS INSTALLATION:\n\n';
+            instructions += 'OPTION 1 - Using Chocolatey (Recommended):\n';
+            instructions += ' 1. Open PowerShell as Administrator\n';
+            instructions += ' 2. Run: choco install ffmpeg\n';
+            instructions += ' 3. Restart your terminal\n\n';
+            instructions += 'OPTION 2 - Using Winget:\n';
+            instructions += ' 1. Open PowerShell\n';
+            instructions += ' 2. Run: winget install ffmpeg\n';
+            instructions += ' 3. Restart your terminal\n\n';
+            instructions += 'OPTION 3 - Manual Installation:\n';
+            instructions += ' 1. Visit: https://www.gyan.dev/ffmpeg/builds/\n';
+            instructions += ' 2. Download "ffmpeg-release-essentials.zip"\n';
+            instructions += ' 3. Extract to C:\\ffmpeg\n';
+            instructions += ' 4. Add C:\\ffmpeg\\bin to System PATH:\n';
+            instructions += ' - Press Win + X → System → Advanced → Environment Variables\n';
+            instructions += ' - Edit "Path" → Add "C:\\ffmpeg\\bin"\n';
+            instructions += ' 5. Restart terminal and verify: ffmpeg -version\n\n';
+            instructions += '🔍 Search Terms: "install ffmpeg windows", "ffmpeg windows tutorial"\n';
+            instructions += '📺 YouTube: Search "How to install FFmpeg on Windows 2024"\n';
+            instructions += '🌐 Official: https://ffmpeg.org/download.html\n';
+        }
+        else if (os === 'darwin') {
+            instructions += '🍎 macOS INSTALLATION:\n\n';
+            instructions += 'OPTION 1 - Using Homebrew (Recommended):\n';
+            instructions += ' 1. Install Homebrew if not installed:\n';
+            instructions += ' /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"\n';
+            instructions += ' 2. Run: brew install ffmpeg\n';
+            instructions += ' 3. Verify: ffmpeg -version\n\n';
+            instructions += 'OPTION 2 - Using MacPorts:\n';
+            instructions += ' 1. Install MacPorts from: https://www.macports.org/\n';
+            instructions += ' 2. Run: sudo port install ffmpeg\n\n';
+            instructions += '🔍 Search Terms: "install ffmpeg mac", "ffmpeg macos homebrew"\n';
+            instructions += '📺 YouTube: Search "Install FFmpeg on Mac using Homebrew"\n';
+            instructions += '🌐 Official: https://ffmpeg.org/download.html\n';
+        }
+        else {
+            instructions += '🐧 LINUX INSTALLATION:\n\n';
+            instructions += 'Ubuntu/Debian:\n';
+            instructions += ' sudo apt-get update\n';
+            instructions += ' sudo apt-get install ffmpeg\n\n';
+            instructions += 'RHEL/CentOS/Fedora:\n';
+            instructions += ' sudo yum install ffmpeg\n';
+            instructions += ' # OR for newer versions:\n';
+            instructions += ' sudo dnf install ffmpeg\n\n';
+            instructions += 'Arch Linux:\n';
+            instructions += ' sudo pacman -S ffmpeg\n\n';
+            instructions += '🔍 Search Terms: "install ffmpeg [your-distro]", "ffmpeg linux tutorial"\n';
+            instructions += '📺 YouTube: Search "Install FFmpeg on Linux"\n';
+            instructions += '🌐 Official: https://ffmpeg.org/download.html\n';
+        }
+        instructions += '\n' + '═'.repeat(50) + '\n';
+        instructions += '✅ After installation, restart your terminal and verify with: ffmpeg -version\n';
+        instructions += '💡 If still not working, ensure FFmpeg is in your system PATH\n';
+        return instructions;
+    }
+    /**
+     * Checks if ffmpeg is available in the system (cached check)
+     * @private
+     * @returns Promise<boolean> - True if ffmpeg is available
+     */
+    async #checkFFmpegAvailable() {
+        // Cache the result to avoid multiple checks
+        if (this._ffmpegChecked) {
+            return this._ffmpegAvailable ?? false;
+        }
+        try {
+            // Try to execute ffmpeg -version (suppress output)
+            await execAsync('ffmpeg -version', {
+                timeout: 5000,
+                maxBuffer: 1024 * 1024 // 1MB buffer
+            });
+            this._ffmpegAvailable = true;
+            this._ffmpegChecked = true;
+            this._ffmpegPath = 'ffmpeg';
+            return true;
+        }
+        catch {
+            // Try common installation paths
+            const commonPaths = process.platform === 'win32' ? [
+                'C:\\ffmpeg\\bin\\ffmpeg.exe',
+                'C:\\Program Files\\ffmpeg\\bin\\ffmpeg.exe',
+                'C:\\Program Files (x86)\\ffmpeg\\bin\\ffmpeg.exe'
+            ] : [
+                '/usr/bin/ffmpeg',
+                '/usr/local/bin/ffmpeg',
+                '/opt/homebrew/bin/ffmpeg',
+                '/opt/local/bin/ffmpeg'
+            ];
+            for (const ffmpegPath of commonPaths) {
+                try {
+                    await execAsync(`"${ffmpegPath}" -version`, {
+                        timeout: 3000,
+                        maxBuffer: 1024 * 1024
+                    });
+                    this._ffmpegAvailable = true;
+                    this._ffmpegChecked = true;
+                    this._ffmpegPath = ffmpegPath;
+                    return true;
+                }
+                catch {
+                    continue;
+                }
+            }
+            this._ffmpegAvailable = false;
+            this._ffmpegChecked = true;
+            return false;
+        }
+    }
+    /**
+     * Gets video information (duration, resolution, fps, etc.)
+     * @param videoSource - Video source (path, URL, or Buffer)
+     * @returns Video metadata object
+     */
     /**
-
-
-
-
-
-
-    async #extractVideoFrame(videoSource, frameNumber = 0) {
+     * Gets video information (duration, resolution, fps, etc.)
+     * @param videoSource - Video source (path, URL, or Buffer)
+     * @param skipFFmpegCheck - Skip FFmpeg availability check (for internal use, default: false)
+     * @returns Video metadata object
+     */
+    async getVideoInfo(videoSource, skipFFmpegCheck = false) {
         try {
+            // Skip FFmpeg check if we already know it's available (for internal calls)
+            if (!skipFFmpegCheck) {
+                const ffmpegAvailable = await this.#checkFFmpegAvailable();
+                if (!ffmpegAvailable) {
+                    const errorMessage = '❌ FFMPEG NOT FOUND\n' +
+                        'Video processing features require FFmpeg to be installed on your system.\n' +
+                        this.#getFFmpegInstallInstructions();
+                    throw new Error(errorMessage);
+                }
+            }
             const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
             if (!fs_1.default.existsSync(frameDir)) {
                 fs_1.default.mkdirSync(frameDir, { recursive: true });
             }
+            let videoPath;
             const tempVideoPath = path_1.default.join(frameDir, `temp-video-${Date.now()}.mp4`);
-            const frameOutputPath = path_1.default.join(frameDir, `frame-${Date.now()}.jpg`);
             // Handle video source
             if (Buffer.isBuffer(videoSource)) {
                 fs_1.default.writeFileSync(tempVideoPath, videoSource);
+                videoPath = tempVideoPath;
             }
             else if (typeof videoSource === 'string' && videoSource.startsWith('http')) {
                 const response = await (0, axios_1.default)({
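
The new public getVideoInfo method shells out to ffprobe and resolves to the result object built in the next hunk. A minimal call, assuming an ApexPainter instance named painter:

    const info = await painter.getVideoInfo("./clip.mp4");
    // => { duration, width, height, fps, bitrate, format }
    console.log(`${info.width}x${info.height} @ ${Math.round(info.fps)} fps, ${info.duration}s, ${info.format}`);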
@@ -1265,70 +1454,188 @@ class ApexPainter {
                     responseType: 'arraybuffer'
                 });
                 fs_1.default.writeFileSync(tempVideoPath, Buffer.from(response.data));
+                videoPath = tempVideoPath;
             }
             else {
-                // Local file path
                 if (!fs_1.default.existsSync(videoSource)) {
                     throw new Error(`Video file not found: ${videoSource}`);
                 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                videoPath = videoSource;
+            }
+            // Use ffprobe to get video info (escape path for Windows)
+            const escapedPath = videoPath.replace(/"/g, '\\"');
+            const { stdout } = await execAsync(`ffprobe -v error -show_entries stream=width,height,r_frame_rate,bit_rate -show_entries format=duration,format_name -of json "${escapedPath}"`, {
+                timeout: 30000, // 30 second timeout
+                maxBuffer: 10 * 1024 * 1024 // 10MB buffer for large JSON responses
+            });
+            const info = JSON.parse(stdout);
+            const videoStream = info.streams?.find((s) => s.width && s.height) || info.streams?.[0];
+            const format = info.format || {};
+            // Parse frame rate (e.g., "30/1" -> 30)
+            const fps = videoStream?.r_frame_rate
+                ? (() => {
+                    const [num, den] = videoStream.r_frame_rate.split('/').map(Number);
+                    return den ? num / den : num;
+                })()
+                : 30;
+            const result = {
+                duration: parseFloat(format.duration || '0'),
+                width: parseInt(videoStream?.width || '0'),
+                height: parseInt(videoStream?.height || '0'),
+                fps: fps,
+                bitrate: parseInt(videoStream?.bit_rate || format.bit_rate || '0'),
+                format: format.format_name || 'unknown'
+            };
+            // Cleanup temp file if created
+            if (videoPath === tempVideoPath && fs_1.default.existsSync(tempVideoPath)) {
+                fs_1.default.unlinkSync(tempVideoPath);
+            }
+            return result;
+        }
+        catch (error) {
+            const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+            // Re-throw FFmpeg installation errors
+            if (errorMessage.includes('FFMPEG NOT FOUND') || errorMessage.includes('FFmpeg')) {
+                throw error;
+            }
+            throw new Error(`getVideoInfo failed: ${errorMessage}`);
+        }
+    }
+    /**
+     * Extracts a single frame from a video at a specific time or frame number
+     * @private
+     * @param videoSource - Video source (path, URL, or Buffer)
+     * @param frameNumber - Frame number to extract (default: 0)
+     * @param timeSeconds - Alternative: time in seconds (overrides frameNumber if provided)
+     * @param outputFormat - Output image format ('jpg' or 'png', default: 'jpg')
+     * @param quality - JPEG quality 1-31 (lower = better, default: 2) or PNG compression
+     * @returns Buffer containing the frame image
+     */
+    async #extractVideoFrame(videoSource, frameNumber = 0, timeSeconds, outputFormat = 'jpg', quality = 2) {
+        try {
+            const ffmpegAvailable = await this.#checkFFmpegAvailable();
+            if (!ffmpegAvailable) {
+                const errorMessage = '❌ FFMPEG NOT FOUND\n' +
+                    'Video processing features require FFmpeg to be installed on your system.\n' +
+                    this.#getFFmpegInstallInstructions();
+                throw new Error(errorMessage);
+            }
+            const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
+            if (!fs_1.default.existsSync(frameDir)) {
+                fs_1.default.mkdirSync(frameDir, { recursive: true });
+            }
+            const timestamp = Date.now();
+            const tempVideoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
+            const frameOutputPath = path_1.default.join(frameDir, `frame-${timestamp}.${outputFormat}`);
+            let videoPath;
+            let shouldCleanupVideo = false;
+            // Handle video source
+            if (Buffer.isBuffer(videoSource)) {
+                fs_1.default.writeFileSync(tempVideoPath, videoSource);
+                videoPath = tempVideoPath;
+                shouldCleanupVideo = true;
+            }
+            else if (typeof videoSource === 'string' && videoSource.startsWith('http')) {
+                const response = await (0, axios_1.default)({
+                    method: 'get',
+                    url: videoSource,
+                    responseType: 'arraybuffer'
                 });
+                fs_1.default.writeFileSync(tempVideoPath, Buffer.from(response.data));
+                videoPath = tempVideoPath;
+                shouldCleanupVideo = true;
+            }
+            else {
+                // Resolve relative paths (similar to customBackground)
+                let resolvedPath = videoSource;
+                if (!/^https?:\/\//i.test(resolvedPath)) {
+                    resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
+                }
+                if (!fs_1.default.existsSync(resolvedPath)) {
+                    throw new Error(`Video file not found: ${videoSource} (resolved to: ${resolvedPath})`);
+                }
+                videoPath = resolvedPath;
+            }
+            // Calculate time in seconds
+            // If time is provided, use it directly (most accurate)
+            // If only frame is provided, we need to get video FPS to convert frame to time
+            let time;
+            if (timeSeconds !== undefined) {
+                time = timeSeconds;
             }
-
-
-
-
-
-
-
-
-
-
-            if (fs_1.default.existsSync(tempVideoPath))
-                fs_1.default.unlinkSync(tempVideoPath);
-            if (fs_1.default.existsSync(frameOutputPath))
-                fs_1.default.unlinkSync(frameOutputPath);
-            resolve(buffer);
+            else if (frameNumber === 0) {
+                // Frame 0 = start of video
+                time = 0;
+            }
+            else {
+                // Get video FPS to convert frame number to time accurately
+                try {
+                    const videoInfo = await this.getVideoInfo(videoPath, true); // Skip FFmpeg check (already done)
+                    if (videoInfo && videoInfo.fps > 0) {
+                        time = frameNumber / videoInfo.fps;
                     }
-
-
+                    else {
+                        // Fallback to 30 FPS if we can't get video info
+                        console.warn(`Could not get video FPS, assuming 30 FPS for frame ${frameNumber}`);
+                        time = frameNumber / 30;
                     }
-            }
-
-            //
-
-
-
-
-
-
-
-
+                }
+                catch (error) {
+                    // If getVideoInfo fails, assume 30 FPS (standard video framerate)
+                    console.warn(`Could not get video info, assuming 30 FPS for frame ${frameNumber}`);
+                    time = frameNumber / 30;
+                }
+            }
+            // Build ffmpeg command (escape paths for Windows)
+            // Don't use -f flag, let FFmpeg infer format from file extension
+            // Use -frames:v 1 instead of -vframes 1 (more explicit)
+            // For PNG: use rgba pixel format (best compatibility with loadImage)
+            // For JPEG: don't specify pixel format, let FFmpeg use default (yuvj420p works better than rgb24)
+            const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+            const escapedOutputPath = frameOutputPath.replace(/"/g, '\\"');
+            let command;
+            if (outputFormat === 'png') {
+                // PNG: Use rgba pixel format for best compatibility
+                const pixFmt = '-pix_fmt rgba';
+                command = `ffmpeg -i "${escapedVideoPath}" -ss ${time} -frames:v 1 ${pixFmt} -y "${escapedOutputPath}"`;
+            }
+            else {
+                // JPEG: Use quality flag, let FFmpeg choose pixel format (default works better than rgb24)
+                const qualityFlag = `-q:v ${quality}`;
+                command = `ffmpeg -i "${escapedVideoPath}" -ss ${time} -frames:v 1 ${qualityFlag} -y "${escapedOutputPath}"`;
+            }
+            try {
+                await execAsync(command, {
+                    timeout: 30000, // 30 second timeout
+                    maxBuffer: 10 * 1024 * 1024 // 10MB buffer
+                });
+                if (!fs_1.default.existsSync(frameOutputPath)) {
+                    throw new Error('Frame extraction failed - output file not created');
+                }
+                const buffer = fs_1.default.readFileSync(frameOutputPath);
+                // Cleanup
+                if (fs_1.default.existsSync(frameOutputPath))
+                    fs_1.default.unlinkSync(frameOutputPath);
+                if (shouldCleanupVideo && fs_1.default.existsSync(tempVideoPath))
+                    fs_1.default.unlinkSync(tempVideoPath);
+                return buffer;
+            }
+            catch (error) {
+                // Cleanup on error
+                if (fs_1.default.existsSync(frameOutputPath))
+                    fs_1.default.unlinkSync(frameOutputPath);
+                if (shouldCleanupVideo && fs_1.default.existsSync(tempVideoPath))
+                    fs_1.default.unlinkSync(tempVideoPath);
+                throw error;
+            }
         }
         catch (error) {
-
-
+            const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+            // Re-throw FFmpeg installation errors so user sees installation guide
+            if (errorMessage.includes('FFMPEG NOT FOUND') || errorMessage.includes('FFmpeg')) {
+                throw error;
+            }
+            throw new Error(`extractVideoFrame failed: ${errorMessage}`);
         }
     }
     /**
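
For reference, these are the shapes of the shell commands the private #extractVideoFrame helper builds above; the paths and the -ss time are placeholders.

    ffmpeg -i "video.mp4" -ss 2.5 -frames:v 1 -pix_fmt rgba -y "frame.png"   (outputFormat 'png')
    ffmpeg -i "video.mp4" -ss 2.5 -frames:v 1 -q:v 2 -y "frame.jpg"          (outputFormat 'jpg', quality 2)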
@@ -1351,77 +1658,1920 @@ class ApexPainter {
             throw new Error("extractFrames: outputFormat must be 'jpg' or 'png'.");
         }
     }
+    /**
+     * Extracts multiple frames from a video at specified intervals
+     * @param videoSource - Video source (path, URL, or Buffer)
+     * @param options - Extraction options
+     * @returns Array of frame file paths
+     */
     async extractFrames(videoSource, options) {
         try {
+            const ffmpegAvailable = await this.#checkFFmpegAvailable();
+            if (!ffmpegAvailable) {
+                const errorMessage = '❌ FFMPEG NOT FOUND\n' +
+                    'Video processing features require FFmpeg to be installed on your system.\n' +
+                    this.#getFFmpegInstallInstructions();
+                throw new Error(errorMessage);
+            }
             this.#validateExtractFramesInputs(videoSource, options);
             const frames = [];
-            const frameDir = path_1.default.join(
+            const frameDir = path_1.default.join(process.cwd(), '.temp-frames', `frames-${Date.now()}`);
             if (!fs_1.default.existsSync(frameDir)) {
-                fs_1.default.mkdirSync(frameDir);
+                fs_1.default.mkdirSync(frameDir, { recursive: true });
             }
-            const
+            const timestamp = Date.now();
+            const videoPath = typeof videoSource === 'string' ? videoSource : path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
+            let shouldCleanupVideo = false;
+            // Handle video source
             if (Buffer.isBuffer(videoSource)) {
                 fs_1.default.writeFileSync(videoPath, videoSource);
+                shouldCleanupVideo = true;
             }
-            else if (videoSource.startsWith('http')) {
-                await (0, axios_1.default)({
+            else if (typeof videoSource === 'string' && videoSource.startsWith('http')) {
+                const response = await (0, axios_1.default)({
                     method: 'get',
                     url: videoSource,
                     responseType: 'arraybuffer'
-                })
-                    .then((response) => {
-                    fs_1.default.writeFileSync(videoPath, response.data);
-                })
-                    .catch(err => {
-                    throw new Error(`Error downloading video: ${err.message}`);
                 });
+                fs_1.default.writeFileSync(videoPath, Buffer.from(response.data));
+                shouldCleanupVideo = true;
             }
             else if (!fs_1.default.existsSync(videoPath)) {
                 throw new Error("Video file not found at specified path.");
             }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            // Get video duration using ffprobe (escape path for Windows)
+            const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+            const { stdout: probeOutput } = await execAsync(`ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 "${escapedVideoPath}"`, { maxBuffer: 10 * 1024 * 1024 } // 10MB buffer
+            );
+            const duration = parseFloat(probeOutput.trim());
+            if (isNaN(duration) || duration <= 0) {
+                throw new Error("Video duration not found in metadata.");
+            }
+            const outputFormat = options.outputFormat || 'jpg';
+            const fps = 1000 / options.interval; // Frames per second based on interval
+            const totalFrames = Math.floor(duration * fps);
+            // Apply frame selection if specified
+            const startFrame = options.frameSelection?.start || 0;
+            const endFrame = options.frameSelection?.end !== undefined
+                ? Math.min(options.frameSelection.end, totalFrames - 1)
+                : totalFrames - 1;
+            // Build ffmpeg command for frame extraction
+            const outputFileTemplate = path_1.default.join(frameDir, `frame-%03d.${outputFormat}`);
+            const qualityFlag = outputFormat === 'jpg' ? '-q:v 2' : '';
+            const pixFmt = outputFormat === 'png' ? '-pix_fmt rgba' : '-pix_fmt yuvj420p';
+            // Calculate start and end times
+            const startTime = startFrame / fps;
+            const endTime = (endFrame + 1) / fps;
+            const durationToExtract = endTime - startTime;
+            const escapedOutputTemplate = outputFileTemplate.replace(/"/g, '\\"');
+            // Don't use -f flag, let FFmpeg infer format from file extension
+            // -vf fps=${fps} extracts frames at the specified FPS
+            // Use -ss after -i for more accurate frame extraction
+            const command = `ffmpeg -i "${escapedVideoPath}" -ss ${startTime} -t ${durationToExtract} -vf fps=${fps} ${pixFmt} ${qualityFlag} -y "${escapedOutputTemplate}"`;
+            try {
+                await execAsync(command, {
+                    timeout: 60000, // 60 second timeout for multiple frames
+                    maxBuffer: 10 * 1024 * 1024 // 10MB buffer
+                });
+                // Collect all extracted frame files
+                const actualFrameCount = endFrame - startFrame + 1;
+                for (let i = 0; i < actualFrameCount; i++) {
+                    const frameNumber = startFrame + i;
+                    const framePath = path_1.default.join(frameDir, `frame-${String(i + 1).padStart(3, '0')}.${outputFormat}`);
+                    if (fs_1.default.existsSync(framePath)) {
                         frames.push({
-                            source:
+                            source: framePath,
                             isRemote: false
                         });
                     }
-            }
+                }
+                // Cleanup temp video if created
+                if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
+                    fs_1.default.unlinkSync(videoPath);
+                }
+                return frames;
+            }
+            catch (error) {
+                // Cleanup on error
+                if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
+                    fs_1.default.unlinkSync(videoPath);
+                }
+                throw error;
             }
-            return new Promise((resolve, reject) => {
-                processVideoExtraction(videoPath, frames, options, resolve, reject);
-            });
         }
         catch (error) {
             const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+            // Re-throw FFmpeg installation errors so user sees installation guide
+            if (errorMessage.includes('FFMPEG NOT FOUND') || errorMessage.includes('FFmpeg')) {
+                throw error;
+            }
             throw new Error(`extractFrames failed: ${errorMessage}`);
         }
     }
/**
|
|
1772
|
+
* Comprehensive video processing method - all video features in one place
|
|
1773
|
+
* @param options - Video processing options
|
|
1774
|
+
* @returns Results based on the operation requested
|
|
1775
|
+
*/
|
|
1776
|
+
async createVideo(options) {
|
|
1777
|
+
try {
|
|
1778
|
+
const ffmpegAvailable = await this.#checkFFmpegAvailable();
|
|
1779
|
+
if (!ffmpegAvailable) {
|
|
1780
|
+
const errorMessage = '❌ FFMPEG NOT FOUND\n' +
|
|
1781
|
+
'Video processing features require FFmpeg to be installed on your system.\n' +
|
|
1782
|
+
this.#getFFmpegInstallInstructions();
|
|
1783
|
+
throw new Error(errorMessage);
|
|
1784
|
+
}
|
|
1785
|
+
// Get video info if requested or needed
|
|
1786
|
+
let videoInfo = null;
|
|
1787
|
+
if (options.getInfo || options.extractFrame?.frame || options.generateThumbnail || options.generatePreview) {
|
|
1788
|
+
videoInfo = await this.getVideoInfo(options.source, true);
|
|
1789
|
+
}
|
|
1790
|
+
// Handle getInfo
|
|
1791
|
+
if (options.getInfo) {
|
|
1792
|
+
return videoInfo || await this.getVideoInfo(options.source, true);
|
|
1793
|
+
}
|
|
1794
|
+
// Handle extractFrame (creates canvas)
|
|
1795
|
+
if (options.extractFrame) {
|
|
1796
|
+
const frameBuffer = await this.#extractVideoFrame(options.source, options.extractFrame.frame ?? 0, options.extractFrame.time, options.extractFrame.outputFormat || 'png', options.extractFrame.quality || 2);
|
|
1797
|
+
if (!frameBuffer || frameBuffer.length === 0) {
|
|
1798
|
+
throw new Error('Failed to extract video frame');
|
|
1799
|
+
}
|
|
1800
|
+
const frameImage = await (0, canvas_1.loadImage)(frameBuffer);
|
|
1801
|
+
const videoWidth = frameImage.width;
|
|
1802
|
+
const videoHeight = frameImage.height;
|
|
1803
|
+
const width = options.extractFrame.width ?? videoWidth;
|
|
1804
|
+
const height = options.extractFrame.height ?? videoHeight;
|
|
1805
|
+
const canvas = (0, canvas_1.createCanvas)(width, height);
|
|
1806
|
+
const ctx = canvas.getContext('2d');
|
|
1807
|
+
if (!ctx) {
|
|
1808
|
+
throw new Error('Unable to get 2D context');
|
|
1809
|
+
}
|
|
1810
|
+
ctx.drawImage(frameImage, 0, 0, width, height);
|
|
1811
|
+
return {
|
|
1812
|
+
buffer: canvas.toBuffer('image/png'),
|
|
1813
|
+
canvas: { width, height }
|
|
1814
|
+
};
|
|
1815
|
+
}
|
|
1816
|
+
// Handle extractFrames (multiple frames at specific times or intervals)
|
|
1817
|
+
if (options.extractFrames) {
|
|
1818
|
+
if (options.extractFrames.times) {
|
|
1819
|
+
// Extract frames at specific times
|
|
1820
|
+
const frames = [];
|
|
1821
|
+
for (const time of options.extractFrames.times) {
|
|
1822
|
+
const frame = await this.#extractVideoFrame(options.source, 0, time, options.extractFrames.outputFormat || 'jpg', options.extractFrames.quality || 2);
|
|
1823
|
+
if (frame) {
|
|
1824
|
+
frames.push(frame);
|
|
1825
|
+
}
|
|
1826
|
+
}
|
|
1827
|
+
return frames;
|
|
1828
|
+
}
|
|
1829
|
+
else if (options.extractFrames.interval) {
|
|
1830
|
+
// Extract frames at intervals
|
|
1831
|
+
return await this.extractFrames(options.source, {
|
|
1832
|
+
interval: options.extractFrames.interval,
|
|
1833
|
+
outputFormat: options.extractFrames.outputFormat || 'jpg',
|
|
1834
|
+
frameSelection: options.extractFrames.frameSelection,
|
|
1835
|
+
outputDirectory: options.extractFrames.outputDirectory
|
|
1836
|
+
});
|
|
1837
|
+
}
|
|
1838
|
+
}
|
|
1839
|
+
// Handle extractAllFrames
|
|
1840
|
+
if (options.extractAllFrames) {
|
|
1841
|
+
return await this.extractAllFrames(options.source, {
|
|
1842
|
+
outputFormat: options.extractAllFrames.outputFormat,
|
|
1843
|
+
outputDirectory: options.extractAllFrames.outputDirectory,
|
|
1844
|
+
quality: options.extractAllFrames.quality,
|
|
1845
|
+
prefix: options.extractAllFrames.prefix,
|
|
1846
|
+
startTime: options.extractAllFrames.startTime,
|
|
1847
|
+
endTime: options.extractAllFrames.endTime
|
|
1848
|
+
});
|
|
1849
|
+
}
|
|
1850
|
+
// Handle generateThumbnail
|
|
1851
|
+
if (options.generateThumbnail) {
|
|
1852
|
+
return await this.#generateVideoThumbnail(options.source, options.generateThumbnail, videoInfo);
|
|
1853
|
+
}
|
|
1854
|
+
// Handle convert
|
|
1855
|
+
if (options.convert) {
|
|
1856
|
+
return await this.#convertVideo(options.source, options.convert);
|
|
1857
|
+
}
|
|
1858
|
+
// Handle trim
|
|
1859
|
+
if (options.trim) {
|
|
1860
|
+
return await this.#trimVideo(options.source, options.trim);
|
|
1861
|
+
}
|
|
1862
|
+
// Handle extractAudio
|
|
1863
|
+
if (options.extractAudio) {
|
|
1864
|
+
return await this.#extractAudio(options.source, options.extractAudio);
|
|
1865
|
+
}
|
|
1866
|
+
// Handle addWatermark
|
|
1867
|
+
if (options.addWatermark) {
|
|
1868
|
+
return await this.#addWatermarkToVideo(options.source, options.addWatermark);
|
|
1869
|
+
}
|
|
1870
|
+
// Handle changeSpeed
|
|
1871
|
+
if (options.changeSpeed) {
|
|
1872
|
+
return await this.#changeVideoSpeed(options.source, options.changeSpeed);
|
|
1873
|
+
}
|
|
1874
|
+
// Handle generatePreview
|
|
1875
|
+
if (options.generatePreview) {
|
|
1876
|
+
return await this.#generateVideoPreview(options.source, options.generatePreview, videoInfo);
|
|
1877
|
+
}
|
|
1878
|
+
// Handle applyEffects
|
|
1879
|
+
if (options.applyEffects) {
|
|
1880
|
+
return await this.#applyVideoEffects(options.source, options.applyEffects);
|
|
1881
|
+
}
|
|
1882
|
+
// Handle merge
|
|
1883
|
+
if (options.merge) {
|
|
1884
|
+
return await this.#mergeVideos(options.merge);
|
|
1885
|
+
}
|
|
1886
|
+
// Handle rotate
|
|
1887
|
+
if (options.rotate) {
|
|
1888
|
+
return await this.#rotateVideo(options.source, options.rotate);
|
|
1889
|
+
}
|
|
1890
|
+
// Handle crop
|
|
1891
|
+
if (options.crop) {
|
|
1892
|
+
return await this.#cropVideo(options.source, options.crop);
|
|
1893
|
+
}
|
|
1894
|
+
// Handle compress
|
|
1895
|
+
if (options.compress) {
|
|
1896
|
+
return await this.#compressVideo(options.source, options.compress);
|
|
1897
|
+
}
|
|
1898
|
+
// Handle addText
|
|
1899
|
+
if (options.addText) {
|
|
1900
|
+
return await this.#addTextToVideo(options.source, options.addText);
|
|
1901
|
+
}
|
|
1902
|
+
// Handle addFade
|
|
1903
|
+
if (options.addFade) {
|
|
1904
|
+
return await this.#addFadeToVideo(options.source, options.addFade);
|
|
1905
|
+
}
|
|
1906
|
+
// Handle reverse
|
|
1907
|
+
if (options.reverse) {
|
|
1908
|
+
return await this.#reverseVideo(options.source, options.reverse);
|
|
1909
|
+
}
|
|
1910
|
+
// Handle createLoop
|
|
1911
|
+
if (options.createLoop) {
|
|
1912
|
+
return await this.#createVideoLoop(options.source, options.createLoop);
|
|
1913
|
+
}
|
|
1914
|
+
// Handle batch
|
|
1915
|
+
if (options.batch) {
|
|
1916
|
+
return await this.#batchProcessVideos(options.batch);
|
|
1917
|
+
}
|
|
1918
|
+
// Handle detectScenes
|
|
1919
|
+
if (options.detectScenes) {
|
|
1920
|
+
return await this.#detectVideoScenes(options.source, options.detectScenes);
|
|
1921
|
+
}
|
|
1922
|
+
// Handle stabilize
|
|
1923
|
+
if (options.stabilize) {
|
|
1924
|
+
return await this.#stabilizeVideo(options.source, options.stabilize);
|
|
1925
|
+
}
|
|
1926
|
+
// Handle colorCorrect
|
|
1927
|
+
if (options.colorCorrect) {
|
|
1928
|
+
return await this.#colorCorrectVideo(options.source, options.colorCorrect);
|
|
1929
|
+
}
|
|
1930
|
+
// Handle pictureInPicture
|
|
1931
|
+
if (options.pictureInPicture) {
|
|
1932
|
+
return await this.#addPictureInPicture(options.source, options.pictureInPicture);
|
|
1933
|
+
}
|
|
1934
|
+
// Handle splitScreen
|
|
1935
|
+
if (options.splitScreen) {
|
|
1936
|
+
return await this.#createSplitScreen(options.splitScreen);
|
|
1937
|
+
}
|
|
1938
|
+
// Handle createTimeLapse
|
|
1939
|
+
if (options.createTimeLapse) {
|
|
1940
|
+
return await this.#createTimeLapseVideo(options.source, options.createTimeLapse);
|
|
1941
|
+
}
|
|
1942
|
+
// Handle mute
|
|
1943
|
+
if (options.mute) {
|
|
1944
|
+
return await this.#muteVideo(options.source, options.mute);
|
|
1945
|
+
}
|
|
1946
|
+
// Handle adjustVolume
|
|
1947
|
+
if (options.adjustVolume) {
|
|
1948
|
+
return await this.#adjustVideoVolume(options.source, options.adjustVolume);
|
|
1949
|
+
}
|
|
1950
|
+
// Handle detectFormat
|
|
1951
|
+
if (options.detectFormat) {
|
|
1952
|
+
const info = await this.getVideoInfo(options.source, true);
|
|
1953
|
+
// Try to get codec from ffprobe
|
|
1954
|
+
let codec = 'unknown';
|
|
1955
|
+
try {
|
|
1956
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
1957
|
+
let videoPath;
|
|
1958
|
+
if (Buffer.isBuffer(options.source)) {
|
|
1959
|
+
const tempPath = path_1.default.join(frameDir, `temp-video-${Date.now()}.mp4`);
|
|
1960
|
+
fs_1.default.writeFileSync(tempPath, options.source);
|
|
1961
|
+
videoPath = tempPath;
|
|
1962
|
+
}
|
|
1963
|
+
else {
|
|
1964
|
+
let resolvedPath = options.source;
|
|
1965
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
1966
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
1967
|
+
}
|
|
1968
|
+
videoPath = resolvedPath;
|
|
1969
|
+
}
|
|
1970
|
+
const escapedPath = videoPath.replace(/"/g, '\\"');
|
|
1971
|
+
const { stdout } = await execAsync(`ffprobe -v error -select_streams v:0 -show_entries stream=codec_name -of default=noprint_wrappers=1:nokey=1 "${escapedPath}"`, { timeout: 10000, maxBuffer: 1024 * 1024 });
|
|
1972
|
+
codec = stdout.toString().trim() || 'unknown';
|
|
1973
|
+
}
|
|
1974
|
+
catch {
|
|
1975
|
+
codec = 'unknown';
|
|
1976
|
+
}
|
|
1977
|
+
return {
|
|
1978
|
+
format: info?.format || 'unknown',
|
|
1979
|
+
codec: codec,
|
|
1980
|
+
container: info?.format || 'unknown',
|
|
1981
|
+
width: info?.width,
|
|
1982
|
+
height: info?.height,
|
|
1983
|
+
fps: info?.fps,
|
|
1984
|
+
bitrate: info?.bitrate,
|
|
1985
|
+
duration: info?.duration
|
|
1986
|
+
};
|
|
1987
|
+
}
|
|
1988
|
+
throw new Error('No video operation specified');
|
|
1989
|
+
}
|
|
1990
|
+
catch (error) {
|
|
1991
|
+
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
|
|
1992
|
+
if (errorMessage.includes('FFMPEG NOT FOUND') || errorMessage.includes('FFmpeg')) {
|
|
1993
|
+
throw error;
|
|
1994
|
+
}
|
|
1995
|
+
throw new Error(`createVideo failed: ${errorMessage}`);
|
|
1996
|
+
}
|
|
1997
|
+
}
|
|
1998
|
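
The new createVideo method is a dispatcher: it runs whichever operation block is present on the options object. Two hedged examples built only from the branches above (assumes an ApexPainter instance named painter).

    // Metadata via the getInfo branch (same result shape as getVideoInfo).
    const meta = await painter.createVideo({ source: "./clip.mp4", getInfo: true });

    // Single frame via the extractFrame branch.
    const frame = await painter.createVideo({
        source: "./clip.mp4",
        extractFrame: { time: 1.0, outputFormat: "png", width: 640, height: 360 }
    });
    // => { buffer: <PNG Buffer>, canvas: { width: 640, height: 360 } }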
+    /**
+     * Generate video thumbnail (grid of frames)
+     * @private
+     */
+    async #generateVideoThumbnail(videoSource, options, videoInfo) {
+        const count = options.count || 9;
+        const grid = options.grid || { cols: 3, rows: 3 };
+        const frameWidth = options.width || 320;
+        const frameHeight = options.height || 180;
+        const outputFormat = options.outputFormat || 'jpg';
+        const quality = options.quality || 2;
+        if (!videoInfo) {
+            videoInfo = await this.getVideoInfo(videoSource, true);
+        }
+        const duration = videoInfo.duration;
+        const interval = duration / (count + 1); // Distribute frames evenly
+        // Extract frames
+        const frames = [];
+        for (let i = 1; i <= count; i++) {
+            const time = interval * i;
+            const frame = await this.#extractVideoFrame(videoSource, 0, time, outputFormat, quality);
+            if (frame) {
+                frames.push(frame);
+            }
+        }
+        // Create thumbnail canvas
+        const thumbnailWidth = frameWidth * grid.cols;
+        const thumbnailHeight = frameHeight * grid.rows;
+        const canvas = (0, canvas_1.createCanvas)(thumbnailWidth, thumbnailHeight);
+        const ctx = canvas.getContext('2d');
+        if (!ctx) {
+            throw new Error('Unable to get 2D context');
+        }
+        // Draw frames in grid
+        for (let i = 0; i < frames.length; i++) {
+            const row = Math.floor(i / grid.cols);
+            const col = i % grid.cols;
+            const x = col * frameWidth;
+            const y = row * frameHeight;
+            const frameImage = await (0, canvas_1.loadImage)(frames[i]);
+            ctx.drawImage(frameImage, x, y, frameWidth, frameHeight);
+        }
+        return {
+            buffer: canvas.toBuffer('image/png'),
+            canvas: { width: thumbnailWidth, height: thumbnailHeight }
+        };
+    }
+    /**
+     * Convert video format
+     * @private
+     */
+    async #convertVideo(videoSource, options) {
+        const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
+        if (!fs_1.default.existsSync(frameDir)) {
+            fs_1.default.mkdirSync(frameDir, { recursive: true });
+        }
+        let videoPath;
+        let shouldCleanupVideo = false;
+        const timestamp = Date.now();
+        // Handle video source
+        if (Buffer.isBuffer(videoSource)) {
+            videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
+            fs_1.default.writeFileSync(videoPath, videoSource);
+            shouldCleanupVideo = true;
+        }
+        else {
+            let resolvedPath = videoSource;
+            if (!/^https?:\/\//i.test(resolvedPath)) {
+                resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
+            }
+            if (!fs_1.default.existsSync(resolvedPath)) {
+                throw new Error(`Video file not found: ${videoSource}`);
+            }
+            videoPath = resolvedPath;
+        }
+        const format = options.format || 'mp4';
+        const qualityPresets = {
+            low: '-crf 28',
+            medium: '-crf 23',
+            high: '-crf 18',
+            ultra: '-crf 15'
+        };
+        const qualityFlag = options.bitrate
+            ? `-b:v ${options.bitrate}k`
+            : qualityPresets[options.quality || 'medium'];
+        const fpsFlag = options.fps ? `-r ${options.fps}` : '';
+        const resolutionFlag = options.resolution
+            ? `-vf scale=${options.resolution.width}:${options.resolution.height}`
+            : '';
+        const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+        const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+        const command = `ffmpeg -i "${escapedVideoPath}" ${qualityFlag} ${fpsFlag} ${resolutionFlag} -y "${escapedOutputPath}"`;
+        try {
+            await execAsync(command, {
+                timeout: 300000, // 5 minute timeout
+                maxBuffer: 10 * 1024 * 1024
+            });
+            if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
+                fs_1.default.unlinkSync(videoPath);
+            }
+            return { outputPath: options.outputPath, success: true };
+        }
+        catch (error) {
+            if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
+                fs_1.default.unlinkSync(videoPath);
+            }
+            throw error;
+        }
+    }
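
A sketch of the convert branch, using only the option names read by #convertVideo above (quality presets map to CRF values; outputPath is required):

    await painter.createVideo({
        source: "./clip.mp4",
        convert: {
            outputPath: "./clip-720p.mp4",
            quality: "high",                         // low | medium | high | ultra
            fps: 30,
            resolution: { width: 1280, height: 720 } // becomes -vf scale=1280:720
        }
    });
    // => { outputPath: "./clip-720p.mp4", success: true }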
+    /**
+     * Trim/Cut video
+     * @private
+     */
+    async #trimVideo(videoSource, options) {
+        const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
+        if (!fs_1.default.existsSync(frameDir)) {
+            fs_1.default.mkdirSync(frameDir, { recursive: true });
+        }
+        let videoPath;
+        let shouldCleanupVideo = false;
+        const timestamp = Date.now();
+        if (Buffer.isBuffer(videoSource)) {
+            videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
+            fs_1.default.writeFileSync(videoPath, videoSource);
+            shouldCleanupVideo = true;
+        }
+        else {
+            let resolvedPath = videoSource;
+            if (!/^https?:\/\//i.test(resolvedPath)) {
+                resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
+            }
+            if (!fs_1.default.existsSync(resolvedPath)) {
+                throw new Error(`Video file not found: ${videoSource}`);
+            }
+            videoPath = resolvedPath;
+        }
+        const duration = options.endTime - options.startTime;
+        const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+        const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+        const command = `ffmpeg -i "${escapedVideoPath}" -ss ${options.startTime} -t ${duration} -c copy -y "${escapedOutputPath}"`;
+        try {
+            await execAsync(command, {
+                timeout: 300000,
+                maxBuffer: 10 * 1024 * 1024
+            });
+            if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
+                fs_1.default.unlinkSync(videoPath);
+            }
+            return { outputPath: options.outputPath, success: true };
+        }
+        catch (error) {
+            if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
+                fs_1.default.unlinkSync(videoPath);
+            }
+            throw error;
+        }
+    }
+    /**
+     * Extract audio from video
+     * @private
+     */
+    async #extractAudio(videoSource, options) {
+        const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
+        if (!fs_1.default.existsSync(frameDir)) {
+            fs_1.default.mkdirSync(frameDir, { recursive: true });
+        }
+        let videoPath;
+        let shouldCleanupVideo = false;
+        const timestamp = Date.now();
+        if (Buffer.isBuffer(videoSource)) {
+            videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
+            fs_1.default.writeFileSync(videoPath, videoSource);
+            shouldCleanupVideo = true;
+        }
+        else {
+            let resolvedPath = videoSource;
+            if (!/^https?:\/\//i.test(resolvedPath)) {
+                resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
+            }
+            if (!fs_1.default.existsSync(resolvedPath)) {
+                throw new Error(`Video file not found: ${videoSource}`);
+            }
+            videoPath = resolvedPath;
+        }
+        // Check if video has audio stream
+        const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+        try {
+            const { stdout } = await execAsync(`ffprobe -v error -select_streams a:0 -show_entries stream=codec_type -of default=noprint_wrappers=1:nokey=1 "${escapedVideoPath}"`, { timeout: 10000, maxBuffer: 1024 * 1024 });
+            const hasAudio = stdout.toString().trim() === 'audio';
+            if (!hasAudio) {
+                throw new Error('Video does not contain an audio stream. Cannot extract audio.');
+            }
+        }
+        catch (error) {
+            if (error instanceof Error && error.message.includes('Video does not contain')) {
+                throw error;
+            }
+            // If ffprobe fails, assume no audio
+            throw new Error('Video does not contain an audio stream. Cannot extract audio.');
+        }
+        const format = options.format || 'mp3';
+        const bitrate = options.bitrate || 128;
+        const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+        const command = `ffmpeg -i "${escapedVideoPath}" -vn -acodec ${format === 'mp3' ? 'libmp3lame' : format === 'wav' ? 'pcm_s16le' : format === 'aac' ? 'aac' : 'libvorbis'} -ab ${bitrate}k -y "${escapedOutputPath}"`;
+        try {
+            await execAsync(command, {
+                timeout: 300000,
+                maxBuffer: 10 * 1024 * 1024
+            });
+            if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
+                fs_1.default.unlinkSync(videoPath);
+            }
+            return { outputPath: options.outputPath, success: true };
+        }
+        catch (error) {
+            if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
+                fs_1.default.unlinkSync(videoPath);
+            }
+            throw error;
+        }
+    }
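
Corresponding sketches for the trim and extractAudio branches, again using only the options these private helpers read (startTime/endTime in seconds; audio format defaults to mp3 at 128 kbps):

    // Stream-copy seconds 5–15 into a new file.
    await painter.createVideo({
        source: "./clip.mp4",
        trim: { startTime: 5, endTime: 15, outputPath: "./clip-cut.mp4" }
    });

    // Extract the audio track (throws if the video has no audio stream).
    await painter.createVideo({
        source: "./clip.mp4",
        extractAudio: { format: "mp3", bitrate: 128, outputPath: "./clip.mp3" }
    });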
+
/**
|
|
2220
|
+
* Add watermark to video
|
|
2221
|
+
* @private
|
|
2222
|
+
*/
|
|
2223
|
+
async #addWatermarkToVideo(videoSource, options) {
|
|
2224
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2225
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2226
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2227
|
+
}
|
|
2228
|
+
let videoPath;
|
|
2229
|
+
let shouldCleanupVideo = false;
|
|
2230
|
+
const timestamp = Date.now();
|
|
2231
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
2232
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
2233
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
2234
|
+
shouldCleanupVideo = true;
|
|
2235
|
+
}
|
|
2236
|
+
else {
|
|
2237
|
+
let resolvedPath = videoSource;
|
|
2238
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2239
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2240
|
+
}
|
|
2241
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2242
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
2243
|
+
}
|
|
2244
|
+
videoPath = resolvedPath;
|
|
2245
|
+
}
|
|
2246
|
+
let watermarkPath = options.watermarkPath;
|
|
2247
|
+
if (!/^https?:\/\//i.test(watermarkPath)) {
|
|
2248
|
+
watermarkPath = path_1.default.join(process.cwd(), watermarkPath);
|
|
2249
|
+
}
|
|
2250
|
+
if (!fs_1.default.existsSync(watermarkPath)) {
|
|
2251
|
+
throw new Error(`Watermark file not found: ${options.watermarkPath}`);
|
|
2252
|
+
}
|
|
2253
|
+
const position = options.position || 'bottom-right';
|
|
2254
|
+
const opacity = options.opacity || 0.5;
|
|
2255
|
+
const size = options.size ? `scale=${options.size.width}:${options.size.height}` : '';
|
|
2256
|
+
const positionMap = {
|
|
2257
|
+
'top-left': '10:10',
|
|
2258
|
+
'top-right': 'W-w-10:10',
|
|
2259
|
+
'bottom-left': '10:H-h-10',
|
|
2260
|
+
'bottom-right': 'W-w-10:H-h-10',
|
|
2261
|
+
'center': '(W-w)/2:(H-h)/2'
|
|
2262
|
+
};
|
|
2263
|
+
const overlay = positionMap[position];
|
|
2264
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
2265
|
+
const escapedWatermarkPath = watermarkPath.replace(/"/g, '\\"');
|
|
2266
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
2267
|
+
const filter = `[1:v]${size ? size + ',' : ''}format=rgba,colorchannelmixer=aa=${opacity}[wm];[0:v][wm]overlay=${overlay}`;
|
|
2268
|
+
const command = `ffmpeg -i "${escapedVideoPath}" -i "${escapedWatermarkPath}" -filter_complex "${filter}" -y "${escapedOutputPath}"`;
|
|
2269
|
+
try {
|
|
2270
|
+
await execAsync(command, {
|
|
2271
|
+
timeout: 300000,
|
|
2272
|
+
maxBuffer: 10 * 1024 * 1024
|
|
2273
|
+
});
|
|
2274
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2275
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2276
|
+
}
|
|
2277
|
+
return { outputPath: options.outputPath, success: true };
|
|
2278
|
+
}
|
|
2279
|
+
catch (error) {
|
|
2280
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2281
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2282
|
+
}
|
|
2283
|
+
throw error;
|
|
2284
|
+
}
|
|
2285
|
+
}
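The watermark helper above composes its -filter_complex string from the position map, an optional scale, and an alpha-mixing step. A small sketch of that string builder, under the assumption that the position names and the 10px margins are exactly those in the map above:

// Sketch: build the overlay filter string used for watermarking.
function watermarkFilter(position = 'bottom-right', opacity = 0.5, size) {
    const positionMap = {
        'top-left': '10:10',
        'top-right': 'W-w-10:10',
        'bottom-left': '10:H-h-10',
        'bottom-right': 'W-w-10:H-h-10',
        'center': '(W-w)/2:(H-h)/2'
    };
    const scale = size ? `scale=${size.width}:${size.height},` : '';
    return `[1:v]${scale}format=rgba,colorchannelmixer=aa=${opacity}[wm];[0:v][wm]overlay=${positionMap[position]}`;
}

console.log(watermarkFilter('top-left', 0.3, { width: 120, height: 120 }));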
|
|
2286
|
+
/**
|
|
2287
|
+
* Change video speed
|
|
2288
|
+
* @private
|
|
2289
|
+
*/
|
|
2290
|
+
async #changeVideoSpeed(videoSource, options) {
|
|
2291
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2292
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2293
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2294
|
+
}
|
|
2295
|
+
let videoPath;
|
|
2296
|
+
let shouldCleanupVideo = false;
|
|
2297
|
+
const timestamp = Date.now();
|
|
2298
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
2299
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
2300
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
2301
|
+
shouldCleanupVideo = true;
|
|
2302
|
+
}
|
|
2303
|
+
else {
|
|
2304
|
+
let resolvedPath = videoSource;
|
|
2305
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2306
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2307
|
+
}
|
|
2308
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2309
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
2310
|
+
}
|
|
2311
|
+
videoPath = resolvedPath;
|
|
2312
|
+
}
|
|
2313
|
+
// Check if video has audio stream
|
|
2314
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
2315
|
+
let hasAudio = false;
|
|
2316
|
+
try {
|
|
2317
|
+
const { stdout } = await execAsync(`ffprobe -v error -select_streams a:0 -show_entries stream=codec_type -of default=noprint_wrappers=1:nokey=1 "${escapedVideoPath}"`, { timeout: 10000, maxBuffer: 1024 * 1024 });
|
|
2318
|
+
hasAudio = stdout.toString().trim() === 'audio';
|
|
2319
|
+
}
|
|
2320
|
+
catch {
|
|
2321
|
+
hasAudio = false;
|
|
2322
|
+
}
|
|
2323
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
2324
|
+
+        let command;
+        if (hasAudio) {
+            // Video has audio - process both video and audio
+            // For speeds > 2.0, we need to chain atempo filters (atempo max is 2.0)
+            if (options.speed > 2.0) {
+                const atempoCount = Math.ceil(Math.log2(options.speed));
+                const atempoValue = Math.pow(2, Math.log2(options.speed) / atempoCount);
+                const atempoFilters = Array(atempoCount).fill(atempoValue).map(v => `atempo=${v}`).join(',');
+                command = `ffmpeg -i "${escapedVideoPath}" -filter_complex "[0:v]setpts=${1 / options.speed}*PTS[v];[0:a]${atempoFilters}[a]" -map "[v]" -map "[a]" -y "${escapedOutputPath}"`;
+            }
+            else if (options.speed < 0.5) {
+                // For speeds < 0.5, we need to chain atempo filters
+                const atempoCount = Math.ceil(Math.log2(1 / options.speed));
+                const atempoValue = Math.pow(0.5, Math.log2(1 / options.speed) / atempoCount);
+                const atempoFilters = Array(atempoCount).fill(atempoValue).map(v => `atempo=${v}`).join(',');
+                command = `ffmpeg -i "${escapedVideoPath}" -filter_complex "[0:v]setpts=${1 / options.speed}*PTS[v];[0:a]${atempoFilters}[a]" -map "[v]" -map "[a]" -y "${escapedOutputPath}"`;
+            }
+            else {
+                // Normal speed range (0.5 to 2.0)
+                command = `ffmpeg -i "${escapedVideoPath}" -filter_complex "[0:v]setpts=${1 / options.speed}*PTS[v];[0:a]atempo=${options.speed}[a]" -map "[v]" -map "[a]" -y "${escapedOutputPath}"`;
+            }
+        }
+        else {
+            // No audio - only process video
+            command = `ffmpeg -i "${escapedVideoPath}" -filter_complex "[0:v]setpts=${1 / options.speed}*PTS[v]" -map "[v]" -y "${escapedOutputPath}"`;
+        }
|
|
2350
|
+
try {
|
|
2351
|
+
await execAsync(command, {
|
|
2352
|
+
timeout: 300000,
|
|
2353
|
+
maxBuffer: 10 * 1024 * 1024
|
|
2354
|
+
});
|
|
2355
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2356
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2357
|
+
}
|
|
2358
|
+
return { outputPath: options.outputPath, success: true };
|
|
2359
|
+
}
|
|
2360
|
+
catch (error) {
|
|
2361
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2362
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2363
|
+
}
|
|
2364
|
+
throw error;
|
|
2365
|
+
}
|
|
2366
|
+
}
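The atempo chaining above exists because ffmpeg's atempo filter only accepts factors between 0.5 and 2.0; any other speed is expressed as a chain whose factors multiply back to the requested value. A minimal sketch of that arithmetic, mirroring the branches in the method:

// Sketch: split a speed factor into an atempo chain within ffmpeg's 0.5-2.0 limit.
function atempoChain(speed) {
    if (speed >= 0.5 && speed <= 2.0) return `atempo=${speed}`;
    const steps = speed > 2.0
        ? Math.ceil(Math.log2(speed))
        : Math.ceil(Math.log2(1 / speed));
    const base = speed > 2.0 ? 2 : 0.5;
    const value = Math.pow(base, Math.log2(speed > 2.0 ? speed : 1 / speed) / steps);
    return Array(steps).fill(value).map(v => `atempo=${v}`).join(',');
}

console.log(atempoChain(5));    // three ~1.71x steps, 1.71^3 ≈ 5
console.log(atempoChain(0.25)); // atempo=0.5,atempo=0.5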
|
|
2367
|
+
/**
|
|
2368
|
+
* Generate video preview (multiple frames)
|
|
2369
|
+
* @private
|
|
2370
|
+
*/
|
|
2371
|
+
async #generateVideoPreview(videoSource, options, videoInfo) {
|
|
2372
|
+
const count = options.count || 10;
|
|
2373
|
+
const outputDir = options.outputDirectory || path_1.default.join(process.cwd(), 'video-preview');
|
|
2374
|
+
const outputFormat = options.outputFormat || 'png';
|
|
2375
|
+
const quality = options.quality || 2;
|
|
2376
|
+
if (!fs_1.default.existsSync(outputDir)) {
|
|
2377
|
+
fs_1.default.mkdirSync(outputDir, { recursive: true });
|
|
2378
|
+
}
|
|
2379
|
+
if (!videoInfo) {
|
|
2380
|
+
videoInfo = await this.getVideoInfo(videoSource, true);
|
|
2381
|
+
}
|
|
2382
|
+
const duration = videoInfo.duration;
|
|
2383
|
+
const interval = duration / (count + 1);
|
|
2384
|
+
const frames = [];
|
|
2385
|
+
for (let i = 1; i <= count; i++) {
|
|
2386
|
+
const time = interval * i;
|
|
2387
|
+
const frameBuffer = await this.#extractVideoFrame(videoSource, 0, time, outputFormat, quality);
|
|
2388
|
+
if (frameBuffer) {
|
|
2389
|
+
const framePath = path_1.default.join(outputDir, `preview-${String(i).padStart(3, '0')}.${outputFormat}`);
|
|
2390
|
+
fs_1.default.writeFileSync(framePath, frameBuffer);
|
|
2391
|
+
frames.push({
|
|
2392
|
+
source: framePath,
|
|
2393
|
+
frameNumber: i,
|
|
2394
|
+
time: time
|
|
2395
|
+
});
|
|
2396
|
+
}
|
|
2397
|
+
}
|
|
2398
|
+
return frames;
|
|
2399
|
+
}
|
|
2400
|
+
/**
|
|
2401
|
+
* Apply video effects/filters
|
|
2402
|
+
* @private
|
|
2403
|
+
*/
|
|
2404
|
+
async #applyVideoEffects(videoSource, options) {
|
|
2405
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2406
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2407
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2408
|
+
}
|
|
2409
|
+
let videoPath;
|
|
2410
|
+
let shouldCleanupVideo = false;
|
|
2411
|
+
const timestamp = Date.now();
|
|
2412
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
2413
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
2414
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
2415
|
+
shouldCleanupVideo = true;
|
|
2416
|
+
}
|
|
2417
|
+
else {
|
|
2418
|
+
let resolvedPath = videoSource;
|
|
2419
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2420
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2421
|
+
}
|
|
2422
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2423
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
2424
|
+
}
|
|
2425
|
+
videoPath = resolvedPath;
|
|
2426
|
+
}
|
|
2427
|
+
+        // Build filter chain
+        const filters = [];
+        for (const filter of options.filters) {
+            switch (filter.type) {
+                case 'blur':
+                    filters.push(`boxblur=${filter.intensity || 5}`);
+                    break;
+                case 'brightness':
+                    filters.push(`eq=brightness=${((filter.value || 0) / 100).toFixed(2)}`);
+                    break;
+                case 'contrast':
+                    filters.push(`eq=contrast=${1 + ((filter.value || 0) / 100)}`);
+                    break;
+                case 'saturation':
+                    filters.push(`eq=saturation=${1 + ((filter.value || 0) / 100)}`);
+                    break;
+                case 'grayscale':
+                    filters.push('hue=s=0');
+                    break;
+                case 'sepia':
+                    filters.push('colorchannelmixer=.393:.769:.189:0:.349:.686:.168:0:.272:.534:.131');
+                    break;
+                case 'invert':
+                    filters.push('negate');
+                    break;
+                case 'sharpen':
+                    filters.push(`unsharp=5:5:${filter.intensity || 1.0}:5:5:0.0`);
+                    break;
+                case 'noise':
+                    filters.push(`noise=alls=${filter.intensity || 20}:allf=t+u`);
+                    break;
+            }
+        }
|
|
2460
|
+
const filterChain = filters.length > 0 ? `-vf "${filters.join(',')}"` : '';
|
|
2461
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
2462
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
2463
|
+
const command = `ffmpeg -i "${escapedVideoPath}" ${filterChain} -y "${escapedOutputPath}"`;
|
|
2464
|
+
try {
|
|
2465
|
+
await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
2466
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2467
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2468
|
+
}
|
|
2469
|
+
return { outputPath: options.outputPath, success: true };
|
|
2470
|
+
}
|
|
2471
|
+
catch (error) {
|
|
2472
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2473
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2474
|
+
}
|
|
2475
|
+
throw error;
|
|
2476
|
+
}
|
|
2477
|
+
}
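The accumulated filters join into a single -vf argument. A short sketch of the resulting command line, assuming a grayscale pass followed by a mild blur; the input/output names and filter values are placeholders:

// Sketch: combine effect filters into one ffmpeg -vf chain.
const filters = ['hue=s=0', 'boxblur=5'];
const vf = filters.length > 0 ? `-vf "${filters.join(',')}"` : '';
console.log(`ffmpeg -i "input.mp4" ${vf} -y "output.mp4"`);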
|
|
2478
|
+
/**
|
|
2479
|
+
* Merge/Concatenate videos
|
|
2480
|
+
* @private
|
|
2481
|
+
*/
|
|
2482
|
+
async #mergeVideos(options) {
|
|
2483
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2484
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2485
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2486
|
+
}
|
|
2487
|
+
const timestamp = Date.now();
|
|
2488
|
+
const videoPaths = [];
|
|
2489
|
+
const shouldCleanup = [];
|
|
2490
|
+
// Prepare all video files
|
|
2491
|
+
for (let i = 0; i < options.videos.length; i++) {
|
|
2492
|
+
const video = options.videos[i];
|
|
2493
|
+
if (Buffer.isBuffer(video)) {
|
|
2494
|
+
const tempPath = path_1.default.join(frameDir, `temp-video-${timestamp}-${i}.mp4`);
|
|
2495
|
+
fs_1.default.writeFileSync(tempPath, video);
|
|
2496
|
+
videoPaths.push(tempPath);
|
|
2497
|
+
shouldCleanup.push(true);
|
|
2498
|
+
}
|
|
2499
|
+
else {
|
|
2500
|
+
let resolvedPath = video;
|
|
2501
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2502
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2503
|
+
}
|
|
2504
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2505
|
+
throw new Error(`Video file not found: ${video}`);
|
|
2506
|
+
}
|
|
2507
|
+
videoPaths.push(resolvedPath);
|
|
2508
|
+
shouldCleanup.push(false);
|
|
2509
|
+
}
|
|
2510
|
+
}
|
|
2511
|
+
const mode = options.mode || 'sequential';
|
|
2512
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
2513
|
+
let command;
|
|
2514
|
+
if (mode === 'sequential') {
|
|
2515
|
+
// Create concat file
|
|
2516
|
+
const concatFile = path_1.default.join(frameDir, `concat-${timestamp}.txt`);
|
|
2517
|
+
const concatContent = videoPaths.map(vp => `file '${vp.replace(/'/g, "\\'")}'`).join('\n');
|
|
2518
|
+
fs_1.default.writeFileSync(concatFile, concatContent);
|
|
2519
|
+
command = `ffmpeg -f concat -safe 0 -i "${concatFile.replace(/"/g, '\\"')}" -c copy -y "${escapedOutputPath}"`;
|
|
2520
|
+
}
|
|
2521
|
+
else if (mode === 'side-by-side') {
|
|
2522
|
+
const escapedPaths = videoPaths.map(vp => vp.replace(/"/g, '\\"'));
|
|
2523
|
+
command = `ffmpeg -i "${escapedPaths[0]}" -i "${escapedPaths[1] || escapedPaths[0]}" -filter_complex "[0:v][1:v]hstack=inputs=2[v]" -map "[v]" -y "${escapedOutputPath}"`;
|
|
2524
|
+
}
|
|
2525
|
+
else if (mode === 'grid') {
|
|
2526
|
+
const grid = options.grid || { cols: 2, rows: 2 };
|
|
2527
|
+
const escapedPaths = videoPaths.map(vp => vp.replace(/"/g, '\\"'));
|
|
2528
|
+
// Simplified grid - would need more complex filter for full grid
|
|
2529
|
+
command = `ffmpeg -i "${escapedPaths[0]}" -i "${escapedPaths[1] || escapedPaths[0]}" -filter_complex "[0:v][1:v]hstack=inputs=2[v]" -map "[v]" -y "${escapedOutputPath}"`;
|
|
2530
|
+
}
|
|
2531
|
+
else {
|
|
2532
|
+
throw new Error(`Unknown merge mode: ${mode}`);
|
|
2533
|
+
}
|
|
2534
|
+
try {
|
|
2535
|
+
await execAsync(command, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
|
|
2536
|
+
// Cleanup
|
|
2537
|
+
for (let i = 0; i < videoPaths.length; i++) {
|
|
2538
|
+
if (shouldCleanup[i] && fs_1.default.existsSync(videoPaths[i])) {
|
|
2539
|
+
fs_1.default.unlinkSync(videoPaths[i]);
|
|
2540
|
+
}
|
|
2541
|
+
}
|
|
2542
|
+
return { outputPath: options.outputPath, success: true };
|
|
2543
|
+
}
|
|
2544
|
+
catch (error) {
|
|
2545
|
+
// Cleanup on error
|
|
2546
|
+
for (let i = 0; i < videoPaths.length; i++) {
|
|
2547
|
+
if (shouldCleanup[i] && fs_1.default.existsSync(videoPaths[i])) {
|
|
2548
|
+
fs_1.default.unlinkSync(videoPaths[i]);
|
|
2549
|
+
}
|
|
2550
|
+
}
|
|
2551
|
+
throw error;
|
|
2552
|
+
}
|
|
2553
|
+
}
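Sequential merging above relies on ffmpeg's concat demuxer: the clips are listed in a text file and stream-copied without re-encoding. A minimal sketch of that list file, with placeholder clip names and the same single-quote escaping used in the method:

// Sketch: build an ffmpeg concat list and the command that stream-copies it.
const fs = require('fs');
const clips = ['intro.mp4', 'main.mp4', 'outro.mp4'];
const listFile = 'concat.txt';
fs.writeFileSync(listFile, clips.map(c => `file '${c.replace(/'/g, "\\'")}'`).join('\n'));
console.log(`ffmpeg -f concat -safe 0 -i "${listFile}" -c copy -y "merged.mp4"`);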
|
|
2554
|
+
/**
|
|
2555
|
+
* Rotate/Flip video
|
|
2556
|
+
* @private
|
|
2557
|
+
*/
|
|
2558
|
+
async #rotateVideo(videoSource, options) {
|
|
2559
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2560
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2561
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2562
|
+
}
|
|
2563
|
+
let videoPath;
|
|
2564
|
+
let shouldCleanupVideo = false;
|
|
2565
|
+
const timestamp = Date.now();
|
|
2566
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
2567
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
2568
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
2569
|
+
shouldCleanupVideo = true;
|
|
2570
|
+
}
|
|
2571
|
+
else {
|
|
2572
|
+
let resolvedPath = videoSource;
|
|
2573
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2574
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2575
|
+
}
|
|
2576
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2577
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
2578
|
+
}
|
|
2579
|
+
videoPath = resolvedPath;
|
|
2580
|
+
}
|
|
2581
|
+
const filters = [];
|
|
2582
|
+
if (options.angle) {
|
|
2583
|
+
const rotationMap = {
|
|
2584
|
+
90: 'transpose=1',
|
|
2585
|
+
180: 'transpose=1,transpose=1',
|
|
2586
|
+
270: 'transpose=2'
|
|
2587
|
+
};
|
|
2588
|
+
filters.push(rotationMap[options.angle]);
|
|
2589
|
+
}
|
|
2590
|
+
if (options.flip) {
|
|
2591
|
+
if (options.flip === 'horizontal') {
|
|
2592
|
+
filters.push('hflip');
|
|
2593
|
+
}
|
|
2594
|
+
else if (options.flip === 'vertical') {
|
|
2595
|
+
filters.push('vflip');
|
|
2596
|
+
}
|
|
2597
|
+
else if (options.flip === 'both') {
|
|
2598
|
+
filters.push('hflip', 'vflip');
|
|
2599
|
+
}
|
|
2600
|
+
}
|
|
2601
|
+
const filterChain = filters.length > 0 ? `-vf "${filters.join(',')}"` : '';
|
|
2602
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
2603
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
2604
|
+
const command = `ffmpeg -i "${escapedVideoPath}" ${filterChain} -y "${escapedOutputPath}"`;
|
|
2605
|
+
try {
|
|
2606
|
+
await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
2607
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2608
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2609
|
+
}
|
|
2610
|
+
return { outputPath: options.outputPath, success: true };
|
|
2611
|
+
}
|
|
2612
|
+
catch (error) {
|
|
2613
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2614
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2615
|
+
}
|
|
2616
|
+
throw error;
|
|
2617
|
+
}
|
|
2618
|
+
}
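Rotation above maps onto ffmpeg's transpose filter (90° = transpose=1, 180° = two chained transposes, 270° = transpose=2) while flips map to hflip/vflip. A small sketch of that mapping as a standalone function:

// Sketch: translate rotate/flip options into an ffmpeg filter string.
function rotateFlipFilters(angle, flip) {
    const filters = [];
    const rotationMap = { 90: 'transpose=1', 180: 'transpose=1,transpose=1', 270: 'transpose=2' };
    if (angle) filters.push(rotationMap[angle]);
    if (flip === 'horizontal') filters.push('hflip');
    else if (flip === 'vertical') filters.push('vflip');
    else if (flip === 'both') filters.push('hflip', 'vflip');
    return filters.join(',');
}

console.log(rotateFlipFilters(90, 'horizontal')); // transpose=1,hflip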
|
|
2619
|
+
/**
|
|
2620
|
+
* Crop video
|
|
2621
|
+
* @private
|
|
2622
|
+
*/
|
|
2623
|
+
async #cropVideo(videoSource, options) {
|
|
2624
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2625
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2626
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2627
|
+
}
|
|
2628
|
+
let videoPath;
|
|
2629
|
+
let shouldCleanupVideo = false;
|
|
2630
|
+
const timestamp = Date.now();
|
|
2631
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
2632
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
2633
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
2634
|
+
shouldCleanupVideo = true;
|
|
2635
|
+
}
|
|
2636
|
+
else {
|
|
2637
|
+
let resolvedPath = videoSource;
|
|
2638
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2639
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2640
|
+
}
|
|
2641
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2642
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
2643
|
+
}
|
|
2644
|
+
videoPath = resolvedPath;
|
|
2645
|
+
}
|
|
2646
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
2647
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
2648
|
+
const command = `ffmpeg -i "${escapedVideoPath}" -vf "crop=${options.width}:${options.height}:${options.x}:${options.y}" -y "${escapedOutputPath}"`;
|
|
2649
|
+
try {
|
|
2650
|
+
await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
2651
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2652
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2653
|
+
}
|
|
2654
|
+
return { outputPath: options.outputPath, success: true };
|
|
2655
|
+
}
|
|
2656
|
+
catch (error) {
|
|
2657
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2658
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2659
|
+
}
|
|
2660
|
+
throw error;
|
|
2661
|
+
}
|
|
2662
|
+
}
|
|
2663
|
+
/**
|
|
2664
|
+
* Compress/Optimize video
|
|
2665
|
+
* @private
|
|
2666
|
+
*/
|
|
2667
|
+
async #compressVideo(videoSource, options) {
|
|
2668
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2669
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2670
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2671
|
+
}
|
|
2672
|
+
let videoPath;
|
|
2673
|
+
let shouldCleanupVideo = false;
|
|
2674
|
+
const timestamp = Date.now();
|
|
2675
|
+
let originalSize = 0;
|
|
2676
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
2677
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
2678
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
2679
|
+
shouldCleanupVideo = true;
|
|
2680
|
+
originalSize = videoSource.length;
|
|
2681
|
+
}
|
|
2682
|
+
else {
|
|
2683
|
+
let resolvedPath = videoSource;
|
|
2684
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2685
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2686
|
+
}
|
|
2687
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2688
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
2689
|
+
}
|
|
2690
|
+
videoPath = resolvedPath;
|
|
2691
|
+
originalSize = fs_1.default.statSync(resolvedPath).size;
|
|
2692
|
+
}
|
|
2693
|
+
const qualityPresets = {
|
|
2694
|
+
low: '-crf 32 -preset fast',
|
|
2695
|
+
medium: '-crf 28 -preset medium',
|
|
2696
|
+
high: '-crf 23 -preset slow',
|
|
2697
|
+
ultra: '-crf 18 -preset veryslow'
|
|
2698
|
+
};
|
|
2699
|
+
let qualityFlag = qualityPresets[options.quality || 'medium'];
|
|
2700
|
+
if (options.maxBitrate) {
|
|
2701
|
+
qualityFlag = `-b:v ${options.maxBitrate}k -maxrate ${options.maxBitrate}k -bufsize ${options.maxBitrate * 2}k`;
|
|
2702
|
+
}
|
|
2703
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
2704
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
2705
|
+
const command = `ffmpeg -i "${escapedVideoPath}" ${qualityFlag} -y "${escapedOutputPath}"`;
|
|
2706
|
+
try {
|
|
2707
|
+
await execAsync(command, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
|
|
2708
|
+
const compressedSize = fs_1.default.existsSync(options.outputPath) ? fs_1.default.statSync(options.outputPath).size : 0;
|
|
2709
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2710
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2711
|
+
}
|
|
2712
|
+
return {
|
|
2713
|
+
outputPath: options.outputPath,
|
|
2714
|
+
success: true,
|
|
2715
|
+
originalSize,
|
|
2716
|
+
compressedSize
|
|
2717
|
+
};
|
|
2718
|
+
}
|
|
2719
|
+
catch (error) {
|
|
2720
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2721
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2722
|
+
}
|
|
2723
|
+
throw error;
|
|
2724
|
+
}
|
|
2725
|
+
}
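Compression quality is driven by the CRF/preset table above unless an explicit maxBitrate caps the bitrate instead. A sketch of that selection logic, with the values copied from the presets in the method:

// Sketch: pick ffmpeg compression flags from a quality preset or a bitrate cap.
function compressionFlags(quality = 'medium', maxBitrate) {
    if (maxBitrate) {
        return `-b:v ${maxBitrate}k -maxrate ${maxBitrate}k -bufsize ${maxBitrate * 2}k`;
    }
    const presets = {
        low: '-crf 32 -preset fast',
        medium: '-crf 28 -preset medium',
        high: '-crf 23 -preset slow',
        ultra: '-crf 18 -preset veryslow'
    };
    return presets[quality];
}

console.log(compressionFlags('high'));       // -crf 23 -preset slow
console.log(compressionFlags('high', 2500)); // bitrate-capped variant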
|
|
2726
|
+
/**
|
|
2727
|
+
* Add text overlay to video
|
|
2728
|
+
* @private
|
|
2729
|
+
*/
|
|
2730
|
+
async #addTextToVideo(videoSource, options) {
|
|
2731
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2732
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2733
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2734
|
+
}
|
|
2735
|
+
let videoPath;
|
|
2736
|
+
let shouldCleanupVideo = false;
|
|
2737
|
+
const timestamp = Date.now();
|
|
2738
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
2739
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
2740
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
2741
|
+
shouldCleanupVideo = true;
|
|
2742
|
+
}
|
|
2743
|
+
else {
|
|
2744
|
+
let resolvedPath = videoSource;
|
|
2745
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2746
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2747
|
+
}
|
|
2748
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2749
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
2750
|
+
}
|
|
2751
|
+
videoPath = resolvedPath;
|
|
2752
|
+
}
|
|
2753
|
+
const position = options.position || 'bottom-center';
|
|
2754
|
+
const fontSize = options.fontSize || 24;
|
|
2755
|
+
const fontColor = options.fontColor || 'white';
|
|
2756
|
+
const bgColor = options.backgroundColor || 'black@0.5';
|
|
2757
|
+
const positionMap = {
|
|
2758
|
+
'top-left': `x=10:y=10`,
|
|
2759
|
+
'top-center': `x=(w-text_w)/2:y=10`,
|
|
2760
|
+
'top-right': `x=w-text_w-10:y=10`,
|
|
2761
|
+
'center': `x=(w-text_w)/2:y=(h-text_h)/2`,
|
|
2762
|
+
'bottom-left': `x=10:y=h-text_h-10`,
|
|
2763
|
+
'bottom-center': `x=(w-text_w)/2:y=h-text_h-10`,
|
|
2764
|
+
'bottom-right': `x=w-text_w-10:y=h-text_h-10`
|
|
2765
|
+
};
|
|
2766
|
+
const pos = positionMap[position];
|
|
2767
|
+
const textEscaped = options.text.replace(/:/g, '\\:').replace(/'/g, "\\'");
|
|
2768
|
+
const timeFilter = options.startTime !== undefined && options.endTime !== undefined
|
|
2769
|
+
? `:enable='between(t,${options.startTime},${options.endTime})'`
|
|
2770
|
+
: '';
|
|
2771
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
2772
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
2773
|
+
const command = `ffmpeg -i "${escapedVideoPath}" -vf "drawtext=text='${textEscaped}':fontsize=${fontSize}:fontcolor=${fontColor}:box=1:boxcolor=${bgColor}:${pos}${timeFilter}" -y "${escapedOutputPath}"`;
|
|
2774
|
+
try {
|
|
2775
|
+
await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
2776
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2777
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2778
|
+
}
|
|
2779
|
+
return { outputPath: options.outputPath, success: true };
|
|
2780
|
+
}
|
|
2781
|
+
catch (error) {
|
|
2782
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2783
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2784
|
+
}
|
|
2785
|
+
throw error;
|
|
2786
|
+
}
|
|
2787
|
+
}
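Text overlays are rendered with ffmpeg's drawtext filter, escaping colons and quotes and optionally gating the text with an enable=between(t,...) window. A minimal sketch of the filter string, using placeholder text, the bottom-center position, and the default colors from the method:

// Sketch: build a drawtext filter for a timed caption.
function drawtext(text, start, end) {
    const escaped = text.replace(/:/g, '\\:').replace(/'/g, "\\'");
    const pos = 'x=(w-text_w)/2:y=h-text_h-10'; // bottom-center
    const enable = start !== undefined && end !== undefined
        ? `:enable='between(t,${start},${end})'`
        : '';
    return `drawtext=text='${escaped}':fontsize=24:fontcolor=white:box=1:boxcolor=black@0.5:${pos}${enable}`;
}

console.log(drawtext('Chapter 1: Setup', 0, 5));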
|
|
2788
|
+
/**
|
|
2789
|
+
* Add fade effects to video
|
|
2790
|
+
* @private
|
|
2791
|
+
*/
|
|
2792
|
+
async #addFadeToVideo(videoSource, options) {
|
|
2793
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2794
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2795
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2796
|
+
}
|
|
2797
|
+
let videoPath;
|
|
2798
|
+
let shouldCleanupVideo = false;
|
|
2799
|
+
const timestamp = Date.now();
|
|
2800
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
2801
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
2802
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
2803
|
+
shouldCleanupVideo = true;
|
|
2804
|
+
}
|
|
2805
|
+
else {
|
|
2806
|
+
let resolvedPath = videoSource;
|
|
2807
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2808
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2809
|
+
}
|
|
2810
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2811
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
2812
|
+
}
|
|
2813
|
+
videoPath = resolvedPath;
|
|
2814
|
+
}
|
|
2815
|
+
const videoInfo = await this.getVideoInfo(videoPath, true);
|
|
2816
|
+
const duration = videoInfo?.duration || 0;
|
|
2817
|
+
const filters = [];
|
|
2818
|
+
if (options.fadeIn) {
|
|
2819
|
+
filters.push(`fade=t=in:st=0:d=${options.fadeIn}`);
|
|
2820
|
+
}
|
|
2821
|
+
if (options.fadeOut && duration > options.fadeOut) {
|
|
2822
|
+
filters.push(`fade=t=out:st=${duration - options.fadeOut}:d=${options.fadeOut}`);
|
|
2823
|
+
}
|
|
2824
|
+
const filterChain = filters.length > 0 ? `-vf "${filters.join(',')}"` : '';
|
|
2825
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
2826
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
2827
|
+
const command = `ffmpeg -i "${escapedVideoPath}" ${filterChain} -y "${escapedOutputPath}"`;
|
|
2828
|
+
try {
|
|
2829
|
+
await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
2830
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2831
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2832
|
+
}
|
|
2833
|
+
return { outputPath: options.outputPath, success: true };
|
|
2834
|
+
}
|
|
2835
|
+
catch (error) {
|
|
2836
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2837
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2838
|
+
}
|
|
2839
|
+
throw error;
|
|
2840
|
+
}
|
|
2841
|
+
}
|
|
2842
|
+
/**
|
|
2843
|
+
* Reverse video playback
|
|
2844
|
+
* @private
|
|
2845
|
+
*/
|
|
2846
|
+
async #reverseVideo(videoSource, options) {
|
|
2847
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2848
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2849
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2850
|
+
}
|
|
2851
|
+
let videoPath;
|
|
2852
|
+
let shouldCleanupVideo = false;
|
|
2853
|
+
const timestamp = Date.now();
|
|
2854
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
2855
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
2856
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
2857
|
+
shouldCleanupVideo = true;
|
|
2858
|
+
}
|
|
2859
|
+
else {
|
|
2860
|
+
let resolvedPath = videoSource;
|
|
2861
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2862
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2863
|
+
}
|
|
2864
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2865
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
2866
|
+
}
|
|
2867
|
+
videoPath = resolvedPath;
|
|
2868
|
+
}
|
|
2869
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
2870
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
2871
|
+
const command = `ffmpeg -i "${escapedVideoPath}" -vf reverse -af areverse -y "${escapedOutputPath}"`;
|
|
2872
|
+
try {
|
|
2873
|
+
await execAsync(command, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
|
|
2874
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2875
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2876
|
+
}
|
|
2877
|
+
return { outputPath: options.outputPath, success: true };
|
|
2878
|
+
}
|
|
2879
|
+
catch (error) {
|
|
2880
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2881
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2882
|
+
}
|
|
2883
|
+
throw error;
|
|
2884
|
+
}
|
|
2885
|
+
}
|
|
2886
|
+
/**
|
|
2887
|
+
* Create seamless video loop
|
|
2888
|
+
* @private
|
|
2889
|
+
*/
|
|
2890
|
+
async #createVideoLoop(videoSource, options) {
|
|
2891
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2892
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2893
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2894
|
+
}
|
|
2895
|
+
let videoPath;
|
|
2896
|
+
let shouldCleanupVideo = false;
|
|
2897
|
+
const timestamp = Date.now();
|
|
2898
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
2899
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
2900
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
2901
|
+
shouldCleanupVideo = true;
|
|
2902
|
+
}
|
|
2903
|
+
else {
|
|
2904
|
+
let resolvedPath = videoSource;
|
|
2905
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2906
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2907
|
+
}
|
|
2908
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2909
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
2910
|
+
}
|
|
2911
|
+
videoPath = resolvedPath;
|
|
2912
|
+
}
|
|
2913
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
2914
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
2915
|
+
// Create loop by concatenating video with itself
|
|
2916
|
+
const concatFile = path_1.default.join(frameDir, `loop-${timestamp}.txt`);
|
|
2917
|
+
const concatContent = `file '${videoPath.replace(/'/g, "\\'")}'\nfile '${videoPath.replace(/'/g, "\\'")}'`;
|
|
2918
|
+
fs_1.default.writeFileSync(concatFile, concatContent);
|
|
2919
|
+
const command = `ffmpeg -f concat -safe 0 -i "${concatFile.replace(/"/g, '\\"')}" -c copy -y "${escapedOutputPath}"`;
|
|
2920
|
+
try {
|
|
2921
|
+
await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
2922
|
+
if (fs_1.default.existsSync(concatFile)) {
|
|
2923
|
+
fs_1.default.unlinkSync(concatFile);
|
|
2924
|
+
}
|
|
2925
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2926
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2927
|
+
}
|
|
2928
|
+
return { outputPath: options.outputPath, success: true };
|
|
2929
|
+
}
|
|
2930
|
+
catch (error) {
|
|
2931
|
+
if (fs_1.default.existsSync(concatFile)) {
|
|
2932
|
+
fs_1.default.unlinkSync(concatFile);
|
|
2933
|
+
}
|
|
2934
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
2935
|
+
fs_1.default.unlinkSync(videoPath);
|
|
2936
|
+
}
|
|
2937
|
+
throw error;
|
|
2938
|
+
}
|
|
2939
|
+
}
|
|
2940
|
+
/**
|
|
2941
|
+
* Batch process multiple videos
|
|
2942
|
+
* @private
|
|
2943
|
+
*/
|
|
2944
|
+
async #batchProcessVideos(options) {
|
|
2945
|
+
if (!fs_1.default.existsSync(options.outputDirectory)) {
|
|
2946
|
+
fs_1.default.mkdirSync(options.outputDirectory, { recursive: true });
|
|
2947
|
+
}
|
|
2948
|
+
const results = [];
|
|
2949
|
+
for (let i = 0; i < options.videos.length; i++) {
|
|
2950
|
+
const video = options.videos[i];
|
|
2951
|
+
const outputPath = path_1.default.join(options.outputDirectory, `batch-${i + 1}.mp4`);
|
|
2952
|
+
try {
|
|
2953
|
+
// Process each video with its operations
|
|
2954
|
+
await this.createVideo({
|
|
2955
|
+
source: video.source,
|
|
2956
|
+
...video.operations
|
|
2957
|
+
});
|
|
2958
|
+
results.push({
|
|
2959
|
+
source: typeof video.source === 'string' ? video.source : 'buffer',
|
|
2960
|
+
output: outputPath,
|
|
2961
|
+
success: true
|
|
2962
|
+
});
|
|
2963
|
+
}
|
|
2964
|
+
catch (error) {
|
|
2965
|
+
results.push({
|
|
2966
|
+
source: typeof video.source === 'string' ? video.source : 'buffer',
|
|
2967
|
+
output: outputPath,
|
|
2968
|
+
success: false
|
|
2969
|
+
});
|
|
2970
|
+
}
|
|
2971
|
+
}
|
|
2972
|
+
return results;
|
|
2973
|
+
}
|
|
2974
|
+
/**
|
|
2975
|
+
* Detect scene changes in video
|
|
2976
|
+
* @private
|
|
2977
|
+
*/
|
|
2978
|
+
async #detectVideoScenes(videoSource, options) {
|
|
2979
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
2980
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
2981
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
2982
|
+
}
|
|
2983
|
+
let videoPath;
|
|
2984
|
+
let shouldCleanupVideo = false;
|
|
2985
|
+
const timestamp = Date.now();
|
|
2986
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
2987
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
2988
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
2989
|
+
shouldCleanupVideo = true;
|
|
2990
|
+
}
|
|
2991
|
+
else {
|
|
2992
|
+
let resolvedPath = videoSource;
|
|
2993
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
2994
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
2995
|
+
}
|
|
2996
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
2997
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
2998
|
+
}
|
|
2999
|
+
videoPath = resolvedPath;
|
|
3000
|
+
}
|
|
3001
|
+
const threshold = options.threshold || 0.3;
|
|
3002
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
3003
|
+
const sceneFile = path_1.default.join(frameDir, `scenes-${timestamp}.txt`);
|
|
3004
|
+
// Use FFmpeg's scene detection
|
|
3005
|
+
const command = `ffmpeg -i "${escapedVideoPath}" -vf "select='gt(scene,${threshold})',showinfo" -f null - 2>&1 | grep "pts_time" | awk '{print $6}' | sed 's/time=//'`;
|
|
3006
|
+
try {
|
|
3007
|
+
const { stdout } = await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
3008
|
+
const times = stdout.toString().trim().split('\n').filter(t => t).map(parseFloat);
|
|
3009
|
+
const scenes = times.map((time, index) => ({ time, scene: index + 1 }));
|
|
3010
|
+
if (options.outputPath && scenes.length > 0) {
|
|
3011
|
+
fs_1.default.writeFileSync(options.outputPath, JSON.stringify(scenes, null, 2));
|
|
3012
|
+
}
|
|
3013
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3014
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3015
|
+
}
|
|
3016
|
+
if (fs_1.default.existsSync(sceneFile)) {
|
|
3017
|
+
fs_1.default.unlinkSync(sceneFile);
|
|
3018
|
+
}
|
|
3019
|
+
return scenes;
|
|
3020
|
+
}
|
|
3021
|
+
catch (error) {
|
|
3022
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3023
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3024
|
+
}
|
|
3025
|
+
if (fs_1.default.existsSync(sceneFile)) {
|
|
3026
|
+
fs_1.default.unlinkSync(sceneFile);
|
|
3027
|
+
}
|
|
3028
|
+
// Return empty array if detection fails
|
|
3029
|
+
return [];
|
|
3030
|
+
}
|
|
3031
|
+
}
|
|
3032
|
+
/**
|
|
3033
|
+
* Stabilize video (reduce shake)
|
|
3034
|
+
* @private
|
|
3035
|
+
*/
|
|
3036
|
+
async #stabilizeVideo(videoSource, options) {
|
|
3037
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
3038
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
3039
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
3040
|
+
}
|
|
3041
|
+
let videoPath;
|
|
3042
|
+
let shouldCleanupVideo = false;
|
|
3043
|
+
const timestamp = Date.now();
|
|
3044
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
3045
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
3046
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
3047
|
+
shouldCleanupVideo = true;
|
|
3048
|
+
}
|
|
3049
|
+
else {
|
|
3050
|
+
let resolvedPath = videoSource;
|
|
3051
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
3052
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
3053
|
+
}
|
|
3054
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
3055
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
3056
|
+
}
|
|
3057
|
+
videoPath = resolvedPath;
|
|
3058
|
+
}
|
|
3059
|
+
const smoothing = options.smoothing || 10;
|
|
3060
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
3061
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
3062
|
+
// Two-pass stabilization
|
|
3063
|
+
const transformsFile = path_1.default.join(frameDir, `transforms-${timestamp}.trf`);
|
|
3064
|
+
// Pass 1: Analyze
|
|
3065
|
+
const analyzeCommand = `ffmpeg -i "${escapedVideoPath}" -vf vidstabdetect=shakiness=5:accuracy=15:result="${transformsFile.replace(/"/g, '\\"')}" -f null -`;
|
|
3066
|
+
// Pass 2: Transform
|
|
3067
|
+
const transformCommand = `ffmpeg -i "${escapedVideoPath}" -vf vidstabtransform=smoothing=${smoothing}:input="${transformsFile.replace(/"/g, '\\"')}" -y "${escapedOutputPath}"`;
|
|
3068
|
+
try {
|
|
3069
|
+
await execAsync(analyzeCommand, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
|
|
3070
|
+
await execAsync(transformCommand, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
|
|
3071
|
+
if (fs_1.default.existsSync(transformsFile)) {
|
|
3072
|
+
fs_1.default.unlinkSync(transformsFile);
|
|
3073
|
+
}
|
|
3074
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3075
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3076
|
+
}
|
|
3077
|
+
return { outputPath: options.outputPath, success: true };
|
|
3078
|
+
}
|
|
3079
|
+
catch (error) {
|
|
3080
|
+
// Fallback to simple deshake if vidstab is not available
|
|
3081
|
+
const simpleCommand = `ffmpeg -i "${escapedVideoPath}" -vf "hqdn3d=4:3:6:4.5" -y "${escapedOutputPath}"`;
|
|
3082
|
+
try {
|
|
3083
|
+
await execAsync(simpleCommand, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
3084
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3085
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3086
|
+
}
|
|
3087
|
+
return { outputPath: options.outputPath, success: true };
|
|
3088
|
+
}
|
|
3089
|
+
catch (fallbackError) {
|
|
3090
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3091
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3092
|
+
}
|
|
3093
|
+
throw error;
|
|
3094
|
+
}
|
|
3095
|
+
}
|
|
3096
|
+
}
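Stabilization runs as the two-pass vidstab flow shown above: an analysis pass writes a transforms file, and a second pass applies the smoothed transforms. A sketch of those two commands as plain strings; input.mp4, stable.mp4 and transforms.trf are illustrative names, and the filters require an ffmpeg build with libvidstab (hence the hqdn3d fallback in the method):

// Sketch: the two vidstab passes issued by #stabilizeVideo.
const pass1 = `ffmpeg -i "input.mp4" -vf vidstabdetect=shakiness=5:accuracy=15:result="transforms.trf" -f null -`;
const pass2 = `ffmpeg -i "input.mp4" -vf vidstabtransform=smoothing=10:input="transforms.trf" -y "stable.mp4"`;
console.log(pass1); // pass 1: analyse motion, write the transforms file
console.log(pass2); // pass 2: apply smoothed transforms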
|
|
3097
|
+
/**
|
|
3098
|
+
* Color correct video
|
|
3099
|
+
* @private
|
|
3100
|
+
*/
|
|
3101
|
+
async #colorCorrectVideo(videoSource, options) {
|
|
3102
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
3103
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
3104
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
3105
|
+
}
|
|
3106
|
+
let videoPath;
|
|
3107
|
+
let shouldCleanupVideo = false;
|
|
3108
|
+
const timestamp = Date.now();
|
|
3109
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
3110
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
3111
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
3112
|
+
shouldCleanupVideo = true;
|
|
3113
|
+
}
|
|
3114
|
+
else {
|
|
3115
|
+
let resolvedPath = videoSource;
|
|
3116
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
3117
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
3118
|
+
}
|
|
3119
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
3120
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
3121
|
+
}
|
|
3122
|
+
videoPath = resolvedPath;
|
|
3123
|
+
}
|
|
3124
|
+
const filters = [];
|
|
3125
|
+
if (options.brightness !== undefined) {
|
|
3126
|
+
filters.push(`eq=brightness=${(options.brightness / 100).toFixed(2)}`);
|
|
3127
|
+
}
|
|
3128
|
+
if (options.contrast !== undefined) {
|
|
3129
|
+
filters.push(`eq=contrast=${1 + (options.contrast / 100)}`);
|
|
3130
|
+
}
|
|
3131
|
+
if (options.saturation !== undefined) {
|
|
3132
|
+
filters.push(`eq=saturation=${1 + (options.saturation / 100)}`);
|
|
3133
|
+
}
|
|
3134
|
+
if (options.hue !== undefined) {
|
|
3135
|
+
filters.push(`hue=h=${options.hue}`);
|
|
3136
|
+
}
|
|
3137
|
+
if (options.temperature !== undefined) {
|
|
3138
|
+
// Temperature adjustment using colorbalance
|
|
3139
|
+
const temp = options.temperature;
|
|
3140
|
+
if (temp > 0) {
|
|
3141
|
+
filters.push(`colorbalance=rs=${temp / 100}:gs=-${temp / 200}:bs=-${temp / 100}`);
|
|
3142
|
+
}
|
|
3143
|
+
else {
|
|
3144
|
+
filters.push(`colorbalance=rs=${temp / 100}:gs=${-temp / 200}:bs=${-temp / 100}`);
|
|
3145
|
+
}
|
|
3146
|
+
}
|
|
3147
|
+
const filterChain = filters.length > 0 ? `-vf "${filters.join(',')}"` : '';
|
|
3148
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
3149
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
3150
|
+
const command = `ffmpeg -i "${escapedVideoPath}" ${filterChain} -y "${escapedOutputPath}"`;
|
|
3151
|
+
try {
|
|
3152
|
+
await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
3153
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3154
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3155
|
+
}
|
|
3156
|
+
return { outputPath: options.outputPath, success: true };
|
|
3157
|
+
}
|
|
3158
|
+
catch (error) {
|
|
3159
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3160
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3161
|
+
}
|
|
3162
|
+
throw error;
|
|
3163
|
+
}
|
|
3164
|
+
}
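Color correction treats brightness, contrast and saturation as percentages mapped onto ffmpeg's eq filter, with hue passed through in degrees, exactly as in the branches above. A compact sketch of that mapping:

// Sketch: build the eq/hue filter chain from percentage-style options.
function eqFilter({ brightness, contrast, saturation, hue } = {}) {
    const f = [];
    if (brightness !== undefined) f.push(`eq=brightness=${(brightness / 100).toFixed(2)}`);
    if (contrast !== undefined) f.push(`eq=contrast=${1 + contrast / 100}`);
    if (saturation !== undefined) f.push(`eq=saturation=${1 + saturation / 100}`);
    if (hue !== undefined) f.push(`hue=h=${hue}`);
    return f.join(',');
}

console.log(eqFilter({ brightness: 10, contrast: 20, hue: 15 }));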
|
|
3165
|
+
/**
|
|
3166
|
+
* Add picture-in-picture
|
|
3167
|
+
* @private
|
|
3168
|
+
*/
|
|
3169
|
+
async #addPictureInPicture(videoSource, options) {
|
|
3170
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
3171
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
3172
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
3173
|
+
}
|
|
3174
|
+
let videoPath;
|
|
3175
|
+
let overlayPath;
|
|
3176
|
+
let shouldCleanupVideo = false;
|
|
3177
|
+
let shouldCleanupOverlay = false;
|
|
3178
|
+
const timestamp = Date.now();
|
|
3179
|
+
// Handle main video
|
|
3180
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
3181
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
3182
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
3183
|
+
shouldCleanupVideo = true;
|
|
3184
|
+
}
|
|
3185
|
+
else {
|
|
3186
|
+
let resolvedPath = videoSource;
|
|
3187
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
3188
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
3189
|
+
}
|
|
3190
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
3191
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
3192
|
+
}
|
|
3193
|
+
videoPath = resolvedPath;
|
|
3194
|
+
}
|
|
3195
|
+
// Handle overlay video
|
|
3196
|
+
if (Buffer.isBuffer(options.overlayVideo)) {
|
|
3197
|
+
overlayPath = path_1.default.join(frameDir, `temp-overlay-${timestamp}.mp4`);
|
|
3198
|
+
fs_1.default.writeFileSync(overlayPath, options.overlayVideo);
|
|
3199
|
+
shouldCleanupOverlay = true;
|
|
3200
|
+
}
|
|
3201
|
+
else {
|
|
3202
|
+
let resolvedPath = options.overlayVideo;
|
|
3203
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
3204
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
3205
|
+
}
|
|
3206
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
3207
|
+
throw new Error(`Overlay video file not found: ${options.overlayVideo}`);
|
|
3208
|
+
}
|
|
3209
|
+
overlayPath = resolvedPath;
|
|
3210
|
+
}
|
|
3211
|
+
const position = options.position || 'bottom-right';
|
|
3212
|
+
const size = options.size || { width: 320, height: 180 };
|
|
3213
|
+
const opacity = options.opacity || 1.0;
|
|
3214
|
+
const positionMap = {
|
|
3215
|
+
'top-left': '10:10',
|
|
3216
|
+
'top-right': 'W-w-10:10',
|
|
3217
|
+
'bottom-left': '10:H-h-10',
|
|
3218
|
+
'bottom-right': 'W-w-10:H-h-10',
|
|
3219
|
+
'center': '(W-w)/2:(H-h)/2'
|
|
3220
|
+
};
|
|
3221
|
+
const overlay = positionMap[position];
|
|
3222
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
3223
|
+
const escapedOverlayPath = overlayPath.replace(/"/g, '\\"');
|
|
3224
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
3225
|
+
const filter = `[1:v]scale=${size.width}:${size.height},format=rgba,colorchannelmixer=aa=${opacity}[overlay];[0:v][overlay]overlay=${overlay}`;
|
|
3226
|
+
const command = `ffmpeg -i "${escapedVideoPath}" -i "${escapedOverlayPath}" -filter_complex "${filter}" -y "${escapedOutputPath}"`;
|
|
3227
|
+
try {
|
|
3228
|
+
await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
3229
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3230
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3231
|
+
}
|
|
3232
|
+
if (shouldCleanupOverlay && fs_1.default.existsSync(overlayPath)) {
|
|
3233
|
+
fs_1.default.unlinkSync(overlayPath);
|
|
3234
|
+
}
|
|
3235
|
+
return { outputPath: options.outputPath, success: true };
|
|
3236
|
+
}
|
|
3237
|
+
catch (error) {
|
|
3238
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3239
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3240
|
+
}
|
|
3241
|
+
if (shouldCleanupOverlay && fs_1.default.existsSync(overlayPath)) {
|
|
3242
|
+
fs_1.default.unlinkSync(overlayPath);
|
|
3243
|
+
}
|
|
3244
|
+
throw error;
|
|
3245
|
+
}
|
|
3246
|
+
}
|
|
3247
|
+
/**
|
|
3248
|
+
* Create split screen video
|
|
3249
|
+
* @private
|
|
3250
|
+
*/
|
|
3251
|
+
async #createSplitScreen(options) {
|
|
3252
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
3253
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
3254
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
3255
|
+
}
|
|
3256
|
+
const timestamp = Date.now();
|
|
3257
|
+
const videoPaths = [];
|
|
3258
|
+
const shouldCleanup = [];
|
|
3259
|
+
// Prepare all video files
|
|
3260
|
+
for (let i = 0; i < options.videos.length; i++) {
|
|
3261
|
+
const video = options.videos[i];
|
|
3262
|
+
if (Buffer.isBuffer(video)) {
|
|
3263
|
+
const tempPath = path_1.default.join(frameDir, `temp-video-${timestamp}-${i}.mp4`);
|
|
3264
|
+
fs_1.default.writeFileSync(tempPath, video);
|
|
3265
|
+
videoPaths.push(tempPath);
|
|
3266
|
+
shouldCleanup.push(true);
|
|
3267
|
+
}
|
|
3268
|
+
else {
|
|
3269
|
+
let resolvedPath = video;
|
|
3270
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
3271
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
3272
|
+
}
|
|
3273
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
3274
|
+
throw new Error(`Video file not found: ${video}`);
|
|
3275
|
+
}
|
|
3276
|
+
videoPaths.push(resolvedPath);
|
|
3277
|
+
shouldCleanup.push(false);
|
|
3278
|
+
}
|
|
3279
|
+
}
|
|
3280
|
+
const layout = options.layout || 'side-by-side';
|
|
3281
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
3282
|
+
const escapedPaths = videoPaths.map(vp => vp.replace(/"/g, '\\"'));
|
|
3283
|
+
let command;
|
|
3284
|
+
if (layout === 'side-by-side' && videoPaths.length >= 2) {
|
|
3285
|
+
command = `ffmpeg -i "${escapedPaths[0]}" -i "${escapedPaths[1]}" -filter_complex "[0:v][1:v]hstack=inputs=2[v]" -map "[v]" -y "${escapedOutputPath}"`;
|
|
3286
|
+
}
|
|
3287
|
+
else if (layout === 'top-bottom' && videoPaths.length >= 2) {
|
|
3288
|
+
command = `ffmpeg -i "${escapedPaths[0]}" -i "${escapedPaths[1]}" -filter_complex "[0:v][1:v]vstack=inputs=2[v]" -map "[v]" -y "${escapedOutputPath}"`;
|
|
3289
|
+
}
|
|
3290
|
+
else if (layout === 'grid' && videoPaths.length >= 4) {
|
|
3291
|
+
const grid = options.grid || { cols: 2, rows: 2 };
|
|
3292
|
+
// Simplified 2x2 grid
|
|
3293
|
+
command = `ffmpeg -i "${escapedPaths[0]}" -i "${escapedPaths[1]}" -i "${escapedPaths[2]}" -i "${escapedPaths[3]}" -filter_complex "[0:v][1:v]hstack=inputs=2[top];[2:v][3:v]hstack=inputs=2[bottom];[top][bottom]vstack=inputs=2[v]" -map "[v]" -y "${escapedOutputPath}"`;
|
|
3294
|
+
}
|
|
3295
|
+
else {
|
|
3296
|
+
throw new Error(`Invalid layout or insufficient videos for ${layout}`);
|
|
3297
|
+
}
|
|
3298
|
+
try {
|
|
3299
|
+
await execAsync(command, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
|
|
3300
|
+
// Cleanup
|
|
3301
|
+
for (let i = 0; i < videoPaths.length; i++) {
|
|
3302
|
+
if (shouldCleanup[i] && fs_1.default.existsSync(videoPaths[i])) {
|
|
3303
|
+
fs_1.default.unlinkSync(videoPaths[i]);
|
|
3304
|
+
}
|
|
3305
|
+
}
|
|
3306
|
+
return { outputPath: options.outputPath, success: true };
|
|
3307
|
+
}
|
|
3308
|
+
catch (error) {
|
|
3309
|
+
// Cleanup on error
|
|
3310
|
+
for (let i = 0; i < videoPaths.length; i++) {
|
|
3311
|
+
if (shouldCleanup[i] && fs_1.default.existsSync(videoPaths[i])) {
|
|
3312
|
+
fs_1.default.unlinkSync(videoPaths[i]);
|
|
3313
|
+
}
|
|
3314
|
+
}
|
|
3315
|
+
throw error;
|
|
3316
|
+
}
|
|
3317
|
+
}
|
|
3318
|
+
/**
|
|
3319
|
+
* Create time-lapse video
|
|
3320
|
+
* @private
|
|
3321
|
+
*/
|
|
3322
|
+
async #createTimeLapseVideo(videoSource, options) {
|
|
3323
|
+
const speed = options.speed || 10;
|
|
3324
|
+
// Time-lapse is essentially speeding up the video
|
|
3325
|
+
return await this.#changeVideoSpeed(videoSource, { speed, outputPath: options.outputPath });
|
|
3326
|
+
}
|
|
3327
|
+
/**
|
|
3328
|
+
* Mute video (remove audio)
|
|
3329
|
+
* @private
|
|
3330
|
+
*/
|
|
3331
|
+
async #muteVideo(videoSource, options) {
|
|
3332
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
3333
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
3334
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
3335
|
+
}
|
|
3336
|
+
let videoPath;
|
|
3337
|
+
let shouldCleanupVideo = false;
|
|
3338
|
+
const timestamp = Date.now();
|
|
3339
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
3340
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
3341
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
3342
|
+
shouldCleanupVideo = true;
|
|
3343
|
+
}
|
|
3344
|
+
else {
|
|
3345
|
+
let resolvedPath = videoSource;
|
|
3346
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
3347
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
3348
|
+
}
|
|
3349
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
3350
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
3351
|
+
}
|
|
3352
|
+
videoPath = resolvedPath;
|
|
3353
|
+
}
|
|
3354
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
3355
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
3356
|
+
const command = `ffmpeg -i "${escapedVideoPath}" -c copy -an -y "${escapedOutputPath}"`;
|
|
3357
|
+
try {
|
|
3358
|
+
await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
3359
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3360
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3361
|
+
}
|
|
3362
|
+
return { outputPath: options.outputPath, success: true };
|
|
3363
|
+
}
|
|
3364
|
+
catch (error) {
|
|
3365
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3366
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3367
|
+
}
|
|
3368
|
+
throw error;
|
|
3369
|
+
}
|
|
3370
|
+
}
|
|
3371
|
+
/**
|
|
3372
|
+
* Adjust video volume
|
|
3373
|
+
* @private
|
|
3374
|
+
*/
|
|
3375
|
+
async #adjustVideoVolume(videoSource, options) {
|
|
3376
|
+
const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
|
|
3377
|
+
if (!fs_1.default.existsSync(frameDir)) {
|
|
3378
|
+
fs_1.default.mkdirSync(frameDir, { recursive: true });
|
|
3379
|
+
}
|
|
3380
|
+
let videoPath;
|
|
3381
|
+
let shouldCleanupVideo = false;
|
|
3382
|
+
const timestamp = Date.now();
|
|
3383
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
3384
|
+
videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
3385
|
+
fs_1.default.writeFileSync(videoPath, videoSource);
|
|
3386
|
+
shouldCleanupVideo = true;
|
|
3387
|
+
}
|
|
3388
|
+
else {
|
|
3389
|
+
let resolvedPath = videoSource;
|
|
3390
|
+
if (!/^https?:\/\//i.test(resolvedPath)) {
|
|
3391
|
+
resolvedPath = path_1.default.join(process.cwd(), resolvedPath);
|
|
3392
|
+
}
|
|
3393
|
+
if (!fs_1.default.existsSync(resolvedPath)) {
|
|
3394
|
+
throw new Error(`Video file not found: ${videoSource}`);
|
|
3395
|
+
}
|
|
3396
|
+
videoPath = resolvedPath;
|
|
3397
|
+
}
|
|
3398
|
+
const volume = Math.max(0, Math.min(10, options.volume)); // Clamp between 0 and 10
|
|
3399
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
3400
|
+
const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
|
|
3401
|
+
const command = `ffmpeg -i "${escapedVideoPath}" -af "volume=${volume}" -y "${escapedOutputPath}"`;
|
|
3402
|
+
try {
|
|
3403
|
+
await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
|
|
3404
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3405
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3406
|
+
}
|
|
3407
|
+
return { outputPath: options.outputPath, success: true };
|
|
3408
|
+
}
|
|
3409
|
+
catch (error) {
|
|
3410
|
+
if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
|
|
3411
|
+
fs_1.default.unlinkSync(videoPath);
|
|
3412
|
+
}
|
|
3413
|
+
throw error;
|
|
3414
|
+
}
|
|
3415
|
+
}
|
|
3416
|
+
/**
|
|
3417
|
+
* Extracts a frame at a specific time in seconds
|
|
3418
|
+
* @param videoSource - Video source (path, URL, or Buffer)
|
|
3419
|
+
* @param timeSeconds - Time in seconds
|
|
3420
|
+
* @param outputFormat - Output format ('jpg' or 'png', default: 'jpg')
|
|
3421
|
+
* @param quality - JPEG quality 1-31 (lower = better, default: 2)
|
|
3422
|
+
* @returns Buffer containing the frame image
|
|
3423
|
+
*/
|
|
3424
|
+
async extractFrameAtTime(videoSource, timeSeconds, outputFormat = 'jpg', quality = 2) {
|
|
3425
|
+
return this.#extractVideoFrame(videoSource, 0, timeSeconds, outputFormat, quality);
|
|
3426
|
+
}
+    /**
+     * Extracts a frame by frame number (converts to time using video FPS)
+     * @param videoSource - Video source (path, URL, or Buffer)
+     * @param frameNumber - Frame number to extract (1-based: frame 1 = first frame)
+     * @param outputFormat - Output format ('jpg' or 'png', default: 'jpg')
+     * @param quality - JPEG quality 1-31 (lower = better, default: 2)
+     * @returns Buffer containing the frame image
+     */
+    async extractFrameByNumber(videoSource, frameNumber, outputFormat = 'jpg', quality = 2) {
+        // Get video info to convert frame number to time
+        const videoInfo = await this.getVideoInfo(videoSource, true);
+        if (!videoInfo || videoInfo.fps <= 0) {
+            throw new Error('Could not get video FPS to convert frame number to time');
+        }
+        // Convert frame number to time (frame 1 = 0 seconds, frame 2 = 1/fps, etc.)
+        // For 1-based frame numbers: frame 1 = time 0, frame 2 = time 1/fps
+        const timeSeconds = (frameNumber - 1) / videoInfo.fps;
+        return this.#extractVideoFrame(videoSource, frameNumber - 1, timeSeconds, outputFormat, quality);
+    }
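The conversion above maps a 1-based frame number to a timestamp with (frameNumber - 1) / fps, so the arithmetic works out as in this small sketch (30 fps is an assumed example rate):

// Frame-number to time conversion used by extractFrameByNumber, at an assumed 30 fps.
const fps = 30;
console.log((1 - 1) / fps);  // 0   -> frame 1 is the very first frame
console.log((31 - 1) / fps); // 1   -> frame 31 lands exactly one second in
console.log((46 - 1) / fps); // 1.5 -> frame 46 is one and a half seconds in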
+    /**
+     * Extracts multiple frames at specific times
+     * @param videoSource - Video source (path, URL, or Buffer)
+     * @param times - Array of times in seconds
+     * @param outputFormat - Output format ('jpg' or 'png', default: 'jpg')
+     * @param quality - JPEG quality 1-31 (lower = better, default: 2)
+     * @returns Array of buffers containing frame images
+     */
+    async extractMultipleFrames(videoSource, times, outputFormat = 'jpg', quality = 2) {
+        const frames = [];
+        for (const time of times) {
+            const frame = await this.extractFrameAtTime(videoSource, time, outputFormat, quality);
+            if (frame) {
+                frames.push(frame);
+            }
+        }
+        return frames;
+    }
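extractMultipleFrames simply loops over the timestamps and calls extractFrameAtTime once per entry, so frames are extracted sequentially. A usage sketch, under the same assumptions as the earlier example and with hypothetical paths:

const fs = require('fs');
const { ApexPainter } = require('apexify.js'); // assumed root export

(async () => {
    const painter = new ApexPainter();
    // Thumbnails at 0 s, 1 s, 2.5 s and 5 s as JPEG buffers (quality 2 = high quality per the JSDoc above).
    const thumbs = await painter.extractMultipleFrames('./clip.mp4', [0, 1, 2.5, 5], 'jpg', 2);
    thumbs.forEach((buf, i) => fs.writeFileSync(`./thumb-${i}.jpg`, buf));
})();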
+    /**
+     * Extracts ALL frames from a video and saves them as image files
+     * @param videoSource - Video source (path, URL, or Buffer)
+     * @param options - Extraction options
+     * @returns Array of frame file paths
+     */
+    async extractAllFrames(videoSource, options) {
+        try {
+            const ffmpegAvailable = await this.#checkFFmpegAvailable();
+            if (!ffmpegAvailable) {
+                const errorMessage = '❌ FFMPEG NOT FOUND\n' +
+                    'Video processing features require FFmpeg to be installed on your system.\n' +
+                    this.#getFFmpegInstallInstructions();
+                throw new Error(errorMessage);
+            }
+            // Get video info first
+            const videoInfo = await this.getVideoInfo(videoSource, true);
+            if (!videoInfo) {
+                throw new Error('Could not get video information');
+            }
+            const outputFormat = options?.outputFormat || 'png';
+            const outputDir = options?.outputDirectory || path_1.default.join(process.cwd(), 'extracted-frames');
+            const prefix = options?.prefix || 'frame';
+            const quality = options?.quality || 2;
+            // Create output directory
+            if (!fs_1.default.existsSync(outputDir)) {
+                fs_1.default.mkdirSync(outputDir, { recursive: true });
+            }
+            const frameDir = path_1.default.join(process.cwd(), '.temp-frames');
+            if (!fs_1.default.existsSync(frameDir)) {
+                fs_1.default.mkdirSync(frameDir, { recursive: true });
+            }
+            const timestamp = Date.now();
+            let videoPath;
+            let shouldCleanupVideo = false;
+            // Handle video source
+            if (Buffer.isBuffer(videoSource)) {
+                videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
+                fs_1.default.writeFileSync(videoPath, videoSource);
+                shouldCleanupVideo = true;
+            }
+            else if (typeof videoSource === 'string' && videoSource.startsWith('http')) {
+                const response = await (0, axios_1.default)({
+                    method: 'get',
+                    url: videoSource,
+                    responseType: 'arraybuffer'
+                });
+                videoPath = path_1.default.join(frameDir, `temp-video-${timestamp}.mp4`);
+                fs_1.default.writeFileSync(videoPath, Buffer.from(response.data));
+                shouldCleanupVideo = true;
+            }
+            else {
+                if (!fs_1.default.existsSync(videoSource)) {
+                    throw new Error(`Video file not found: ${videoSource}`);
+                }
+                videoPath = videoSource;
+            }
+            // Calculate time range
+            const startTime = options?.startTime ?? 0;
+            const endTime = options?.endTime ?? videoInfo.duration;
+            const duration = endTime - startTime;
+            // Extract all frames using ffmpeg
+            // Use -fps_mode passthrough to extract every frame (no frame skipping)
+            // Don't use -f flag, let FFmpeg infer format from file extension
+            const qualityFlag = outputFormat === 'jpg' ? `-q:v ${quality}` : '';
+            const pixFmt = outputFormat === 'png' ? '-pix_fmt rgba' : '-pix_fmt rgb24';
+            const outputTemplate = path_1.default.join(outputDir, `${prefix}-%06d.${outputFormat}`);
+            const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+            const escapedOutputTemplate = outputTemplate.replace(/"/g, '\\"');
+            // Use -fps_mode passthrough instead of deprecated -vsync 0
+            // Use -ss after -i for more accurate frame extraction
+            const command = `ffmpeg -i "${escapedVideoPath}" -ss ${startTime} -t ${duration} -fps_mode passthrough ${pixFmt} ${qualityFlag} -y "${escapedOutputTemplate}"`;
+            await execAsync(command, {
+                timeout: 300000, // 5 minute timeout for large videos
+                maxBuffer: 10 * 1024 * 1024
+            });
+            // Collect all extracted frame files
+            const frames = [];
+            let frameIndex = 0;
+            let currentTime = startTime;
+            while (true) {
+                const frameNumber = frameIndex + 1;
+                const framePath = path_1.default.join(outputDir, `${prefix}-${String(frameNumber).padStart(6, '0')}.${outputFormat}`);
+                if (fs_1.default.existsSync(framePath)) {
+                    frames.push({
+                        source: framePath,
+                        frameNumber: frameIndex,
+                        time: currentTime
+                    });
+                    currentTime += 1 / videoInfo.fps; // Increment by frame duration
+                    frameIndex++;
+                }
+                else {
+                    break; // No more frames
+                }
+            }
+            // Cleanup temp video if created
+            if (shouldCleanupVideo && fs_1.default.existsSync(videoPath)) {
+                fs_1.default.unlinkSync(videoPath);
+            }
+            console.log(`✅ Extracted ${frames.length} frames from video`);
+            return frames;
+        }
+        catch (error) {
+            const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+            if (errorMessage.includes('FFMPEG NOT FOUND') || errorMessage.includes('FFmpeg')) {
+                throw error;
+            }
+            throw new Error(`extractAllFrames failed: ${errorMessage}`);
+        }
+    }
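extractAllFrames shells out to FFmpeg once, writes every frame inside the requested time window into the output directory, and returns one { source, frameNumber, time } record per file it finds. A usage sketch, with the same caveats as the earlier examples (FFmpeg on PATH, assumed import path, hypothetical file names); the option names mirror the options?.… reads in the method body:

const { ApexPainter } = require('apexify.js'); // assumed root export

(async () => {
    const painter = new ApexPainter();
    // Dump the first three seconds of the clip as PNGs named clip-000001.png, clip-000002.png, ...
    const frames = await painter.extractAllFrames('./clip.mp4', {
        outputDirectory: './extracted-frames',
        outputFormat: 'png',
        prefix: 'clip',
        startTime: 0,
        endTime: 3
    });
    console.log(frames.length, frames[0]); // e.g. { source: '...', frameNumber: 0, time: 0 }
})();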
     /**
      * Validates masking inputs.
      * @private