apexify.js 5.0.3 → 5.1.0
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +352 -137
- package/README.md +440 -19
- package/dist/cjs/Canvas/ApexPainter.d.ts +272 -0
- package/dist/cjs/Canvas/ApexPainter.d.ts.map +1 -1
- package/dist/cjs/Canvas/ApexPainter.js +2275 -125
- package/dist/cjs/Canvas/ApexPainter.js.map +1 -1
- package/dist/cjs/Canvas/utils/Custom/advancedLines.d.ts +4 -4
- package/dist/cjs/Canvas/utils/Custom/advancedLines.d.ts.map +1 -1
- package/dist/cjs/Canvas/utils/Custom/advancedLines.js +63 -21
- package/dist/cjs/Canvas/utils/Custom/advancedLines.js.map +1 -1
- package/dist/cjs/Canvas/utils/Custom/customLines.d.ts.map +1 -1
- package/dist/cjs/Canvas/utils/Custom/customLines.js +3 -0
- package/dist/cjs/Canvas/utils/Custom/customLines.js.map +1 -1
- package/dist/cjs/Canvas/utils/types.d.ts +5 -1
- package/dist/cjs/Canvas/utils/types.d.ts.map +1 -1
- package/dist/cjs/Canvas/utils/types.js.map +1 -1
- package/dist/esm/Canvas/ApexPainter.d.ts +272 -0
- package/dist/esm/Canvas/ApexPainter.d.ts.map +1 -1
- package/dist/esm/Canvas/ApexPainter.js +2275 -125
- package/dist/esm/Canvas/ApexPainter.js.map +1 -1
- package/dist/esm/Canvas/utils/Custom/advancedLines.d.ts +4 -4
- package/dist/esm/Canvas/utils/Custom/advancedLines.d.ts.map +1 -1
- package/dist/esm/Canvas/utils/Custom/advancedLines.js +63 -21
- package/dist/esm/Canvas/utils/Custom/advancedLines.js.map +1 -1
- package/dist/esm/Canvas/utils/Custom/customLines.d.ts.map +1 -1
- package/dist/esm/Canvas/utils/Custom/customLines.js +3 -0
- package/dist/esm/Canvas/utils/Custom/customLines.js.map +1 -1
- package/dist/esm/Canvas/utils/types.d.ts +5 -1
- package/dist/esm/Canvas/utils/types.d.ts.map +1 -1
- package/dist/esm/Canvas/utils/types.js.map +1 -1
- package/lib/Canvas/ApexPainter.ts +2973 -136
- package/lib/Canvas/utils/Custom/advancedLines.ts +77 -25
- package/lib/Canvas/utils/Custom/customLines.ts +4 -0
- package/lib/Canvas/utils/types.ts +6 -2
- package/package.json +1 -3
@@ -1,10 +1,13 @@
-import { createCanvas, loadImage,
+import { createCanvas, loadImage, Image, SKRSContext2D } from "@napi-rs/canvas";
 import GIFEncoder from "gifencoder";
-import ffmpeg from 'fluent-ffmpeg';
 import { PassThrough} from "stream";
+import { exec, execSync } from "child_process";
+import { promisify } from "util";
 import axios from 'axios';
 import fs, { PathLike } from "fs";
 import path from "path";
+
+const execAsync = promisify(exec);
 import { OutputFormat, CanvasConfig, TextProperties, ImageProperties, GIFOptions, GIFResults, CustomOptions, cropOptions,
 drawBackgroundGradient, drawBackgroundColor, customBackground, customLines,
 converter, resizingImg, applyColorFilters, imgEffects,verticalBarChart, pieChart,
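The import hunk above is the core of this release: the fluent-ffmpeg wrapper is dropped and the package now shells out to the ffmpeg/ffprobe binaries through child_process. A minimal standalone sketch of the same promisified-exec probe (probeFfmpeg is an illustrative name, not part of apexify.js):

```ts
import { exec } from "child_process";
import { promisify } from "util";

const execAsync = promisify(exec);

// Resolves true when an `ffmpeg` binary is reachable on PATH, false otherwise.
async function probeFfmpeg(): Promise<boolean> {
  try {
    await execAsync("ffmpeg -version", { timeout: 5000, maxBuffer: 1024 * 1024 });
    return true;
  } catch {
    return false;
  }
}
```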
@@ -159,7 +162,13 @@ export class ApexPainter {
 // Handle video background inherit sizing
 if (canvas.videoBg) {
 try {
-const frameBuffer = await this.#extractVideoFrame(
+const frameBuffer = await this.#extractVideoFrame(
+canvas.videoBg.source,
+canvas.videoBg.frame ?? 0,
+canvas.videoBg.time,
+canvas.videoBg.format || 'jpg',
+canvas.videoBg.quality || 2
+);
 if (frameBuffer) {
 const img = await loadImage(frameBuffer);
 if (!canvas.width) canvas.width = img.width;
@@ -218,23 +227,61 @@ export class ApexPainter {
 ctx.globalCompositeOperation = blendMode as GlobalCompositeOperation;
 }

-// Draw
+// Draw background - videoBg takes priority, then customBg, then gradientBg, then colorBg
 if (videoBg) {
 try {
-
-
-
-
-
-
+// For videoBg, always use PNG format to ensure compatibility with loadImage
+// The rgb24 pixel format for JPEG can cause issues with loadImage
+const frameBuffer = await this.#extractVideoFrame(
+videoBg.source,
+videoBg.frame ?? 0,
+videoBg.time,
+'png', // Force PNG format for videoBg to ensure proper color rendering
+2
+);
+if (frameBuffer && frameBuffer.length > 0) {
+// Try loading from buffer first, if that fails, save to temp file and load from file
+// This is a workaround for potential buffer compatibility issues with loadImage
+let videoImg: Image;
+try {
+videoImg = await loadImage(frameBuffer);
+} catch (bufferError) {
+// If loading from buffer fails, try saving to temp file and loading from file
+const tempFramePath = path.join(process.cwd(), '.temp-frames', `video-bg-temp-${Date.now()}.png`);
+const frameDir = path.dirname(tempFramePath);
+if (!fs.existsSync(frameDir)) {
+fs.mkdirSync(frameDir, { recursive: true });
+}
+fs.writeFileSync(tempFramePath, frameBuffer);
+videoImg = await loadImage(tempFramePath);
+// Cleanup temp file after loading
+if (fs.existsSync(tempFramePath)) {
+fs.unlinkSync(tempFramePath);
+}
+}
+
+if (videoImg && videoImg.width > 0 && videoImg.height > 0) {
+ctx.globalAlpha = videoBg.opacity ?? 1;
+// Draw the video frame to fill the entire canvas
+ctx.drawImage(videoImg, 0, 0, width, height);
+ctx.globalAlpha = opacity;
+} else {
+throw new Error(`Extracted video frame has invalid dimensions: ${videoImg?.width}x${videoImg?.height}`);
+}
+} else {
+throw new Error('Frame extraction returned empty buffer');
 }
 } catch (e: unknown) {
-
+const errorMsg = e instanceof Error ? e.message : 'Unknown error';
+// Re-throw FFmpeg installation errors so user sees installation guide
+if (errorMsg.includes('FFMPEG NOT FOUND') || errorMsg.includes('FFmpeg')) {
+throw e;
+}
+// Re-throw other errors instead of silently failing with black background
+throw new Error(`createCanvas: videoBg extraction failed: ${errorMsg}`);
 }
-}
-
-// Draw custom background with filters and opacity support
-if (customBg) {
+} else if (customBg) {
+// Draw custom background with filters and opacity support
 await customBackground(ctx, { ...canvas, blur });
 // Apply filters to background if specified
 if (customBg.filters && customBg.filters.length > 0) {
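Judging from the fields this hunk reads (source, frame, time, opacity, plus format and quality in the sizing hunk above), a videoBg canvas option can be supplied roughly as follows. This is a hypothetical usage sketch inferred from the diff, not the package's documented API; any option name beyond those fields is an assumption.

```ts
import { ApexPainter } from "apexify.js";

const painter = new ApexPainter();

// Assumed call shape: createCanvas receiving a config object with a videoBg entry.
const image = await painter.createCanvas({
  width: 1280,
  height: 720,
  videoBg: {
    source: "./intro.mp4", // local path, URL, or Buffer
    time: 2.5,             // seconds into the video (takes priority over `frame`)
    opacity: 0.9,          // applied via ctx.globalAlpha before drawing the frame
  },
});
```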
@@ -258,6 +305,7 @@ export class ApexPainter {
 } else if (gradientBg) {
 await drawBackgroundGradient(ctx, { ...canvas, blur });
 } else {
+// Default to black background if no background is specified
 await drawBackgroundColor(ctx, { ...canvas, blur, colorBg: colorBg ?? '#000' });
 }

@@ -1057,7 +1105,7 @@ export class ApexPainter {
 return fs.createWriteStream(outputFile);
 }

-function createBufferStream() {
+function createBufferStream(): PassThrough & { getBuffer: () => Buffer; chunks: Buffer[] } {
 const bufferStream = new PassThrough();
 const chunks: Buffer[] = [];

@@ -1065,12 +1113,14 @@ export class ApexPainter {
 chunks.push(chunk);
 });

-
-
-
-
-
-
+// Properly extend the stream object
+const extendedStream = bufferStream as PassThrough & { getBuffer: () => Buffer; chunks: Buffer[] };
+extendedStream.getBuffer = function (): Buffer {
+return Buffer.concat(chunks);
+};
+extendedStream.chunks = chunks;
+
+return extendedStream;
 }

 // Validation is done in #validateGIFOptions
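The reworked createBufferStream above attaches getBuffer and chunks directly onto the PassThrough instance, so downstream code can treat the stream itself as the buffer collector. The same pattern in isolation:

```ts
import { PassThrough } from "stream";

type BufferStream = PassThrough & { getBuffer: () => Buffer; chunks: Buffer[] };

// A PassThrough that records every chunk written through it and can hand back
// the concatenated result once the stream has ended.
function createBufferStream(): BufferStream {
  const stream = new PassThrough() as BufferStream;
  const chunks: Buffer[] = [];
  stream.on("data", (chunk: Buffer) => chunks.push(chunk));
  stream.getBuffer = () => Buffer.concat(chunks);
  stream.chunks = chunks;
  return stream;
}
```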
@@ -1079,7 +1129,9 @@ export class ApexPainter {
 const canvasHeight = options.height || 1200;

 const encoder = new GIFEncoder(canvasWidth, canvasHeight);
-
+// Use buffer stream for buffer/base64/attachment, file stream only for 'file' format
+const useBufferStream = options.outputFormat !== "file";
+const outputStream = useBufferStream ? createBufferStream() : (options.outputFile ? createOutputStream(options.outputFile) : createBufferStream());

 encoder.createReadStream().pipe(outputStream);

@@ -1114,12 +1166,17 @@ export class ApexPainter {
 }

 encoder.finish();
-outputStream.end();

 if (options.outputFormat === "file") {
+outputStream.end();
 await new Promise<void>((resolve) => outputStream.on("finish", () => resolve()));
 } else if (options.outputFormat === "base64") {
-
+// Wait for stream to finish before getting buffer
+await new Promise<void>((resolve) => {
+outputStream.on("end", () => resolve());
+outputStream.end();
+});
+if ('getBuffer' in outputStream && typeof outputStream.getBuffer === 'function') {
 return outputStream.getBuffer().toString("base64");
 }
 throw new Error("createGIF: Unable to get buffer for base64 output.");
@@ -1127,7 +1184,12 @@ export class ApexPainter {
 const gifStream = encoder.createReadStream();
 return [{ attachment: gifStream, name: "gif.js" }];
 } else if (options.outputFormat === "buffer") {
-
+// Wait for stream to finish before getting buffer
+await new Promise<void>((resolve) => {
+outputStream.on("end", () => resolve());
+outputStream.end();
+});
+if ('getBuffer' in outputStream && typeof outputStream.getBuffer === 'function') {
 return outputStream.getBuffer();
 }
 throw new Error("createGIF: Unable to get buffer for buffer output.");
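The base64 and buffer branches above now end the collector and wait for its 'end' event before concatenating, so the last GIF chunks are not lost. A generic sketch of that drain-then-concat ordering, independent of createGIF:

```ts
import { PassThrough } from "stream";

// Pipes a readable source through a collector and resolves with the full payload
// only after the collector has emitted 'end'.
async function collectStream(source: NodeJS.ReadableStream): Promise<Buffer> {
  const chunks: Buffer[] = [];
  const collector = new PassThrough();
  collector.on("data", (chunk: Buffer) => chunks.push(chunk));
  source.pipe(collector);
  await new Promise<void>((resolve) => collector.on("end", () => resolve()));
  return Buffer.concat(chunks);
}
```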
@@ -1482,26 +1544,185 @@ export class ApexPainter {
 }
 }

-
-
+private _ffmpegAvailable: boolean | null = null;
+private _ffmpegChecked: boolean = false;
+private _ffmpegPath: string | null = null;
+
+/**
+* Gets comprehensive FFmpeg installation instructions based on OS
+* @private
+* @returns Detailed installation instructions
+*/
+#getFFmpegInstallInstructions(): string {
+const os = process.platform;
+let instructions = '\n\n📹 FFMPEG INSTALLATION GUIDE\n';
+instructions += '═'.repeat(50) + '\n\n';
+
+if (os === 'win32') {
+instructions += '🪟 WINDOWS INSTALLATION:\n\n';
+instructions += 'OPTION 1 - Using Chocolatey (Recommended):\n';
+instructions += ' 1. Open PowerShell as Administrator\n';
+instructions += ' 2. Run: choco install ffmpeg\n';
+instructions += ' 3. Restart your terminal\n\n';
+
+instructions += 'OPTION 2 - Using Winget:\n';
+instructions += ' 1. Open PowerShell\n';
+instructions += ' 2. Run: winget install ffmpeg\n';
+instructions += ' 3. Restart your terminal\n\n';
+
+instructions += 'OPTION 3 - Manual Installation:\n';
+instructions += ' 1. Visit: https://www.gyan.dev/ffmpeg/builds/\n';
+instructions += ' 2. Download "ffmpeg-release-essentials.zip"\n';
+instructions += ' 3. Extract to C:\\ffmpeg\n';
+instructions += ' 4. Add C:\\ffmpeg\\bin to System PATH:\n';
+instructions += ' - Press Win + X → System → Advanced → Environment Variables\n';
+instructions += ' - Edit "Path" → Add "C:\\ffmpeg\\bin"\n';
+instructions += ' 5. Restart terminal and verify: ffmpeg -version\n\n';
+
+instructions += '🔍 Search Terms: "install ffmpeg windows", "ffmpeg windows tutorial"\n';
+instructions += '📺 YouTube: Search "How to install FFmpeg on Windows 2024"\n';
+instructions += '🌐 Official: https://ffmpeg.org/download.html\n';
+} else if (os === 'darwin') {
+instructions += '🍎 macOS INSTALLATION:\n\n';
+instructions += 'OPTION 1 - Using Homebrew (Recommended):\n';
+instructions += ' 1. Install Homebrew if not installed:\n';
+instructions += ' /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"\n';
+instructions += ' 2. Run: brew install ffmpeg\n';
+instructions += ' 3. Verify: ffmpeg -version\n\n';
+
+instructions += 'OPTION 2 - Using MacPorts:\n';
+instructions += ' 1. Install MacPorts from: https://www.macports.org/\n';
+instructions += ' 2. Run: sudo port install ffmpeg\n\n';
+
+instructions += '🔍 Search Terms: "install ffmpeg mac", "ffmpeg macos homebrew"\n';
+instructions += '📺 YouTube: Search "Install FFmpeg on Mac using Homebrew"\n';
+instructions += '🌐 Official: https://ffmpeg.org/download.html\n';
+} else {
+instructions += '🐧 LINUX INSTALLATION:\n\n';
+instructions += 'Ubuntu/Debian:\n';
+instructions += ' sudo apt-get update\n';
+instructions += ' sudo apt-get install ffmpeg\n\n';
+
+instructions += 'RHEL/CentOS/Fedora:\n';
+instructions += ' sudo yum install ffmpeg\n';
+instructions += ' # OR for newer versions:\n';
+instructions += ' sudo dnf install ffmpeg\n\n';
+
+instructions += 'Arch Linux:\n';
+instructions += ' sudo pacman -S ffmpeg\n\n';
+
+instructions += '🔍 Search Terms: "install ffmpeg [your-distro]", "ffmpeg linux tutorial"\n';
+instructions += '📺 YouTube: Search "Install FFmpeg on Linux"\n';
+instructions += '🌐 Official: https://ffmpeg.org/download.html\n';
+}
+
+instructions += '\n' + '═'.repeat(50) + '\n';
+instructions += '✅ After installation, restart your terminal and verify with: ffmpeg -version\n';
+instructions += '💡 If still not working, ensure FFmpeg is in your system PATH\n';
+
+return instructions;
+}
+
+/**
+* Checks if ffmpeg is available in the system (cached check)
 * @private
+* @returns Promise<boolean> - True if ffmpeg is available
+*/
+async #checkFFmpegAvailable(): Promise<boolean> {
+// Cache the result to avoid multiple checks
+if (this._ffmpegChecked) {
+return this._ffmpegAvailable ?? false;
+}
+
+try {
+// Try to execute ffmpeg -version (suppress output)
+await execAsync('ffmpeg -version', {
+timeout: 5000,
+maxBuffer: 1024 * 1024 // 1MB buffer
+});
+this._ffmpegAvailable = true;
+this._ffmpegChecked = true;
+this._ffmpegPath = 'ffmpeg';
+return true;
+} catch {
+// Try common installation paths
+const commonPaths = process.platform === 'win32' ? [
+'C:\\ffmpeg\\bin\\ffmpeg.exe',
+'C:\\Program Files\\ffmpeg\\bin\\ffmpeg.exe',
+'C:\\Program Files (x86)\\ffmpeg\\bin\\ffmpeg.exe'
+] : [
+'/usr/bin/ffmpeg',
+'/usr/local/bin/ffmpeg',
+'/opt/homebrew/bin/ffmpeg',
+'/opt/local/bin/ffmpeg'
+];
+
+for (const ffmpegPath of commonPaths) {
+try {
+await execAsync(`"${ffmpegPath}" -version`, {
+timeout: 3000,
+maxBuffer: 1024 * 1024
+});
+this._ffmpegAvailable = true;
+this._ffmpegChecked = true;
+this._ffmpegPath = ffmpegPath;
+return true;
+} catch {
+continue;
+}
+}
+
+this._ffmpegAvailable = false;
+this._ffmpegChecked = true;
+return false;
+}
+}
+
+/**
+* Gets video information (duration, resolution, fps, etc.)
 * @param videoSource - Video source (path, URL, or Buffer)
-* @
-* @returns Buffer containing the frame image
+* @returns Video metadata object
 */
-
+/**
+* Gets video information (duration, resolution, fps, etc.)
+* @param videoSource - Video source (path, URL, or Buffer)
+* @param skipFFmpegCheck - Skip FFmpeg availability check (for internal use, default: false)
+* @returns Video metadata object
+*/
+async getVideoInfo(videoSource: string | Buffer, skipFFmpegCheck: boolean = false): Promise<{
+duration: number;
+width: number;
+height: number;
+fps: number;
+bitrate: number;
+format: string;
+} | null> {
 try {
+// Skip FFmpeg check if we already know it's available (for internal calls)
+if (!skipFFmpegCheck) {
+const ffmpegAvailable = await this.#checkFFmpegAvailable();
+if (!ffmpegAvailable) {
+const errorMessage =
+'❌ FFMPEG NOT FOUND\n' +
+'Video processing features require FFmpeg to be installed on your system.\n' +
+this.#getFFmpegInstallInstructions();
+
+throw new Error(errorMessage);
+}
+}
+
 const frameDir = path.join(process.cwd(), '.temp-frames');
 if (!fs.existsSync(frameDir)) {
 fs.mkdirSync(frameDir, { recursive: true });
 }

+let videoPath: string;
 const tempVideoPath = path.join(frameDir, `temp-video-${Date.now()}.mp4`);
-const frameOutputPath = path.join(frameDir, `frame-${Date.now()}.jpg`);

 // Handle video source
 if (Buffer.isBuffer(videoSource)) {
 fs.writeFileSync(tempVideoPath, videoSource);
+videoPath = tempVideoPath;
 } else if (typeof videoSource === 'string' && videoSource.startsWith('http')) {
 const response = await axios({
 method: 'get',
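getVideoInfo is a public method with the signature added above; when FFmpeg/ffprobe is missing, it throws an error whose message starts with the '❌ FFMPEG NOT FOUND' banner and embeds the platform-specific install guide. A usage sketch (the file path is an example):

```ts
import { ApexPainter } from "apexify.js";

const painter = new ApexPainter();

try {
  const info = await painter.getVideoInfo("./clips/demo.mp4");
  if (info) {
    console.log(`${info.width}x${info.height} @ ${info.fps} fps, ${info.duration}s (${info.format})`);
  }
} catch (err) {
  // A missing FFmpeg surfaces the installation guide built by #getFFmpegInstallInstructions.
  console.error(err instanceof Error ? err.message : err);
}
```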
@@ -1509,62 +1730,204 @@ export class ApexPainter {
 responseType: 'arraybuffer'
 });
 fs.writeFileSync(tempVideoPath, Buffer.from(response.data));
+videoPath = tempVideoPath;
 } else {
-// Local file path
 if (!fs.existsSync(videoSource)) {
 throw new Error(`Video file not found: ${videoSource}`);
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+videoPath = videoSource;
+}
+
+// Use ffprobe to get video info (escape path for Windows)
+const escapedPath = videoPath.replace(/"/g, '\\"');
+const { stdout } = await execAsync(
+`ffprobe -v error -show_entries stream=width,height,r_frame_rate,bit_rate -show_entries format=duration,format_name -of json "${escapedPath}"`,
+{
+timeout: 30000, // 30 second timeout
+maxBuffer: 10 * 1024 * 1024 // 10MB buffer for large JSON responses
+}
+);
+
+const info = JSON.parse(stdout);
+const videoStream = info.streams?.find((s: any) => s.width && s.height) || info.streams?.[0];
+const format = info.format || {};
+
+// Parse frame rate (e.g., "30/1" -> 30)
+const fps = videoStream?.r_frame_rate
+? (() => {
+const [num, den] = videoStream.r_frame_rate.split('/').map(Number);
+return den ? num / den : num;
+})()
+: 30;
+
+const result = {
+duration: parseFloat(format.duration || '0'),
+width: parseInt(videoStream?.width || '0'),
+height: parseInt(videoStream?.height || '0'),
+fps: fps,
+bitrate: parseInt(videoStream?.bit_rate || format.bit_rate || '0'),
+format: format.format_name || 'unknown'
+};
+
+// Cleanup temp file if created
+if (videoPath === tempVideoPath && fs.existsSync(tempVideoPath)) {
+fs.unlinkSync(tempVideoPath);
+}
+
+return result;
+} catch (error) {
+const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+// Re-throw FFmpeg installation errors
+if (errorMessage.includes('FFMPEG NOT FOUND') || errorMessage.includes('FFmpeg')) {
+throw error;
+}
+throw new Error(`getVideoInfo failed: ${errorMessage}`);
+}
+}
+
+/**
+* Extracts a single frame from a video at a specific time or frame number
+* @private
+* @param videoSource - Video source (path, URL, or Buffer)
+* @param frameNumber - Frame number to extract (default: 0)
+* @param timeSeconds - Alternative: time in seconds (overrides frameNumber if provided)
+* @param outputFormat - Output image format ('jpg' or 'png', default: 'jpg')
+* @param quality - JPEG quality 1-31 (lower = better, default: 2) or PNG compression
+* @returns Buffer containing the frame image
+*/
+async #extractVideoFrame(
+videoSource: string | Buffer,
+frameNumber: number = 0,
+timeSeconds?: number,
+outputFormat: 'jpg' | 'png' = 'jpg',
+quality: number = 2
+): Promise<Buffer | null> {
+try {
+const ffmpegAvailable = await this.#checkFFmpegAvailable();
+if (!ffmpegAvailable) {
+const errorMessage =
+'❌ FFMPEG NOT FOUND\n' +
+'Video processing features require FFmpeg to be installed on your system.\n' +
+this.#getFFmpegInstallInstructions();
+
+throw new Error(errorMessage);
+}
+
+const frameDir = path.join(process.cwd(), '.temp-frames');
+if (!fs.existsSync(frameDir)) {
+fs.mkdirSync(frameDir, { recursive: true });
+}
+
+const timestamp = Date.now();
+const tempVideoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+const frameOutputPath = path.join(frameDir, `frame-${timestamp}.${outputFormat}`);
+
+let videoPath: string;
+let shouldCleanupVideo = false;
+
+// Handle video source
+if (Buffer.isBuffer(videoSource)) {
+fs.writeFileSync(tempVideoPath, videoSource);
+videoPath = tempVideoPath;
+shouldCleanupVideo = true;
+} else if (typeof videoSource === 'string' && videoSource.startsWith('http')) {
+const response = await axios({
+method: 'get',
+url: videoSource,
+responseType: 'arraybuffer'
 });
+fs.writeFileSync(tempVideoPath, Buffer.from(response.data));
+videoPath = tempVideoPath;
+shouldCleanupVideo = true;
+} else {
+// Resolve relative paths (similar to customBackground)
+let resolvedPath = videoSource;
+if (!/^https?:\/\//i.test(resolvedPath)) {
+resolvedPath = path.join(process.cwd(), resolvedPath);
+}
+
+if (!fs.existsSync(resolvedPath)) {
+throw new Error(`Video file not found: ${videoSource} (resolved to: ${resolvedPath})`);
+}
+videoPath = resolvedPath;
 }

-//
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-})
-
-
+// Calculate time in seconds
+// If time is provided, use it directly (most accurate)
+// If only frame is provided, we need to get video FPS to convert frame to time
+let time: number;
+if (timeSeconds !== undefined) {
+time = timeSeconds;
+} else if (frameNumber === 0) {
+// Frame 0 = start of video
+time = 0;
+} else {
+// Get video FPS to convert frame number to time accurately
+try {
+const videoInfo = await this.getVideoInfo(videoPath, true); // Skip FFmpeg check (already done)
+if (videoInfo && videoInfo.fps > 0) {
+time = frameNumber / videoInfo.fps;
+} else {
+// Fallback to 30 FPS if we can't get video info
+console.warn(`Could not get video FPS, assuming 30 FPS for frame ${frameNumber}`);
+time = frameNumber / 30;
+}
+} catch (error) {
+// If getVideoInfo fails, assume 30 FPS (standard video framerate)
+console.warn(`Could not get video info, assuming 30 FPS for frame ${frameNumber}`);
+time = frameNumber / 30;
+}
+}
+
+// Build ffmpeg command (escape paths for Windows)
+// Don't use -f flag, let FFmpeg infer format from file extension
+// Use -frames:v 1 instead of -vframes 1 (more explicit)
+// For PNG: use rgba pixel format (best compatibility with loadImage)
+// For JPEG: don't specify pixel format, let FFmpeg use default (yuvj420p works better than rgb24)
+const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+const escapedOutputPath = frameOutputPath.replace(/"/g, '\\"');
+
+let command: string;
+if (outputFormat === 'png') {
+// PNG: Use rgba pixel format for best compatibility
+const pixFmt = '-pix_fmt rgba';
+command = `ffmpeg -i "${escapedVideoPath}" -ss ${time} -frames:v 1 ${pixFmt} -y "${escapedOutputPath}"`;
+} else {
+// JPEG: Use quality flag, let FFmpeg choose pixel format (default works better than rgb24)
+const qualityFlag = `-q:v ${quality}`;
+command = `ffmpeg -i "${escapedVideoPath}" -ss ${time} -frames:v 1 ${qualityFlag} -y "${escapedOutputPath}"`;
+}
+
+try {
+await execAsync(command, {
+timeout: 30000, // 30 second timeout
+maxBuffer: 10 * 1024 * 1024 // 10MB buffer
+});
+
+if (!fs.existsSync(frameOutputPath)) {
+throw new Error('Frame extraction failed - output file not created');
+}
+
+const buffer = fs.readFileSync(frameOutputPath);
+
+// Cleanup
+if (fs.existsSync(frameOutputPath)) fs.unlinkSync(frameOutputPath);
+if (shouldCleanupVideo && fs.existsSync(tempVideoPath)) fs.unlinkSync(tempVideoPath);
+
+return buffer;
+} catch (error) {
+// Cleanup on error
+if (fs.existsSync(frameOutputPath)) fs.unlinkSync(frameOutputPath);
+if (shouldCleanupVideo && fs.existsSync(tempVideoPath)) fs.unlinkSync(tempVideoPath);
+throw error;
+}
 } catch (error) {
-
-
+const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+// Re-throw FFmpeg installation errors so user sees installation guide
+if (errorMessage.includes('FFMPEG NOT FOUND') || errorMessage.includes('FFmpeg')) {
+throw error;
+}
+throw new Error(`extractVideoFrame failed: ${errorMessage}`);
 }
 }

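For reference, the command string #extractVideoFrame assembles above boils down to a single-frame ffmpeg invocation. A sketch that reproduces the same string for example inputs (paths and time are placeholders):

```ts
// Same flags as the PNG branch above: seek, grab one frame, force rgba, overwrite.
const videoPath = "./clips/demo.mp4";
const frameOutputPath = ".temp-frames/frame-0.png";
const time = 1.5;

const pngCommand = `ffmpeg -i "${videoPath}" -ss ${time} -frames:v 1 -pix_fmt rgba -y "${frameOutputPath}"`;
// The JPEG branch instead uses a quality flag and the default pixel format:
const jpgCommand = `ffmpeg -i "${videoPath}" -ss ${time} -frames:v 1 -q:v 2 -y "frame-0.jpg"`;

console.log(pngCommand, "\n", jpgCommand);
```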
@@ -1589,83 +1952,2557 @@ export class ApexPainter {
|
|
|
1589
1952
|
}
|
|
1590
1953
|
}
|
|
1591
1954
|
|
|
1955
|
+
/**
|
|
1956
|
+
* Extracts multiple frames from a video at specified intervals
|
|
1957
|
+
* @param videoSource - Video source (path, URL, or Buffer)
|
|
1958
|
+
* @param options - Extraction options
|
|
1959
|
+
* @returns Array of frame file paths
|
|
1960
|
+
*/
|
|
1592
1961
|
async extractFrames(videoSource: string | Buffer, options: ExtractFramesOptions): Promise<Array<{ source: string; isRemote: boolean }>> {
|
|
1593
1962
|
try {
|
|
1963
|
+
const ffmpegAvailable = await this.#checkFFmpegAvailable();
|
|
1964
|
+
if (!ffmpegAvailable) {
|
|
1965
|
+
const errorMessage =
|
|
1966
|
+
'❌ FFMPEG NOT FOUND\n' +
|
|
1967
|
+
'Video processing features require FFmpeg to be installed on your system.\n' +
|
|
1968
|
+
this.#getFFmpegInstallInstructions();
|
|
1969
|
+
|
|
1970
|
+
throw new Error(errorMessage);
|
|
1971
|
+
}
|
|
1972
|
+
|
|
1594
1973
|
this.#validateExtractFramesInputs(videoSource, options);
|
|
1595
1974
|
|
|
1596
1975
|
const frames: Array<{ source: string; isRemote: boolean }> = [];
|
|
1597
|
-
const frameDir = path.join(
|
|
1976
|
+
const frameDir = path.join(process.cwd(), '.temp-frames', `frames-${Date.now()}`);
|
|
1598
1977
|
|
|
1599
|
-
|
|
1600
|
-
|
|
1601
|
-
|
|
1978
|
+
if (!fs.existsSync(frameDir)) {
|
|
1979
|
+
fs.mkdirSync(frameDir, { recursive: true });
|
|
1980
|
+
}
|
|
1602
1981
|
|
|
1603
|
-
|
|
1982
|
+
const timestamp = Date.now();
|
|
1983
|
+
const videoPath = typeof videoSource === 'string' ? videoSource : path.join(frameDir, `temp-video-${timestamp}.mp4`);
|
|
1984
|
+
let shouldCleanupVideo = false;
|
|
1604
1985
|
|
|
1605
|
-
|
|
1606
|
-
|
|
1607
|
-
|
|
1608
|
-
|
|
1986
|
+
// Handle video source
|
|
1987
|
+
if (Buffer.isBuffer(videoSource)) {
|
|
1988
|
+
fs.writeFileSync(videoPath, videoSource);
|
|
1989
|
+
shouldCleanupVideo = true;
|
|
1990
|
+
} else if (typeof videoSource === 'string' && videoSource.startsWith('http')) {
|
|
1991
|
+
const response = await axios({
|
|
1609
1992
|
method: 'get',
|
|
1610
1993
|
url: videoSource,
|
|
1611
1994
|
responseType: 'arraybuffer'
|
|
1612
|
-
|
|
1613
|
-
|
|
1614
|
-
|
|
1615
|
-
})
|
|
1616
|
-
|
|
1617
|
-
|
|
1618
|
-
|
|
1619
|
-
|
|
1620
|
-
|
|
1621
|
-
|
|
1995
|
+
});
|
|
1996
|
+
fs.writeFileSync(videoPath, Buffer.from(response.data));
|
|
1997
|
+
shouldCleanupVideo = true;
|
|
1998
|
+
} else if (!fs.existsSync(videoPath)) {
|
|
1999
|
+
throw new Error("Video file not found at specified path.");
|
|
2000
|
+
}
|
|
2001
|
+
|
|
2002
|
+
// Get video duration using ffprobe (escape path for Windows)
|
|
2003
|
+
const escapedVideoPath = videoPath.replace(/"/g, '\\"');
|
|
2004
|
+
const { stdout: probeOutput } = await execAsync(
|
|
2005
|
+
`ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 "${escapedVideoPath}"`,
|
|
2006
|
+
{ maxBuffer: 10 * 1024 * 1024 } // 10MB buffer
|
|
2007
|
+
);
|
|
2008
|
+
|
|
2009
|
+
const duration = parseFloat(probeOutput.trim());
|
|
2010
|
+
if (isNaN(duration) || duration <= 0) {
|
|
2011
|
+
throw new Error("Video duration not found in metadata.");
|
|
2012
|
+
}
|
|
1622
2013
|
|
|
1623
|
-
function processVideoExtraction(videoPath: string, frames: any[], options: ExtractFramesOptions, resolve: any, reject: any) {
|
|
1624
2014
|
const outputFormat = options.outputFormat || 'jpg';
|
|
1625
|
-
const
|
|
1626
|
-
|
|
1627
|
-
|
|
1628
|
-
|
|
1629
|
-
|
|
1630
|
-
|
|
1631
|
-
|
|
1632
|
-
|
|
1633
|
-
|
|
1634
|
-
|
|
1635
|
-
|
|
1636
|
-
|
|
1637
|
-
|
|
1638
|
-
|
|
1639
|
-
|
|
1640
|
-
|
|
1641
|
-
|
|
1642
|
-
|
|
1643
|
-
|
|
1644
|
-
|
|
1645
|
-
|
|
1646
|
-
|
|
1647
|
-
|
|
1648
|
-
|
|
1649
|
-
|
|
1650
|
-
|
|
1651
|
-
|
|
1652
|
-
|
|
1653
|
-
|
|
1654
|
-
|
|
1655
|
-
|
|
2015
|
+
const fps = 1000 / options.interval; // Frames per second based on interval
|
|
2016
|
+
const totalFrames = Math.floor(duration * fps);
|
|
2017
|
+
|
|
2018
|
+
// Apply frame selection if specified
|
|
2019
|
+
const startFrame = options.frameSelection?.start || 0;
|
|
2020
|
+
const endFrame = options.frameSelection?.end !== undefined
|
|
2021
|
+
? Math.min(options.frameSelection.end, totalFrames - 1)
|
|
2022
|
+
: totalFrames - 1;
|
|
2023
|
+
|
|
2024
|
+
// Build ffmpeg command for frame extraction
|
|
2025
|
+
const outputFileTemplate = path.join(frameDir, `frame-%03d.${outputFormat}`);
|
|
2026
|
+
const qualityFlag = outputFormat === 'jpg' ? '-q:v 2' : '';
|
|
2027
|
+
const pixFmt = outputFormat === 'png' ? '-pix_fmt rgba' : '-pix_fmt yuvj420p';
|
|
2028
|
+
|
|
2029
|
+
// Calculate start and end times
|
|
2030
|
+
const startTime = startFrame / fps;
|
|
2031
|
+
const endTime = (endFrame + 1) / fps;
|
|
2032
|
+
const durationToExtract = endTime - startTime;
|
|
2033
|
+
|
|
2034
|
+
const escapedOutputTemplate = outputFileTemplate.replace(/"/g, '\\"');
|
|
2035
|
+
// Don't use -f flag, let FFmpeg infer format from file extension
|
|
2036
|
+
// -vf fps=${fps} extracts frames at the specified FPS
|
|
2037
|
+
// Use -ss after -i for more accurate frame extraction
|
|
2038
|
+
const command = `ffmpeg -i "${escapedVideoPath}" -ss ${startTime} -t ${durationToExtract} -vf fps=${fps} ${pixFmt} ${qualityFlag} -y "${escapedOutputTemplate}"`;
|
|
2039
|
+
|
|
2040
|
+
try {
|
|
2041
|
+
await execAsync(command, {
|
|
2042
|
+
timeout: 60000, // 60 second timeout for multiple frames
|
|
2043
|
+
maxBuffer: 10 * 1024 * 1024 // 10MB buffer
|
|
2044
|
+
});
|
|
2045
|
+
|
|
2046
|
+
// Collect all extracted frame files
|
|
2047
|
+
const actualFrameCount = endFrame - startFrame + 1;
|
|
2048
|
+
for (let i = 0; i < actualFrameCount; i++) {
|
|
2049
|
+
const frameNumber = startFrame + i;
|
|
2050
|
+
const framePath = path.join(frameDir, `frame-${String(i + 1).padStart(3, '0')}.${outputFormat}`);
|
|
2051
|
+
|
|
2052
|
+
if (fs.existsSync(framePath)) {
|
|
2053
|
+
frames.push({
|
|
2054
|
+
source: framePath,
|
|
2055
|
+
isRemote: false
|
|
2056
|
+
});
|
|
1656
2057
|
}
|
|
1657
|
-
|
|
1658
|
-
}
|
|
2058
|
+
}
|
|
1659
2059
|
|
|
1660
|
-
|
|
1661
|
-
|
|
1662
|
-
|
|
2060
|
+
// Cleanup temp video if created
|
|
2061
|
+
if (shouldCleanupVideo && fs.existsSync(videoPath)) {
|
|
2062
|
+
fs.unlinkSync(videoPath);
|
|
2063
|
+
}
|
|
2064
|
+
|
|
2065
|
+
return frames;
|
|
2066
|
+
} catch (error) {
|
|
2067
|
+
// Cleanup on error
|
|
2068
|
+
if (shouldCleanupVideo && fs.existsSync(videoPath)) {
|
|
2069
|
+
fs.unlinkSync(videoPath);
|
|
2070
|
+
}
|
|
2071
|
+
throw error;
|
|
2072
|
+
}
|
|
1663
2073
|
} catch (error) {
|
|
1664
2074
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
|
|
2075
|
+
// Re-throw FFmpeg installation errors so user sees installation guide
|
|
2076
|
+
if (errorMessage.includes('FFMPEG NOT FOUND') || errorMessage.includes('FFmpeg')) {
|
|
2077
|
+
throw error;
|
|
2078
|
+
}
|
|
1665
2079
|
throw new Error(`extractFrames failed: ${errorMessage}`);
|
|
1666
2080
|
}
|
|
1667
2081
|
}
|
|
1668
2082
|
|
|
2083
|
+
/**
|
|
2084
|
+
* Comprehensive video processing method - all video features in one place
|
|
2085
|
+
* @param options - Video processing options
|
|
2086
|
+
* @returns Results based on the operation requested
|
|
2087
|
+
*/
|
|
2088
|
+
async createVideo(options: {
|
|
2089
|
+
source: string | Buffer;
|
|
2090
|
+
|
|
2091
|
+
// Get video information
|
|
2092
|
+
getInfo?: boolean;
|
|
2093
|
+
|
|
2094
|
+
// Extract single frame (creates canvas)
|
|
2095
|
+
extractFrame?: {
|
|
2096
|
+
time?: number; // Time in seconds
|
|
2097
|
+
frame?: number; // Frame number (1-based, will be converted to time using video FPS)
|
|
2098
|
+
width?: number; // Canvas width (default: video width)
|
|
2099
|
+
height?: number; // Canvas height (default: video height)
|
|
2100
|
+
outputFormat?: 'jpg' | 'png'; // Frame extraction format (default: 'png')
|
|
2101
|
+
quality?: number; // JPEG quality 1-31 (lower = better, default: 2)
|
|
2102
|
+
};
|
|
2103
|
+
|
|
2104
|
+
// Extract multiple frames at specific times
|
|
2105
|
+
extractFrames?: {
|
|
2106
|
+
times?: number[]; // Array of times in seconds
|
|
2107
|
+
interval?: number; // Extract frames at intervals (milliseconds)
|
|
2108
|
+
frameSelection?: { start?: number; end?: number }; // Frame range for interval extraction
|
|
2109
|
+
outputFormat?: 'jpg' | 'png';
|
|
2110
|
+
quality?: number;
|
|
2111
|
+
outputDirectory?: string; // Directory to save frames (for interval extraction)
|
|
2112
|
+
};
|
|
2113
|
+
|
|
2114
|
+
// Extract ALL frames from video
|
|
2115
|
+
extractAllFrames?: {
|
|
2116
|
+
outputFormat?: 'jpg' | 'png';
|
|
2117
|
+
outputDirectory?: string;
|
|
2118
|
+
quality?: number;
|
|
2119
|
+
prefix?: string; // Filename prefix (default: 'frame')
|
|
2120
|
+
startTime?: number; // Start time in seconds (default: 0)
|
|
2121
|
+
endTime?: number; // End time in seconds (default: video duration)
|
|
2122
|
+
};
|
|
2123
|
+
|
|
2124
|
+
// Generate video thumbnail (multiple frames in grid)
|
|
2125
|
+
generateThumbnail?: {
|
|
2126
|
+
count?: number; // Number of frames to extract (default: 9)
|
|
2127
|
+
grid?: { cols: number; rows: number }; // Grid layout (default: 3x3)
|
|
2128
|
+
width?: number; // Thumbnail width (default: 320)
|
|
2129
|
+
height?: number; // Thumbnail height (default: 180)
|
|
2130
|
+
outputFormat?: 'jpg' | 'png';
|
|
2131
|
+
quality?: number;
|
|
2132
|
+
};
|
|
2133
|
+
|
|
2134
|
+
// Convert video format
|
|
2135
|
+
convert?: {
|
|
2136
|
+
outputPath: string; // Output video file path
|
|
2137
|
+
format?: 'mp4' | 'webm' | 'avi' | 'mov' | 'mkv'; // Output format (default: 'mp4')
|
|
2138
|
+
quality?: 'low' | 'medium' | 'high' | 'ultra'; // Quality preset
|
|
2139
|
+
bitrate?: number; // Custom bitrate in kbps
|
|
2140
|
+
fps?: number; // Output FPS (default: source FPS)
|
|
2141
|
+
resolution?: { width: number; height: number }; // Output resolution
|
|
2142
|
+
};
|
|
2143
|
+
|
|
2144
|
+
// Trim/Cut video
|
|
2145
|
+
trim?: {
|
|
2146
|
+
startTime: number; // Start time in seconds
|
|
2147
|
+
endTime: number; // End time in seconds
|
|
2148
|
+
outputPath: string; // Output video file path
|
|
2149
|
+
};
|
|
2150
|
+
|
|
2151
|
+
// Extract audio from video
|
|
2152
|
+
extractAudio?: {
|
|
2153
|
+
outputPath: string; // Output audio file path
|
|
2154
|
+
format?: 'mp3' | 'wav' | 'aac' | 'ogg'; // Audio format (default: 'mp3')
|
|
2155
|
+
bitrate?: number; // Audio bitrate in kbps (default: 128)
|
|
2156
|
+
};
|
|
2157
|
+
|
|
2158
|
+
// Add watermark to video
|
|
2159
|
+
addWatermark?: {
|
|
2160
|
+
watermarkPath: string; // Watermark image path
|
|
2161
|
+
position?: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right' | 'center';
|
|
2162
|
+
opacity?: number; // Watermark opacity 0-1 (default: 0.5)
|
|
2163
|
+
size?: { width: number; height: number }; // Watermark size
|
|
2164
|
+
outputPath: string; // Output video file path
|
|
2165
|
+
};
|
|
2166
|
+
|
|
2167
|
+
// Adjust video speed
|
|
2168
|
+
changeSpeed?: {
|
|
2169
|
+
speed: number; // Speed multiplier (0.5 = half speed, 2 = double speed)
|
|
2170
|
+
outputPath: string; // Output video file path
|
|
2171
|
+
};
|
|
2172
|
+
|
|
2173
|
+
// Extract video preview (multiple frames as images)
|
|
2174
|
+
generatePreview?: {
|
|
2175
|
+
count?: number; // Number of preview frames (default: 10)
|
|
2176
|
+
outputDirectory?: string; // Directory to save preview frames
|
|
2177
|
+
outputFormat?: 'jpg' | 'png';
|
|
2178
|
+
quality?: number;
|
|
2179
|
+
};
|
|
2180
|
+
|
|
2181
|
+
// Apply video effects/filters
|
|
2182
|
+
applyEffects?: {
|
|
2183
|
+
filters: Array<{
|
|
2184
|
+
type: 'blur' | 'brightness' | 'contrast' | 'saturation' | 'grayscale' | 'sepia' | 'invert' | 'sharpen' | 'noise';
|
|
2185
|
+
intensity?: number; // 0-100
|
|
2186
|
+
value?: number; // For brightness, contrast, saturation (-100 to 100)
|
|
2187
|
+
}>;
|
|
2188
|
+
outputPath: string;
|
|
2189
|
+
};
|
|
2190
|
+
|
|
2191
|
+
// Merge/Concatenate videos
|
|
2192
|
+
merge?: {
|
|
2193
|
+
videos: Array<string | Buffer>; // Array of video sources
|
|
2194
|
+
outputPath: string;
|
|
2195
|
+
mode?: 'sequential' | 'side-by-side' | 'grid'; // Merge mode
|
|
2196
|
+
grid?: { cols: number; rows: number }; // For grid mode
|
|
2197
|
+
};
|
|
2198
|
+
|
|
2199
|
+
// Rotate/Flip video
|
|
2200
|
+
rotate?: {
|
|
2201
|
+
angle?: 90 | 180 | 270; // Rotation angle
|
|
2202
|
+
flip?: 'horizontal' | 'vertical' | 'both'; // Flip direction
|
|
2203
|
+
outputPath: string;
|
|
2204
|
+
};
|
|
2205
|
+
|
|
2206
|
+
// Crop video
|
|
2207
|
+
crop?: {
|
|
2208
|
+
x: number; // Start X position
|
|
2209
|
+
y: number; // Start Y position
|
|
2210
|
+
width: number; // Crop width
|
|
2211
|
+
height: number; // Crop height
|
|
2212
|
+
outputPath: string;
|
|
2213
|
+
};
|
|
2214
|
+
|
|
2215
|
+
// Compress/Optimize video
|
|
2216
|
+
compress?: {
|
|
2217
|
+
outputPath: string;
|
|
2218
|
+
quality?: 'low' | 'medium' | 'high' | 'ultra'; // Quality preset
|
|
2219
|
+
targetSize?: number; // Target file size in MB
|
|
2220
|
+
maxBitrate?: number; // Max bitrate in kbps
|
|
2221
|
+
};
|
|
2222
|
+
|
|
2223
|
+
// Add text overlay to video
|
|
2224
|
+
addText?: {
|
|
2225
|
+
text: string;
|
|
2226
|
+
position?: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right' | 'center' | 'top-center' | 'bottom-center';
|
|
2227
|
+
fontSize?: number;
|
|
2228
|
+
fontColor?: string;
|
|
2229
|
+
backgroundColor?: string; // Text background color
|
|
2230
|
+
startTime?: number; // Start time in seconds
|
|
2231
|
+
endTime?: number; // End time in seconds
|
|
2232
|
+
outputPath: string;
|
|
2233
|
+
};
|
|
2234
|
+
|
|
2235
|
+
// Add fade effects
|
|
2236
|
+
addFade?: {
|
|
2237
|
+
fadeIn?: number; // Fade in duration in seconds
|
|
2238
|
+
fadeOut?: number; // Fade out duration in seconds
|
|
2239
|
+
outputPath: string;
|
|
2240
|
+
};
|
|
2241
|
+
|
|
2242
|
+
// Reverse video playback
|
|
2243
|
+
reverse?: {
|
|
2244
|
+
outputPath: string;
|
|
2245
|
+
};
|
|
2246
|
+
|
|
2247
|
+
// Create seamless loop
|
|
2248
|
+
createLoop?: {
|
|
2249
|
+
outputPath: string;
|
|
2250
|
+
smooth?: boolean; // Try to create smooth loop
|
|
2251
|
+
};
|
|
2252
|
+
|
|
2253
|
+
// Batch process multiple videos
|
|
2254
|
+
batch?: {
|
|
2255
|
+
videos: Array<{ source: string | Buffer; operations: any }>; // Array of videos with their operations
|
|
2256
|
+
outputDirectory: string;
|
|
2257
|
+
};
|
|
2258
|
+
|
|
2259
|
+
// Detect scene changes
|
|
2260
|
+
detectScenes?: {
|
|
2261
|
+
threshold?: number; // Scene change threshold (0-1)
|
|
2262
|
+
outputPath?: string; // Optional: save scene markers to file
|
|
2263
|
+
};
|
|
2264
|
+
|
|
2265
|
+
// Stabilize video (reduce shake)
|
|
2266
|
+
stabilize?: {
|
|
2267
|
+
outputPath: string;
|
|
2268
|
+
smoothing?: number; // Smoothing factor (default: 10)
|
|
2269
|
+
};
|
|
2270
|
+
|
|
2271
|
+
// Color correction
|
|
2272
|
+
colorCorrect?: {
|
|
2273
|
+
brightness?: number; // -100 to 100
|
|
2274
|
+
contrast?: number; // -100 to 100
|
|
2275
|
+
saturation?: number; // -100 to 100
|
|
2276
|
+
hue?: number; // -180 to 180
|
|
2277
|
+
temperature?: number; // Color temperature adjustment
|
|
2278
|
+
outputPath: string;
|
|
2279
|
+
};
|
|
2280
|
+
|
|
2281
|
+
// Picture-in-picture
|
|
2282
|
+
pictureInPicture?: {
|
|
2283
|
+
overlayVideo: string | Buffer; // Overlay video source
|
|
2284
|
+
position?: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right' | 'center';
|
|
2285
|
+
size?: { width: number; height: number }; // Overlay size
|
|
2286
|
+
opacity?: number; // 0-1
|
|
2287
|
+
outputPath: string;
|
|
2288
|
+
};
|
|
2289
|
+
|
|
2290
|
+
// Split screen (side-by-side or grid)
|
|
2291
|
+
splitScreen?: {
|
|
2292
|
+
videos: Array<string | Buffer>; // Array of videos
|
|
2293
|
+
layout?: 'side-by-side' | 'top-bottom' | 'grid'; // Layout type
|
|
2294
|
+
grid?: { cols: number; rows: number }; // For grid layout
|
|
2295
|
+
outputPath: string;
|
|
2296
|
+
};
|
|
2297
|
+
|
|
2298
|
+
// Create time-lapse
|
|
2299
|
+
createTimeLapse?: {
|
|
2300
|
+
speed?: number; // Speed multiplier (default: 10x)
|
|
2301
|
+
outputPath: string;
|
|
2302
|
+
};
|
|
2303
|
+
|
|
2304
|
+
// Mute/Unmute video
|
|
2305
|
+
mute?: {
|
|
2306
|
+
outputPath: string;
|
|
2307
|
+
};
|
|
2308
|
+
|
|
2309
|
+
// Adjust audio volume
|
|
2310
|
+
adjustVolume?: {
|
|
2311
|
+
volume: number; // Volume multiplier (0.0 = mute, 1.0 = original, 2.0 = double)
|
|
2312
|
+
outputPath: string;
|
|
2313
|
+
};
|
|
2314
|
+
|
|
2315
|
+
// Detect video format and codec
|
|
2316
|
+
detectFormat?: boolean; // Returns detailed format information
|
|
2317
|
+
}): Promise<any> {
|
|
2318
|
+
try {
|
|
2319
|
+
const ffmpegAvailable = await this.#checkFFmpegAvailable();
|
|
2320
|
+
if (!ffmpegAvailable) {
|
|
2321
|
+
const errorMessage =
|
|
2322
|
+
'❌ FFMPEG NOT FOUND\n' +
|
|
2323
|
+
'Video processing features require FFmpeg to be installed on your system.\n' +
|
|
2324
|
+
this.#getFFmpegInstallInstructions();
|
|
2325
|
+
throw new Error(errorMessage);
|
|
2326
|
+
}
|
|
2327
|
+
|
|
2328
|
+
// Get video info if requested or needed
|
|
2329
|
+
let videoInfo: any = null;
|
|
2330
|
+
if (options.getInfo || options.extractFrame?.frame || options.generateThumbnail || options.generatePreview) {
|
|
2331
|
+
videoInfo = await this.getVideoInfo(options.source, true);
|
|
2332
|
+
}
|
|
2333
|
+
|
|
2334
|
+
// Handle getInfo
|
|
2335
|
+
if (options.getInfo) {
|
|
2336
|
+
return videoInfo || await this.getVideoInfo(options.source, true);
|
|
2337
|
+
}
|
|
2338
|
+
|
|
2339
|
+
// Handle extractFrame (creates canvas)
|
|
2340
|
+
if (options.extractFrame) {
|
|
2341
|
+
const frameBuffer = await this.#extractVideoFrame(
|
|
2342
|
+
options.source,
|
|
2343
|
+
options.extractFrame.frame ?? 0,
|
|
2344
|
+
options.extractFrame.time,
|
|
2345
|
+
options.extractFrame.outputFormat || 'png',
|
|
2346
|
+
options.extractFrame.quality || 2
|
|
2347
|
+
);
|
|
2348
|
+
|
|
2349
|
+
if (!frameBuffer || frameBuffer.length === 0) {
|
|
2350
|
+
throw new Error('Failed to extract video frame');
|
|
2351
|
+
}
|
|
2352
|
+
|
|
2353
|
+
const frameImage = await loadImage(frameBuffer);
|
|
2354
|
+
const videoWidth = frameImage.width;
|
|
2355
|
+
const videoHeight = frameImage.height;
|
|
2356
|
+
|
|
2357
|
+
const width = options.extractFrame.width ?? videoWidth;
|
|
2358
|
+
const height = options.extractFrame.height ?? videoHeight;
|
|
2359
|
+
|
|
2360
|
+
const canvas = createCanvas(width, height);
|
|
2361
|
+
const ctx = canvas.getContext('2d') as SKRSContext2D;
|
|
2362
|
+
if (!ctx) {
|
|
2363
|
+
throw new Error('Unable to get 2D context');
|
|
2364
|
+
}
|
|
2365
|
+
|
|
2366
|
+
ctx.drawImage(frameImage, 0, 0, width, height);
|
|
2367
|
+
|
|
2368
|
+
return {
|
|
2369
|
+
buffer: canvas.toBuffer('image/png'),
|
|
2370
|
+
canvas: { width, height }
|
|
2371
|
+
};
|
|
2372
|
+
}
|
|
2373
|
+
|
|
2374
|
+
// Handle extractFrames (multiple frames at specific times or intervals)
|
|
2375
|
+
if (options.extractFrames) {
|
|
2376
|
+
if (options.extractFrames.times) {
|
|
2377
|
+
// Extract frames at specific times
|
|
2378
|
+
const frames: Buffer[] = [];
|
|
2379
|
+
for (const time of options.extractFrames.times) {
|
|
2380
|
+
const frame = await this.#extractVideoFrame(
|
|
2381
|
+
options.source,
|
|
2382
|
+
0,
|
|
2383
|
+
time,
|
|
2384
|
+
options.extractFrames.outputFormat || 'jpg',
|
|
2385
|
+
options.extractFrames.quality || 2
|
|
2386
|
+
);
|
|
2387
|
+
if (frame) {
|
|
2388
|
+
frames.push(frame);
|
|
2389
|
+
}
|
|
2390
|
+
}
|
|
2391
|
+
return frames;
|
|
2392
|
+
} else if (options.extractFrames.interval) {
|
|
2393
|
+
// Extract frames at intervals
|
|
2394
|
+
return await this.extractFrames(options.source, {
|
|
2395
|
+
interval: options.extractFrames.interval,
|
|
2396
|
+
outputFormat: options.extractFrames.outputFormat || 'jpg',
|
|
2397
|
+
frameSelection: options.extractFrames.frameSelection,
|
|
2398
|
+
outputDirectory: options.extractFrames.outputDirectory
|
|
2399
|
+
});
|
|
2400
|
+
}
|
|
2401
|
+
}
|
|
2402
|
+
|
|
2403
|
+
// Handle extractAllFrames
|
|
2404
|
+
if (options.extractAllFrames) {
|
|
2405
|
+
return await this.extractAllFrames(options.source, {
|
|
2406
|
+
outputFormat: options.extractAllFrames.outputFormat,
|
|
2407
|
+
outputDirectory: options.extractAllFrames.outputDirectory,
|
|
2408
|
+
quality: options.extractAllFrames.quality,
|
|
2409
|
+
prefix: options.extractAllFrames.prefix,
|
|
2410
|
+
startTime: options.extractAllFrames.startTime,
|
|
2411
|
+
endTime: options.extractAllFrames.endTime
|
|
2412
|
+
});
|
|
2413
|
+
}
|
|
2414
|
+
|
|
2415
|
+
// Handle generateThumbnail
|
|
2416
|
+
if (options.generateThumbnail) {
|
|
2417
|
+
return await this.#generateVideoThumbnail(options.source, options.generateThumbnail, videoInfo);
|
|
2418
|
+
}
|
|
2419
|
+
|
|
2420
|
+
// Handle convert
|
|
2421
|
+
if (options.convert) {
|
|
2422
|
+
return await this.#convertVideo(options.source, options.convert);
|
|
2423
|
+
}
|
|
2424
|
+
|
|
2425
|
+
// Handle trim
|
|
2426
|
+
if (options.trim) {
|
|
2427
|
+
return await this.#trimVideo(options.source, options.trim);
|
|
2428
|
+
}
|
|
2429
|
+
|
|
2430
|
+
// Handle extractAudio
|
|
2431
|
+
if (options.extractAudio) {
|
|
2432
|
+
return await this.#extractAudio(options.source, options.extractAudio);
|
|
2433
|
+
}
|
|
2434
|
+
|
|
2435
|
+
// Handle addWatermark
|
|
2436
|
+
if (options.addWatermark) {
|
|
2437
|
+
return await this.#addWatermarkToVideo(options.source, options.addWatermark);
|
|
2438
|
+
}
|
|
2439
|
+
|
|
2440
|
+
// Handle changeSpeed
|
|
2441
|
+
if (options.changeSpeed) {
|
|
2442
|
+
return await this.#changeVideoSpeed(options.source, options.changeSpeed);
|
|
2443
|
+
}
|
|
2444
|
+
|
|
2445
|
+
// Handle generatePreview
|
|
2446
|
+
if (options.generatePreview) {
|
|
2447
|
+
return await this.#generateVideoPreview(options.source, options.generatePreview, videoInfo);
|
|
2448
|
+
}
|
|
2449
|
+
|
|
2450
|
+
// Handle applyEffects
|
|
2451
|
+
if (options.applyEffects) {
|
|
2452
|
+
return await this.#applyVideoEffects(options.source, options.applyEffects);
|
|
2453
|
+
}
|
|
2454
|
+
|
|
2455
|
+
// Handle merge
|
|
2456
|
+
if (options.merge) {
|
|
2457
|
+
return await this.#mergeVideos(options.merge);
|
|
2458
|
+
}
|
|
2459
|
+
|
|
2460
|
+
// Handle rotate
|
|
2461
|
+
if (options.rotate) {
|
|
2462
|
+
return await this.#rotateVideo(options.source, options.rotate);
|
|
2463
|
+
}
|
|
2464
|
+
|
|
2465
|
+
// Handle crop
|
|
2466
|
+
if (options.crop) {
|
|
2467
|
+
return await this.#cropVideo(options.source, options.crop);
|
|
2468
|
+
}
|
|
2469
|
+
|
|
2470
|
+
// Handle compress
|
|
2471
|
+
if (options.compress) {
|
|
2472
|
+
return await this.#compressVideo(options.source, options.compress);
|
|
2473
|
+
}
|
|
2474
|
+
|
|
2475
|
+
// Handle addText
|
|
2476
|
+
if (options.addText) {
|
|
2477
|
+
return await this.#addTextToVideo(options.source, options.addText);
|
|
2478
|
+
}
|
|
2479
|
+
|
|
2480
|
+
// Handle addFade
|
|
2481
|
+
if (options.addFade) {
|
|
2482
|
+        return await this.#addFadeToVideo(options.source, options.addFade);
+      }
+
+      // Handle reverse
+      if (options.reverse) {
+        return await this.#reverseVideo(options.source, options.reverse);
+      }
+
+      // Handle createLoop
+      if (options.createLoop) {
+        return await this.#createVideoLoop(options.source, options.createLoop);
+      }
+
+      // Handle batch
+      if (options.batch) {
+        return await this.#batchProcessVideos(options.batch);
+      }
+
+      // Handle detectScenes
+      if (options.detectScenes) {
+        return await this.#detectVideoScenes(options.source, options.detectScenes);
+      }
+
+      // Handle stabilize
+      if (options.stabilize) {
+        return await this.#stabilizeVideo(options.source, options.stabilize);
+      }
+
+      // Handle colorCorrect
+      if (options.colorCorrect) {
+        return await this.#colorCorrectVideo(options.source, options.colorCorrect);
+      }
+
+      // Handle pictureInPicture
+      if (options.pictureInPicture) {
+        return await this.#addPictureInPicture(options.source, options.pictureInPicture);
+      }
+
+      // Handle splitScreen
+      if (options.splitScreen) {
+        return await this.#createSplitScreen(options.splitScreen);
+      }
+
+      // Handle createTimeLapse
+      if (options.createTimeLapse) {
+        return await this.#createTimeLapseVideo(options.source, options.createTimeLapse);
+      }
+
+      // Handle mute
+      if (options.mute) {
+        return await this.#muteVideo(options.source, options.mute);
+      }
+
+      // Handle adjustVolume
+      if (options.adjustVolume) {
+        return await this.#adjustVideoVolume(options.source, options.adjustVolume);
+      }
+
+      // Handle detectFormat
+      if (options.detectFormat) {
+        const info = await this.getVideoInfo(options.source, true);
+        // Try to get codec from ffprobe
+        let codec = 'unknown';
+        try {
+          const frameDir = path.join(process.cwd(), '.temp-frames');
+          let videoPath: string;
+          if (Buffer.isBuffer(options.source)) {
+            const tempPath = path.join(frameDir, `temp-video-${Date.now()}.mp4`);
+            fs.writeFileSync(tempPath, options.source);
+            videoPath = tempPath;
+          } else {
+            let resolvedPath = options.source;
+            if (!/^https?:\/\//i.test(resolvedPath)) {
+              resolvedPath = path.join(process.cwd(), resolvedPath);
+            }
+            videoPath = resolvedPath;
+          }
+          const escapedPath = videoPath.replace(/"/g, '\\"');
+          const { stdout } = await execAsync(
+            `ffprobe -v error -select_streams v:0 -show_entries stream=codec_name -of default=noprint_wrappers=1:nokey=1 "${escapedPath}"`,
+            { timeout: 10000, maxBuffer: 1024 * 1024 }
+          );
+          codec = stdout.toString().trim() || 'unknown';
+        } catch {
+          codec = 'unknown';
+        }
+
+        return {
+          format: info?.format || 'unknown',
+          codec: codec,
+          container: info?.format || 'unknown',
+          width: info?.width,
+          height: info?.height,
+          fps: info?.fps,
+          bitrate: info?.bitrate,
+          duration: info?.duration
+        };
+      }
+
+      throw new Error('No video operation specified');
+    } catch (error) {
+      const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+      if (errorMessage.includes('FFMPEG NOT FOUND') || errorMessage.includes('FFmpeg')) {
+        throw error;
+      }
+      throw new Error(`createVideo failed: ${errorMessage}`);
+    }
+  }
+
+  /**
+   * Generate video thumbnail (grid of frames)
+   * @private
+   */
+  async #generateVideoThumbnail(
+    videoSource: string | Buffer,
+    options: {
+      count?: number;
+      grid?: { cols: number; rows: number };
+      width?: number;
+      height?: number;
+      outputFormat?: 'jpg' | 'png';
+      quality?: number;
+    },
+    videoInfo: any
+  ): Promise<CanvasResults> {
+    const count = options.count || 9;
+    const grid = options.grid || { cols: 3, rows: 3 };
+    const frameWidth = options.width || 320;
+    const frameHeight = options.height || 180;
+    const outputFormat = options.outputFormat || 'jpg';
+    const quality = options.quality || 2;
+
+    if (!videoInfo) {
+      videoInfo = await this.getVideoInfo(videoSource, true);
+    }
+
+    const duration = videoInfo.duration;
+    const interval = duration / (count + 1); // Distribute frames evenly
+
+    // Extract frames
+    const frames: Buffer[] = [];
+    for (let i = 1; i <= count; i++) {
+      const time = interval * i;
+      const frame = await this.#extractVideoFrame(videoSource, 0, time, outputFormat, quality);
+      if (frame) {
+        frames.push(frame);
+      }
+    }
+
+    // Create thumbnail canvas
+    const thumbnailWidth = frameWidth * grid.cols;
+    const thumbnailHeight = frameHeight * grid.rows;
+    const canvas = createCanvas(thumbnailWidth, thumbnailHeight);
+    const ctx = canvas.getContext('2d') as SKRSContext2D;
+    if (!ctx) {
+      throw new Error('Unable to get 2D context');
+    }
+
+    // Draw frames in grid
+    for (let i = 0; i < frames.length; i++) {
+      const row = Math.floor(i / grid.cols);
+      const col = i % grid.cols;
+      const x = col * frameWidth;
+      const y = row * frameHeight;
+
+      const frameImage = await loadImage(frames[i]);
+      ctx.drawImage(frameImage, x, y, frameWidth, frameHeight);
+    }
+
+    return {
+      buffer: canvas.toBuffer('image/png'),
+      canvas: { width: thumbnailWidth, height: thumbnailHeight }
+    };
+  }
+
+  /**
+   * Convert video format
+   * @private
+   */
+  async #convertVideo(
+    videoSource: string | Buffer,
+    options: {
+      outputPath: string;
+      format?: 'mp4' | 'webm' | 'avi' | 'mov' | 'mkv';
+      quality?: 'low' | 'medium' | 'high' | 'ultra';
+      bitrate?: number;
+      fps?: number;
+      resolution?: { width: number; height: number };
+    }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    // Handle video source
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    const format = options.format || 'mp4';
+    const qualityPresets: Record<string, string> = {
+      low: '-crf 28',
+      medium: '-crf 23',
+      high: '-crf 18',
+      ultra: '-crf 15'
+    };
+    const qualityFlag = options.bitrate
+      ? `-b:v ${options.bitrate}k`
+      : qualityPresets[options.quality || 'medium'];
+
+    const fpsFlag = options.fps ? `-r ${options.fps}` : '';
+    const resolutionFlag = options.resolution
+      ? `-vf scale=${options.resolution.width}:${options.resolution.height}`
+      : '';
+
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    const command = `ffmpeg -i "${escapedVideoPath}" ${qualityFlag} ${fpsFlag} ${resolutionFlag} -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, {
+        timeout: 300000, // 5 minute timeout
+        maxBuffer: 10 * 1024 * 1024
+      });
+
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Trim/Cut video
+   * @private
+   */
+  async #trimVideo(
+    videoSource: string | Buffer,
+    options: { startTime: number; endTime: number; outputPath: string }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    const duration = options.endTime - options.startTime;
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    const command = `ffmpeg -i "${escapedVideoPath}" -ss ${options.startTime} -t ${duration} -c copy -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, {
+        timeout: 300000,
+        maxBuffer: 10 * 1024 * 1024
+      });
+
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Extract audio from video
+   * @private
+   */
+  async #extractAudio(
+    videoSource: string | Buffer,
+    options: { outputPath: string; format?: 'mp3' | 'wav' | 'aac' | 'ogg'; bitrate?: number }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    // Check if video has audio stream
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    try {
+      const { stdout } = await execAsync(
+        `ffprobe -v error -select_streams a:0 -show_entries stream=codec_type -of default=noprint_wrappers=1:nokey=1 "${escapedVideoPath}"`,
+        { timeout: 10000, maxBuffer: 1024 * 1024 }
+      );
+      const hasAudio = stdout.toString().trim() === 'audio';
+      if (!hasAudio) {
+        throw new Error('Video does not contain an audio stream. Cannot extract audio.');
+      }
+    } catch (error) {
+      if (error instanceof Error && error.message.includes('Video does not contain')) {
+        throw error;
+      }
+      // If ffprobe fails, assume no audio
+      throw new Error('Video does not contain an audio stream. Cannot extract audio.');
+    }
+
+    const format = options.format || 'mp3';
+    const bitrate = options.bitrate || 128;
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    const command = `ffmpeg -i "${escapedVideoPath}" -vn -acodec ${format === 'mp3' ? 'libmp3lame' : format === 'wav' ? 'pcm_s16le' : format === 'aac' ? 'aac' : 'libvorbis'} -ab ${bitrate}k -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, {
+        timeout: 300000,
+        maxBuffer: 10 * 1024 * 1024
+      });
+
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Add watermark to video
+   * @private
+   */
+  async #addWatermarkToVideo(
+    videoSource: string | Buffer,
+    options: {
+      watermarkPath: string;
+      position?: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right' | 'center';
+      opacity?: number;
+      size?: { width: number; height: number };
+      outputPath: string;
+    }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    let watermarkPath = options.watermarkPath;
+    if (!/^https?:\/\//i.test(watermarkPath)) {
+      watermarkPath = path.join(process.cwd(), watermarkPath);
+    }
+    if (!fs.existsSync(watermarkPath)) {
+      throw new Error(`Watermark file not found: ${options.watermarkPath}`);
+    }
+
+    const position = options.position || 'bottom-right';
+    const opacity = options.opacity || 0.5;
+    const size = options.size ? `scale=${options.size.width}:${options.size.height}` : '';
+
+    const positionMap: Record<string, string> = {
+      'top-left': '10:10',
+      'top-right': 'W-w-10:10',
+      'bottom-left': '10:H-h-10',
+      'bottom-right': 'W-w-10:H-h-10',
+      'center': '(W-w)/2:(H-h)/2'
+    };
+
+    const overlay = positionMap[position];
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedWatermarkPath = watermarkPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    const filter = `[1:v]${size ? size + ',' : ''}format=rgba,colorchannelmixer=aa=${opacity}[wm];[0:v][wm]overlay=${overlay}`;
+    const command = `ffmpeg -i "${escapedVideoPath}" -i "${escapedWatermarkPath}" -filter_complex "${filter}" -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, {
+        timeout: 300000,
+        maxBuffer: 10 * 1024 * 1024
+      });
+
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Change video speed
+   * @private
+   */
+  async #changeVideoSpeed(
+    videoSource: string | Buffer,
+    options: { speed: number; outputPath: string }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    // Check if video has audio stream
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    let hasAudio = false;
+    try {
+      const { stdout } = await execAsync(
+        `ffprobe -v error -select_streams a:0 -show_entries stream=codec_type -of default=noprint_wrappers=1:nokey=1 "${escapedVideoPath}"`,
+        { timeout: 10000, maxBuffer: 1024 * 1024 }
+      );
+      hasAudio = stdout.toString().trim() === 'audio';
+    } catch {
+      hasAudio = false;
+    }
+
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+    let command: string;
+
+    if (hasAudio) {
+      // Video has audio - process both video and audio
+      // For speeds > 2.0, we need to chain atempo filters (atempo max is 2.0)
+      if (options.speed > 2.0) {
+        const atempoCount = Math.ceil(Math.log2(options.speed));
+        const atempoValue = Math.pow(2, Math.log2(options.speed) / atempoCount);
+        const atempoFilters = Array(atempoCount).fill(atempoValue).map(v => `atempo=${v}`).join(',');
+        command = `ffmpeg -i "${escapedVideoPath}" -filter_complex "[0:v]setpts=${1/options.speed}*PTS[v];[0:a]${atempoFilters}[a]" -map "[v]" -map "[a]" -y "${escapedOutputPath}"`;
+      } else if (options.speed < 0.5) {
+        // For speeds < 0.5, we need to chain atempo filters
+        const atempoCount = Math.ceil(Math.log2(1 / options.speed));
+        const atempoValue = Math.pow(0.5, Math.log2(1 / options.speed) / atempoCount);
+        const atempoFilters = Array(atempoCount).fill(atempoValue).map(v => `atempo=${v}`).join(',');
+        command = `ffmpeg -i "${escapedVideoPath}" -filter_complex "[0:v]setpts=${1/options.speed}*PTS[v];[0:a]${atempoFilters}[a]" -map "[v]" -map "[a]" -y "${escapedOutputPath}"`;
+      } else {
+        // Normal speed range (0.5 to 2.0)
+        command = `ffmpeg -i "${escapedVideoPath}" -filter_complex "[0:v]setpts=${1/options.speed}*PTS[v];[0:a]atempo=${options.speed}[a]" -map "[v]" -map "[a]" -y "${escapedOutputPath}"`;
+      }
+    } else {
+      // No audio - only process video
+      command = `ffmpeg -i "${escapedVideoPath}" -filter_complex "[0:v]setpts=${1/options.speed}*PTS[v]" -map "[v]" -y "${escapedOutputPath}"`;
+    }
+
+    try {
+      await execAsync(command, {
+        timeout: 300000,
+        maxBuffer: 10 * 1024 * 1024
+      });
+
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Generate video preview (multiple frames)
+   * @private
+   */
+  async #generateVideoPreview(
+    videoSource: string | Buffer,
+    options: {
+      count?: number;
+      outputDirectory?: string;
+      outputFormat?: 'jpg' | 'png';
+      quality?: number;
+    },
+    videoInfo: any
+  ): Promise<Array<{ source: string; frameNumber: number; time: number }>> {
+    const count = options.count || 10;
+    const outputDir = options.outputDirectory || path.join(process.cwd(), 'video-preview');
+    const outputFormat = options.outputFormat || 'png';
+    const quality = options.quality || 2;
+
+    if (!fs.existsSync(outputDir)) {
+      fs.mkdirSync(outputDir, { recursive: true });
+    }
+
+    if (!videoInfo) {
+      videoInfo = await this.getVideoInfo(videoSource, true);
+    }
+
+    const duration = videoInfo.duration;
+    const interval = duration / (count + 1);
+
+    const frames: Array<{ source: string; frameNumber: number; time: number }> = [];
+
+    for (let i = 1; i <= count; i++) {
+      const time = interval * i;
+      const frameBuffer = await this.#extractVideoFrame(videoSource, 0, time, outputFormat, quality);
+
+      if (frameBuffer) {
+        const framePath = path.join(outputDir, `preview-${String(i).padStart(3, '0')}.${outputFormat}`);
+        fs.writeFileSync(framePath, frameBuffer);
+        frames.push({
+          source: framePath,
+          frameNumber: i,
+          time: time
+        });
+      }
+    }
+
+    return frames;
+  }
+
+  /**
+   * Apply video effects/filters
+   * @private
+   */
+  async #applyVideoEffects(
+    videoSource: string | Buffer,
+    options: {
+      filters: Array<{
+        type: 'blur' | 'brightness' | 'contrast' | 'saturation' | 'grayscale' | 'sepia' | 'invert' | 'sharpen' | 'noise';
+        intensity?: number;
+        value?: number;
+      }>;
+      outputPath: string;
+    }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    // Build filter chain
+    const filters: string[] = [];
+    for (const filter of options.filters) {
+      switch (filter.type) {
+        case 'blur':
+          filters.push(`boxblur=${filter.intensity || 5}`);
+          break;
+        case 'brightness':
+          filters.push(`eq=brightness=${((filter.value || 0) / 100).toFixed(2)}`);
+          break;
+        case 'contrast':
+          filters.push(`eq=contrast=${1 + ((filter.value || 0) / 100)}`);
+          break;
+        case 'saturation':
+          filters.push(`eq=saturation=${1 + ((filter.value || 0) / 100)}`);
+          break;
+        case 'grayscale':
+          filters.push('hue=s=0');
+          break;
+        case 'sepia':
+          filters.push('colorchannelmixer=.393:.769:.189:0:.349:.686:.168:0:.272:.534:.131');
+          break;
+        case 'invert':
+          filters.push('negate');
+          break;
+        case 'sharpen':
+          filters.push(`unsharp=5:5:${filter.intensity || 1.0}:5:5:0.0`);
+          break;
+        case 'noise':
+          filters.push(`noise=alls=${filter.intensity || 20}:allf=t+u`);
+          break;
+      }
+    }
+
+    const filterChain = filters.length > 0 ? `-vf "${filters.join(',')}"` : '';
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    const command = `ffmpeg -i "${escapedVideoPath}" ${filterChain} -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Merge/Concatenate videos
+   * @private
+   */
+  async #mergeVideos(
+    options: {
+      videos: Array<string | Buffer>;
+      outputPath: string;
+      mode?: 'sequential' | 'side-by-side' | 'grid';
+      grid?: { cols: number; rows: number };
+    }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    const timestamp = Date.now();
+    const videoPaths: string[] = [];
+    const shouldCleanup: boolean[] = [];
+
+    // Prepare all video files
+    for (let i = 0; i < options.videos.length; i++) {
+      const video = options.videos[i];
+      if (Buffer.isBuffer(video)) {
+        const tempPath = path.join(frameDir, `temp-video-${timestamp}-${i}.mp4`);
+        fs.writeFileSync(tempPath, video);
+        videoPaths.push(tempPath);
+        shouldCleanup.push(true);
+      } else {
+        let resolvedPath = video;
+        if (!/^https?:\/\//i.test(resolvedPath)) {
+          resolvedPath = path.join(process.cwd(), resolvedPath);
+        }
+        if (!fs.existsSync(resolvedPath)) {
+          throw new Error(`Video file not found: ${video}`);
+        }
+        videoPaths.push(resolvedPath);
+        shouldCleanup.push(false);
+      }
+    }
+
+    const mode = options.mode || 'sequential';
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    let command: string;
+
+    if (mode === 'sequential') {
+      // Create concat file
+      const concatFile = path.join(frameDir, `concat-${timestamp}.txt`);
+      const concatContent = videoPaths.map(vp => `file '${vp.replace(/'/g, "\\'")}'`).join('\n');
+      fs.writeFileSync(concatFile, concatContent);
+
+      command = `ffmpeg -f concat -safe 0 -i "${concatFile.replace(/"/g, '\\"')}" -c copy -y "${escapedOutputPath}"`;
+    } else if (mode === 'side-by-side') {
+      const escapedPaths = videoPaths.map(vp => vp.replace(/"/g, '\\"'));
+      command = `ffmpeg -i "${escapedPaths[0]}" -i "${escapedPaths[1] || escapedPaths[0]}" -filter_complex "[0:v][1:v]hstack=inputs=2[v]" -map "[v]" -y "${escapedOutputPath}"`;
+    } else if (mode === 'grid') {
+      const grid = options.grid || { cols: 2, rows: 2 };
+      const escapedPaths = videoPaths.map(vp => vp.replace(/"/g, '\\"'));
+      // Simplified grid - would need more complex filter for full grid
+      command = `ffmpeg -i "${escapedPaths[0]}" -i "${escapedPaths[1] || escapedPaths[0]}" -filter_complex "[0:v][1:v]hstack=inputs=2[v]" -map "[v]" -y "${escapedOutputPath}"`;
+    } else {
+      throw new Error(`Unknown merge mode: ${mode}`);
+    }
+
+    try {
+      await execAsync(command, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
+
+      // Cleanup
+      for (let i = 0; i < videoPaths.length; i++) {
+        if (shouldCleanup[i] && fs.existsSync(videoPaths[i])) {
+          fs.unlinkSync(videoPaths[i]);
+        }
+      }
+
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      // Cleanup on error
+      for (let i = 0; i < videoPaths.length; i++) {
+        if (shouldCleanup[i] && fs.existsSync(videoPaths[i])) {
+          fs.unlinkSync(videoPaths[i]);
+        }
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Rotate/Flip video
+   * @private
+   */
+  async #rotateVideo(
+    videoSource: string | Buffer,
+    options: { angle?: 90 | 180 | 270; flip?: 'horizontal' | 'vertical' | 'both'; outputPath: string }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    const filters: string[] = [];
+
+    if (options.angle) {
+      const rotationMap: Record<number, string> = {
+        90: 'transpose=1',
+        180: 'transpose=1,transpose=1',
+        270: 'transpose=2'
+      };
+      filters.push(rotationMap[options.angle]);
+    }
+
+    if (options.flip) {
+      if (options.flip === 'horizontal') {
+        filters.push('hflip');
+      } else if (options.flip === 'vertical') {
+        filters.push('vflip');
+      } else if (options.flip === 'both') {
+        filters.push('hflip', 'vflip');
+      }
+    }
+
+    const filterChain = filters.length > 0 ? `-vf "${filters.join(',')}"` : '';
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    const command = `ffmpeg -i "${escapedVideoPath}" ${filterChain} -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Crop video
+   * @private
+   */
+  async #cropVideo(
+    videoSource: string | Buffer,
+    options: { x: number; y: number; width: number; height: number; outputPath: string }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    const command = `ffmpeg -i "${escapedVideoPath}" -vf "crop=${options.width}:${options.height}:${options.x}:${options.y}" -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Compress/Optimize video
+   * @private
+   */
+  async #compressVideo(
+    videoSource: string | Buffer,
+    options: { outputPath: string; quality?: 'low' | 'medium' | 'high' | 'ultra'; targetSize?: number; maxBitrate?: number }
+  ): Promise<{ outputPath: string; success: boolean; originalSize?: number; compressedSize?: number }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+    let originalSize = 0;
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+      originalSize = videoSource.length;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+      originalSize = fs.statSync(resolvedPath).size;
+    }
+
+    const qualityPresets: Record<string, string> = {
+      low: '-crf 32 -preset fast',
+      medium: '-crf 28 -preset medium',
+      high: '-crf 23 -preset slow',
+      ultra: '-crf 18 -preset veryslow'
+    };
+
+    let qualityFlag = qualityPresets[options.quality || 'medium'];
+
+    if (options.maxBitrate) {
+      qualityFlag = `-b:v ${options.maxBitrate}k -maxrate ${options.maxBitrate}k -bufsize ${options.maxBitrate * 2}k`;
+    }
+
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    const command = `ffmpeg -i "${escapedVideoPath}" ${qualityFlag} -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
+
+      const compressedSize = fs.existsSync(options.outputPath) ? fs.statSync(options.outputPath).size : 0;
+
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+
+      return {
+        outputPath: options.outputPath,
+        success: true,
+        originalSize,
+        compressedSize
+      };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Add text overlay to video
+   * @private
+   */
+  async #addTextToVideo(
+    videoSource: string | Buffer,
+    options: {
+      text: string;
+      position?: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right' | 'center' | 'top-center' | 'bottom-center';
+      fontSize?: number;
+      fontColor?: string;
+      backgroundColor?: string;
+      startTime?: number;
+      endTime?: number;
+      outputPath: string;
+    }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    const position = options.position || 'bottom-center';
+    const fontSize = options.fontSize || 24;
+    const fontColor = options.fontColor || 'white';
+    const bgColor = options.backgroundColor || 'black@0.5';
+
+    const positionMap: Record<string, string> = {
+      'top-left': `x=10:y=10`,
+      'top-center': `x=(w-text_w)/2:y=10`,
+      'top-right': `x=w-text_w-10:y=10`,
+      'center': `x=(w-text_w)/2:y=(h-text_h)/2`,
+      'bottom-left': `x=10:y=h-text_h-10`,
+      'bottom-center': `x=(w-text_w)/2:y=h-text_h-10`,
+      'bottom-right': `x=w-text_w-10:y=h-text_h-10`
+    };
+
+    const pos = positionMap[position];
+    const textEscaped = options.text.replace(/:/g, '\\:').replace(/'/g, "\\'");
+    const timeFilter = options.startTime !== undefined && options.endTime !== undefined
+      ? `:enable='between(t,${options.startTime},${options.endTime})'`
+      : '';
+
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    const command = `ffmpeg -i "${escapedVideoPath}" -vf "drawtext=text='${textEscaped}':fontsize=${fontSize}:fontcolor=${fontColor}:box=1:boxcolor=${bgColor}:${pos}${timeFilter}" -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Add fade effects to video
+   * @private
+   */
+  async #addFadeToVideo(
+    videoSource: string | Buffer,
+    options: { fadeIn?: number; fadeOut?: number; outputPath: string }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    const videoInfo = await this.getVideoInfo(videoPath, true);
+    const duration = videoInfo?.duration || 0;
+
+    const filters: string[] = [];
+
+    if (options.fadeIn) {
+      filters.push(`fade=t=in:st=0:d=${options.fadeIn}`);
+    }
+
+    if (options.fadeOut && duration > options.fadeOut) {
+      filters.push(`fade=t=out:st=${duration - options.fadeOut}:d=${options.fadeOut}`);
+    }
+
+    const filterChain = filters.length > 0 ? `-vf "${filters.join(',')}"` : '';
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    const command = `ffmpeg -i "${escapedVideoPath}" ${filterChain} -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Reverse video playback
+   * @private
+   */
+  async #reverseVideo(
+    videoSource: string | Buffer,
+    options: { outputPath: string }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    const command = `ffmpeg -i "${escapedVideoPath}" -vf reverse -af areverse -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Create seamless video loop
+   * @private
+   */
+  async #createVideoLoop(
+    videoSource: string | Buffer,
+    options: { outputPath: string; smooth?: boolean }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    // Create loop by concatenating video with itself
+    const concatFile = path.join(frameDir, `loop-${timestamp}.txt`);
+    const concatContent = `file '${videoPath.replace(/'/g, "\\'")}'\nfile '${videoPath.replace(/'/g, "\\'")}'`;
+    fs.writeFileSync(concatFile, concatContent);
+
+    const command = `ffmpeg -f concat -safe 0 -i "${concatFile.replace(/"/g, '\\"')}" -c copy -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+
+      if (fs.existsSync(concatFile)) {
+        fs.unlinkSync(concatFile);
+      }
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      if (fs.existsSync(concatFile)) {
+        fs.unlinkSync(concatFile);
+      }
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Batch process multiple videos
+   * @private
+   */
+  async #batchProcessVideos(
+    options: { videos: Array<{ source: string | Buffer; operations: any }>; outputDirectory: string }
+  ): Promise<Array<{ source: string; output: string; success: boolean }>> {
+    if (!fs.existsSync(options.outputDirectory)) {
+      fs.mkdirSync(options.outputDirectory, { recursive: true });
+    }
+
+    const results: Array<{ source: string; output: string; success: boolean }> = [];
+
+    for (let i = 0; i < options.videos.length; i++) {
+      const video = options.videos[i];
+      const outputPath = path.join(options.outputDirectory, `batch-${i + 1}.mp4`);
+
+      try {
+        // Process each video with its operations
+        await this.createVideo({
+          source: video.source,
+          ...video.operations
+        });
+
+        results.push({
+          source: typeof video.source === 'string' ? video.source : 'buffer',
+          output: outputPath,
+          success: true
+        });
+      } catch (error) {
+        results.push({
+          source: typeof video.source === 'string' ? video.source : 'buffer',
+          output: outputPath,
+          success: false
+        });
+      }
+    }
+
+    return results;
+  }
+
+  /**
+   * Detect scene changes in video
+   * @private
+   */
+  async #detectVideoScenes(
+    videoSource: string | Buffer,
+    options: { threshold?: number; outputPath?: string }
+  ): Promise<Array<{ time: number; scene: number }>> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    const threshold = options.threshold || 0.3;
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const sceneFile = path.join(frameDir, `scenes-${timestamp}.txt`);
+
+    // Use FFmpeg's scene detection
+    const command = `ffmpeg -i "${escapedVideoPath}" -vf "select='gt(scene,${threshold})',showinfo" -f null - 2>&1 | grep "pts_time" | awk '{print $6}' | sed 's/time=//'`;
+
+    try {
+      const { stdout } = await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+      const times = stdout.toString().trim().split('\n').filter(t => t).map(parseFloat);
+
+      const scenes = times.map((time, index) => ({ time, scene: index + 1 }));
+
+      if (options.outputPath && scenes.length > 0) {
+        fs.writeFileSync(options.outputPath, JSON.stringify(scenes, null, 2));
+      }
+
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      if (fs.existsSync(sceneFile)) {
+        fs.unlinkSync(sceneFile);
+      }
+
+      return scenes;
+    } catch (error) {
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+      if (fs.existsSync(sceneFile)) {
+        fs.unlinkSync(sceneFile);
+      }
+      // Return empty array if detection fails
+      return [];
+    }
+  }
+
+  /**
+   * Stabilize video (reduce shake)
+   * @private
+   */
+  async #stabilizeVideo(
+    videoSource: string | Buffer,
+    options: { outputPath: string; smoothing?: number }
+  ): Promise<{ outputPath: string; success: boolean }> {
+    const frameDir = path.join(process.cwd(), '.temp-frames');
+    if (!fs.existsSync(frameDir)) {
+      fs.mkdirSync(frameDir, { recursive: true });
+    }
+
+    let videoPath: string;
+    let shouldCleanupVideo = false;
+    const timestamp = Date.now();
+
+    if (Buffer.isBuffer(videoSource)) {
+      videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+      fs.writeFileSync(videoPath, videoSource);
+      shouldCleanupVideo = true;
+    } else {
+      let resolvedPath = videoSource;
+      if (!/^https?:\/\//i.test(resolvedPath)) {
+        resolvedPath = path.join(process.cwd(), resolvedPath);
+      }
+      if (!fs.existsSync(resolvedPath)) {
+        throw new Error(`Video file not found: ${videoSource}`);
+      }
+      videoPath = resolvedPath;
+    }
+
+    const smoothing = options.smoothing || 10;
+    const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+    const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+    // Two-pass stabilization
+    const transformsFile = path.join(frameDir, `transforms-${timestamp}.trf`);
+
+    // Pass 1: Analyze
+    const analyzeCommand = `ffmpeg -i "${escapedVideoPath}" -vf vidstabdetect=shakiness=5:accuracy=15:result="${transformsFile.replace(/"/g, '\\"')}" -f null -`;
+
+    // Pass 2: Transform
+    const transformCommand = `ffmpeg -i "${escapedVideoPath}" -vf vidstabtransform=smoothing=${smoothing}:input="${transformsFile.replace(/"/g, '\\"')}" -y "${escapedOutputPath}"`;
+
+    try {
+      await execAsync(analyzeCommand, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
+      await execAsync(transformCommand, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
+
+      if (fs.existsSync(transformsFile)) {
+        fs.unlinkSync(transformsFile);
+      }
+      if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+        fs.unlinkSync(videoPath);
+      }
+
+      return { outputPath: options.outputPath, success: true };
+    } catch (error) {
+      // Fallback to simple deshake if vidstab is not available
+      const simpleCommand = `ffmpeg -i "${escapedVideoPath}" -vf "hqdn3d=4:3:6:4.5" -y "${escapedOutputPath}"`;
+      try {
+        await execAsync(simpleCommand, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+        if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+          fs.unlinkSync(videoPath);
+        }
+        return { outputPath: options.outputPath, success: true };
+      } catch (fallbackError) {
+        if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+          fs.unlinkSync(videoPath);
+        }
+        throw error;
+      }
+    }
+  }
+
+    /**
+     * Color correct video
+     * @private
+     */
+    async #colorCorrectVideo(
+        videoSource: string | Buffer,
+        options: {
+            brightness?: number;
+            contrast?: number;
+            saturation?: number;
+            hue?: number;
+            temperature?: number;
+            outputPath: string;
+        }
+    ): Promise<{ outputPath: string; success: boolean }> {
+        const frameDir = path.join(process.cwd(), '.temp-frames');
+        if (!fs.existsSync(frameDir)) {
+            fs.mkdirSync(frameDir, { recursive: true });
+        }
+
+        let videoPath: string;
+        let shouldCleanupVideo = false;
+        const timestamp = Date.now();
+
+        if (Buffer.isBuffer(videoSource)) {
+            videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+            fs.writeFileSync(videoPath, videoSource);
+            shouldCleanupVideo = true;
+        } else {
+            let resolvedPath = videoSource;
+            if (!/^https?:\/\//i.test(resolvedPath)) {
+                resolvedPath = path.join(process.cwd(), resolvedPath);
+            }
+            if (!fs.existsSync(resolvedPath)) {
+                throw new Error(`Video file not found: ${videoSource}`);
+            }
+            videoPath = resolvedPath;
+        }
+
+        const filters: string[] = [];
+
+        if (options.brightness !== undefined) {
+            filters.push(`eq=brightness=${(options.brightness / 100).toFixed(2)}`);
+        }
+        if (options.contrast !== undefined) {
+            filters.push(`eq=contrast=${1 + (options.contrast / 100)}`);
+        }
+        if (options.saturation !== undefined) {
+            filters.push(`eq=saturation=${1 + (options.saturation / 100)}`);
+        }
+        if (options.hue !== undefined) {
+            filters.push(`hue=h=${options.hue}`);
+        }
+        if (options.temperature !== undefined) {
+            // Temperature adjustment using colorbalance
+            const temp = options.temperature;
+            if (temp > 0) {
+                filters.push(`colorbalance=rs=${temp/100}:gs=-${temp/200}:bs=-${temp/100}`);
+            } else {
+                filters.push(`colorbalance=rs=${temp/100}:gs=${-temp/200}:bs=${-temp/100}`);
+            }
+        }
+
+        const filterChain = filters.length > 0 ? `-vf "${filters.join(',')}"` : '';
+        const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+        const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+        const command = `ffmpeg -i "${escapedVideoPath}" ${filterChain} -y "${escapedOutputPath}"`;
+
+        try {
+            await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+            if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+                fs.unlinkSync(videoPath);
+            }
+            return { outputPath: options.outputPath, success: true };
+        } catch (error) {
+            if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+                fs.unlinkSync(videoPath);
+            }
+            throw error;
+        }
+    }
+
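For reference, the filter chain assembled above is just a comma-joined list of eq/hue/colorbalance filters. A short sketch with made-up option values showing the resulting -vf argument:

```ts
// Sketch only; the option values are illustrative, not defaults from the package.
const opts: { brightness?: number; saturation?: number; hue?: number; temperature?: number } =
    { brightness: 20, saturation: 10, hue: 15, temperature: 30 };

const filters: string[] = [];
if (opts.brightness !== undefined) filters.push(`eq=brightness=${(opts.brightness / 100).toFixed(2)}`);
if (opts.saturation !== undefined) filters.push(`eq=saturation=${1 + opts.saturation / 100}`);
if (opts.hue !== undefined) filters.push(`hue=h=${opts.hue}`);
if (opts.temperature !== undefined) {
    const t = opts.temperature;
    // Warm shift: push red up, pull green and blue down (as in the positive branch above).
    filters.push(`colorbalance=rs=${t / 100}:gs=-${t / 200}:bs=-${t / 100}`);
}

// Prints: eq=brightness=0.20,eq=saturation=1.1,hue=h=15,colorbalance=rs=0.3:gs=-0.15:bs=-0.3
console.log(filters.join(","));
```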
+    /**
+     * Add picture-in-picture
+     * @private
+     */
+    async #addPictureInPicture(
+        videoSource: string | Buffer,
+        options: {
+            overlayVideo: string | Buffer;
+            position?: 'top-left' | 'top-right' | 'bottom-left' | 'bottom-right' | 'center';
+            size?: { width: number; height: number };
+            opacity?: number;
+            outputPath: string;
+        }
+    ): Promise<{ outputPath: string; success: boolean }> {
+        const frameDir = path.join(process.cwd(), '.temp-frames');
+        if (!fs.existsSync(frameDir)) {
+            fs.mkdirSync(frameDir, { recursive: true });
+        }
+
+        let videoPath: string;
+        let overlayPath: string;
+        let shouldCleanupVideo = false;
+        let shouldCleanupOverlay = false;
+        const timestamp = Date.now();
+
+        // Handle main video
+        if (Buffer.isBuffer(videoSource)) {
+            videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+            fs.writeFileSync(videoPath, videoSource);
+            shouldCleanupVideo = true;
+        } else {
+            let resolvedPath = videoSource;
+            if (!/^https?:\/\//i.test(resolvedPath)) {
+                resolvedPath = path.join(process.cwd(), resolvedPath);
+            }
+            if (!fs.existsSync(resolvedPath)) {
+                throw new Error(`Video file not found: ${videoSource}`);
+            }
+            videoPath = resolvedPath;
+        }
+
+        // Handle overlay video
+        if (Buffer.isBuffer(options.overlayVideo)) {
+            overlayPath = path.join(frameDir, `temp-overlay-${timestamp}.mp4`);
+            fs.writeFileSync(overlayPath, options.overlayVideo);
+            shouldCleanupOverlay = true;
+        } else {
+            let resolvedPath = options.overlayVideo;
+            if (!/^https?:\/\//i.test(resolvedPath)) {
+                resolvedPath = path.join(process.cwd(), resolvedPath);
+            }
+            if (!fs.existsSync(resolvedPath)) {
+                throw new Error(`Overlay video file not found: ${options.overlayVideo}`);
+            }
+            overlayPath = resolvedPath;
+        }
+
+        const position = options.position || 'bottom-right';
+        const size = options.size || { width: 320, height: 180 };
+        const opacity = options.opacity || 1.0;
+
+        const positionMap: Record<string, string> = {
+            'top-left': '10:10',
+            'top-right': 'W-w-10:10',
+            'bottom-left': '10:H-h-10',
+            'bottom-right': 'W-w-10:H-h-10',
+            'center': '(W-w)/2:(H-h)/2'
+        };
+
+        const overlay = positionMap[position];
+        const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+        const escapedOverlayPath = overlayPath.replace(/"/g, '\\"');
+        const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+        const filter = `[1:v]scale=${size.width}:${size.height},format=rgba,colorchannelmixer=aa=${opacity}[overlay];[0:v][overlay]overlay=${overlay}`;
+        const command = `ffmpeg -i "${escapedVideoPath}" -i "${escapedOverlayPath}" -filter_complex "${filter}" -y "${escapedOutputPath}"`;
+
+        try {
+            await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+
+            if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+                fs.unlinkSync(videoPath);
+            }
+            if (shouldCleanupOverlay && fs.existsSync(overlayPath)) {
+                fs.unlinkSync(overlayPath);
+            }
+
+            return { outputPath: options.outputPath, success: true };
+        } catch (error) {
+            if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+                fs.unlinkSync(videoPath);
+            }
+            if (shouldCleanupOverlay && fs.existsSync(overlayPath)) {
+                fs.unlinkSync(overlayPath);
+            }
+            throw error;
+        }
+    }
+
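With the defaults (bottom-right corner, 320x180 overlay, full opacity), the filter_complex built above resolves to a single overlay graph. A sketch that reproduces it; the file names in the comment are placeholders:

```ts
// Sketch: the overlay graph for the default picture-in-picture options shown above.
const size = { width: 320, height: 180 };
const opacity = 1.0;
const overlayPos = "W-w-10:H-h-10"; // bottom-right with a 10px margin

const filter =
    `[1:v]scale=${size.width}:${size.height},format=rgba,colorchannelmixer=aa=${opacity}[overlay];` +
    `[0:v][overlay]overlay=${overlayPos}`;

// Would be run as: ffmpeg -i main.mp4 -i pip.mp4 -filter_complex "<filter>" -y out.mp4
console.log(filter);
```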
+    /**
+     * Create split screen video
+     * @private
+     */
+    async #createSplitScreen(
+        options: {
+            videos: Array<string | Buffer>;
+            layout?: 'side-by-side' | 'top-bottom' | 'grid';
+            grid?: { cols: number; rows: number };
+            outputPath: string;
+        }
+    ): Promise<{ outputPath: string; success: boolean }> {
+        const frameDir = path.join(process.cwd(), '.temp-frames');
+        if (!fs.existsSync(frameDir)) {
+            fs.mkdirSync(frameDir, { recursive: true });
+        }
+
+        const timestamp = Date.now();
+        const videoPaths: string[] = [];
+        const shouldCleanup: boolean[] = [];
+
+        // Prepare all video files
+        for (let i = 0; i < options.videos.length; i++) {
+            const video = options.videos[i];
+            if (Buffer.isBuffer(video)) {
+                const tempPath = path.join(frameDir, `temp-video-${timestamp}-${i}.mp4`);
+                fs.writeFileSync(tempPath, video);
+                videoPaths.push(tempPath);
+                shouldCleanup.push(true);
+            } else {
+                let resolvedPath = video;
+                if (!/^https?:\/\//i.test(resolvedPath)) {
+                    resolvedPath = path.join(process.cwd(), resolvedPath);
+                }
+                if (!fs.existsSync(resolvedPath)) {
+                    throw new Error(`Video file not found: ${video}`);
+                }
+                videoPaths.push(resolvedPath);
+                shouldCleanup.push(false);
+            }
+        }
+
+        const layout = options.layout || 'side-by-side';
+        const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+        const escapedPaths = videoPaths.map(vp => vp.replace(/"/g, '\\"'));
+
+        let command: string;
+
+        if (layout === 'side-by-side' && videoPaths.length >= 2) {
+            command = `ffmpeg -i "${escapedPaths[0]}" -i "${escapedPaths[1]}" -filter_complex "[0:v][1:v]hstack=inputs=2[v]" -map "[v]" -y "${escapedOutputPath}"`;
+        } else if (layout === 'top-bottom' && videoPaths.length >= 2) {
+            command = `ffmpeg -i "${escapedPaths[0]}" -i "${escapedPaths[1]}" -filter_complex "[0:v][1:v]vstack=inputs=2[v]" -map "[v]" -y "${escapedOutputPath}"`;
+        } else if (layout === 'grid' && videoPaths.length >= 4) {
+            const grid = options.grid || { cols: 2, rows: 2 };
+            // Simplified 2x2 grid
+            command = `ffmpeg -i "${escapedPaths[0]}" -i "${escapedPaths[1]}" -i "${escapedPaths[2]}" -i "${escapedPaths[3]}" -filter_complex "[0:v][1:v]hstack=inputs=2[top];[2:v][3:v]hstack=inputs=2[bottom];[top][bottom]vstack=inputs=2[v]" -map "[v]" -y "${escapedOutputPath}"`;
+        } else {
+            throw new Error(`Invalid layout or insufficient videos for ${layout}`);
+        }
+
+        try {
+            await execAsync(command, { timeout: 600000, maxBuffer: 20 * 1024 * 1024 });
+
+            // Cleanup
+            for (let i = 0; i < videoPaths.length; i++) {
+                if (shouldCleanup[i] && fs.existsSync(videoPaths[i])) {
+                    fs.unlinkSync(videoPaths[i]);
+                }
+            }
+
+            return { outputPath: options.outputPath, success: true };
+        } catch (error) {
+            // Cleanup on error
+            for (let i = 0; i < videoPaths.length; i++) {
+                if (shouldCleanup[i] && fs.existsSync(videoPaths[i])) {
+                    fs.unlinkSync(videoPaths[i]);
+                }
+            }
+            throw error;
+        }
+    }
+
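The three layouts map onto ffmpeg's hstack/vstack filters; hstack expects inputs of equal height and vstack inputs of equal width. A small sketch of the graphs used above:

```ts
// Sketch: the filter graphs keyed by layout (copied from the commands above).
const layoutGraphs: Record<string, string> = {
    "side-by-side": "[0:v][1:v]hstack=inputs=2[v]",
    "top-bottom": "[0:v][1:v]vstack=inputs=2[v]",
    // 2x2 grid: build each row with hstack, then stack the two rows with vstack.
    "grid": "[0:v][1:v]hstack=inputs=2[top];[2:v][3:v]hstack=inputs=2[bottom];[top][bottom]vstack=inputs=2[v]"
};

console.log(layoutGraphs["side-by-side"]);
```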
+    /**
+     * Create time-lapse video
+     * @private
+     */
+    async #createTimeLapseVideo(
+        videoSource: string | Buffer,
+        options: { speed?: number; outputPath: string }
+    ): Promise<{ outputPath: string; success: boolean }> {
+        const speed = options.speed || 10;
+        // Time-lapse is essentially speeding up the video
+        return await this.#changeVideoSpeed(videoSource, { speed, outputPath: options.outputPath });
+    }
+
+    /**
+     * Mute video (remove audio)
+     * @private
+     */
+    async #muteVideo(
+        videoSource: string | Buffer,
+        options: { outputPath: string }
+    ): Promise<{ outputPath: string; success: boolean }> {
+        const frameDir = path.join(process.cwd(), '.temp-frames');
+        if (!fs.existsSync(frameDir)) {
+            fs.mkdirSync(frameDir, { recursive: true });
+        }
+
+        let videoPath: string;
+        let shouldCleanupVideo = false;
+        const timestamp = Date.now();
+
+        if (Buffer.isBuffer(videoSource)) {
+            videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+            fs.writeFileSync(videoPath, videoSource);
+            shouldCleanupVideo = true;
+        } else {
+            let resolvedPath = videoSource;
+            if (!/^https?:\/\//i.test(resolvedPath)) {
+                resolvedPath = path.join(process.cwd(), resolvedPath);
+            }
+            if (!fs.existsSync(resolvedPath)) {
+                throw new Error(`Video file not found: ${videoSource}`);
+            }
+            videoPath = resolvedPath;
+        }
+
+        const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+        const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+        const command = `ffmpeg -i "${escapedVideoPath}" -c copy -an -y "${escapedOutputPath}"`;
+
+        try {
+            await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+            if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+                fs.unlinkSync(videoPath);
+            }
+            return { outputPath: options.outputPath, success: true };
+        } catch (error) {
+            if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+                fs.unlinkSync(videoPath);
+            }
+            throw error;
+        }
+    }
+
+    /**
+     * Adjust video volume
+     * @private
+     */
+    async #adjustVideoVolume(
+        videoSource: string | Buffer,
+        options: { volume: number; outputPath: string }
+    ): Promise<{ outputPath: string; success: boolean }> {
+        const frameDir = path.join(process.cwd(), '.temp-frames');
+        if (!fs.existsSync(frameDir)) {
+            fs.mkdirSync(frameDir, { recursive: true });
+        }
+
+        let videoPath: string;
+        let shouldCleanupVideo = false;
+        const timestamp = Date.now();
+
+        if (Buffer.isBuffer(videoSource)) {
+            videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+            fs.writeFileSync(videoPath, videoSource);
+            shouldCleanupVideo = true;
+        } else {
+            let resolvedPath = videoSource;
+            if (!/^https?:\/\//i.test(resolvedPath)) {
+                resolvedPath = path.join(process.cwd(), resolvedPath);
+            }
+            if (!fs.existsSync(resolvedPath)) {
+                throw new Error(`Video file not found: ${videoSource}`);
+            }
+            videoPath = resolvedPath;
+        }
+
+        const volume = Math.max(0, Math.min(10, options.volume)); // Clamp between 0 and 10
+        const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+        const escapedOutputPath = options.outputPath.replace(/"/g, '\\"');
+
+        const command = `ffmpeg -i "${escapedVideoPath}" -af "volume=${volume}" -y "${escapedOutputPath}"`;
+
+        try {
+            await execAsync(command, { timeout: 300000, maxBuffer: 10 * 1024 * 1024 });
+            if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+                fs.unlinkSync(videoPath);
+            }
+            return { outputPath: options.outputPath, success: true };
+        } catch (error) {
+            if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+                fs.unlinkSync(videoPath);
+            }
+            throw error;
+        }
+    }
+
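#muteVideo and #adjustVideoVolume each reduce to a single ffmpeg invocation: -c copy -an drops the audio stream without re-encoding the video, and -af volume=N scales loudness (clamped to 0 to 10 above). A standalone sketch with placeholder paths, assuming ffmpeg is on the PATH:

```ts
import { exec } from "child_process";
import { promisify } from "util";

const run = promisify(exec);

async function audioSketch(): Promise<void> {
    // Strip the audio stream, keep the video stream untouched.
    await run(`ffmpeg -i "clip.mp4" -c copy -an -y "muted.mp4"`);
    // Double the volume of the audio track.
    await run(`ffmpeg -i "clip.mp4" -af "volume=2" -y "louder.mp4"`);
}

audioSketch().catch(console.error);
```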
+    /**
+     * Extracts a frame at a specific time in seconds
+     * @param videoSource - Video source (path, URL, or Buffer)
+     * @param timeSeconds - Time in seconds
+     * @param outputFormat - Output format ('jpg' or 'png', default: 'jpg')
+     * @param quality - JPEG quality 1-31 (lower = better, default: 2)
+     * @returns Buffer containing the frame image
+     */
+    async extractFrameAtTime(
+        videoSource: string | Buffer,
+        timeSeconds: number,
+        outputFormat: 'jpg' | 'png' = 'jpg',
+        quality: number = 2
+    ): Promise<Buffer | null> {
+        return this.#extractVideoFrame(videoSource, 0, timeSeconds, outputFormat, quality);
+    }
+
+    /**
+     * Extracts a frame by frame number (converts to time using video FPS)
+     * @param videoSource - Video source (path, URL, or Buffer)
+     * @param frameNumber - Frame number to extract (1-based: frame 1 = first frame)
+     * @param outputFormat - Output format ('jpg' or 'png', default: 'jpg')
+     * @param quality - JPEG quality 1-31 (lower = better, default: 2)
+     * @returns Buffer containing the frame image
+     */
+    async extractFrameByNumber(
+        videoSource: string | Buffer,
+        frameNumber: number,
+        outputFormat: 'jpg' | 'png' = 'jpg',
+        quality: number = 2
+    ): Promise<Buffer | null> {
+        // Get video info to convert frame number to time
+        const videoInfo = await this.getVideoInfo(videoSource, true);
+        if (!videoInfo || videoInfo.fps <= 0) {
+            throw new Error('Could not get video FPS to convert frame number to time');
+        }
+
+        // Convert frame number to time (frame 1 = 0 seconds, frame 2 = 1/fps, etc.)
+        // For 1-based frame numbers: frame 1 = time 0, frame 2 = time 1/fps
+        const timeSeconds = (frameNumber - 1) / videoInfo.fps;
+
+        return this.#extractVideoFrame(videoSource, frameNumber - 1, timeSeconds, outputFormat, quality);
+    }
+
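A usage sketch for the two public frame extractors above. The ApexPainter import path, the no-argument constructor, and ./input.mp4 are assumptions for illustration; the method signatures match the diff:

```ts
import { ApexPainter } from "apexify.js";
import fs from "fs";

const painter = new ApexPainter();

async function grabFrames(): Promise<void> {
    // Frame at the 2.5 second mark, returned as a JPEG buffer.
    const atTime = await painter.extractFrameAtTime("./input.mp4", 2.5, "jpg", 2);
    if (atTime) fs.writeFileSync("frame-2_5s.jpg", atTime);

    // The 30th frame (1-based); the timestamp is derived from the video's FPS.
    const byNumber = await painter.extractFrameByNumber("./input.mp4", 30, "png");
    if (byNumber) fs.writeFileSync("frame-30.png", byNumber);
}

grabFrames().catch(console.error);
```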
+    /**
+     * Extracts multiple frames at specific times
+     * @param videoSource - Video source (path, URL, or Buffer)
+     * @param times - Array of times in seconds
+     * @param outputFormat - Output format ('jpg' or 'png', default: 'jpg')
+     * @param quality - JPEG quality 1-31 (lower = better, default: 2)
+     * @returns Array of buffers containing frame images
+     */
+    async extractMultipleFrames(
+        videoSource: string | Buffer,
+        times: number[],
+        outputFormat: 'jpg' | 'png' = 'jpg',
+        quality: number = 2
+    ): Promise<Buffer[]> {
+        const frames: Buffer[] = [];
+        for (const time of times) {
+            const frame = await this.extractFrameAtTime(videoSource, time, outputFormat, quality);
+            if (frame) {
+                frames.push(frame);
+            }
+        }
+        return frames;
+    }
+
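extractMultipleFrames simply loops extractFrameAtTime over a list of timestamps, so a sampling call looks like this (same assumptions and placeholder path as the previous sketch):

```ts
import { ApexPainter } from "apexify.js";

const painter = new ApexPainter();

async function sampleFrames(): Promise<void> {
    // Grab thumbnails at a few points in the clip.
    const buffers = await painter.extractMultipleFrames("./input.mp4", [0, 1, 2.5, 10], "jpg");
    console.log(`Extracted ${buffers.length} frame buffers`);
}

sampleFrames().catch(console.error);
```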
+    /**
+     * Extracts ALL frames from a video and saves them as image files
+     * @param videoSource - Video source (path, URL, or Buffer)
+     * @param options - Extraction options
+     * @returns Array of frame file paths
+     */
+    async extractAllFrames(
+        videoSource: string | Buffer,
+        options?: {
+            outputFormat?: 'jpg' | 'png';
+            outputDirectory?: string;
+            quality?: number; // JPEG quality 1-31 (lower = better, default: 2)
+            prefix?: string; // Filename prefix (default: 'frame')
+            startTime?: number; // Start time in seconds (default: 0)
+            endTime?: number; // End time in seconds (default: video duration)
+        }
+    ): Promise<Array<{ source: string; frameNumber: number; time: number }>> {
+        try {
+            const ffmpegAvailable = await this.#checkFFmpegAvailable();
+            if (!ffmpegAvailable) {
+                const errorMessage =
+                    '❌ FFMPEG NOT FOUND\n' +
+                    'Video processing features require FFmpeg to be installed on your system.\n' +
+                    this.#getFFmpegInstallInstructions();
+
+                throw new Error(errorMessage);
+            }
+
+            // Get video info first
+            const videoInfo = await this.getVideoInfo(videoSource, true);
+            if (!videoInfo) {
+                throw new Error('Could not get video information');
+            }
+
+            const outputFormat = options?.outputFormat || 'png';
+            const outputDir = options?.outputDirectory || path.join(process.cwd(), 'extracted-frames');
+            const prefix = options?.prefix || 'frame';
+            const quality = options?.quality || 2;
+
+            // Create output directory
+            if (!fs.existsSync(outputDir)) {
+                fs.mkdirSync(outputDir, { recursive: true });
+            }
+
+            const frameDir = path.join(process.cwd(), '.temp-frames');
+            if (!fs.existsSync(frameDir)) {
+                fs.mkdirSync(frameDir, { recursive: true });
+            }
+
+            const timestamp = Date.now();
+            let videoPath: string;
+            let shouldCleanupVideo = false;
+
+            // Handle video source
+            if (Buffer.isBuffer(videoSource)) {
+                videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+                fs.writeFileSync(videoPath, videoSource);
+                shouldCleanupVideo = true;
+            } else if (typeof videoSource === 'string' && videoSource.startsWith('http')) {
+                const response = await axios({
+                    method: 'get',
+                    url: videoSource,
+                    responseType: 'arraybuffer'
+                });
+                videoPath = path.join(frameDir, `temp-video-${timestamp}.mp4`);
+                fs.writeFileSync(videoPath, Buffer.from(response.data));
+                shouldCleanupVideo = true;
+            } else {
+                if (!fs.existsSync(videoSource)) {
+                    throw new Error(`Video file not found: ${videoSource}`);
+                }
+                videoPath = videoSource;
+            }
+
+            // Calculate time range
+            const startTime = options?.startTime ?? 0;
+            const endTime = options?.endTime ?? videoInfo.duration;
+            const duration = endTime - startTime;
+
+            // Extract all frames using ffmpeg
+            // Use -fps_mode passthrough to extract every frame (no frame skipping)
+            // Don't use -f flag, let FFmpeg infer format from file extension
+            const qualityFlag = outputFormat === 'jpg' ? `-q:v ${quality}` : '';
+            const pixFmt = outputFormat === 'png' ? '-pix_fmt rgba' : '-pix_fmt rgb24';
+            const outputTemplate = path.join(outputDir, `${prefix}-%06d.${outputFormat}`);
+
+            const escapedVideoPath = videoPath.replace(/"/g, '\\"');
+            const escapedOutputTemplate = outputTemplate.replace(/"/g, '\\"');
+
+            // Use -fps_mode passthrough instead of deprecated -vsync 0
+            // Use -ss after -i for more accurate frame extraction
+            const command = `ffmpeg -i "${escapedVideoPath}" -ss ${startTime} -t ${duration} -fps_mode passthrough ${pixFmt} ${qualityFlag} -y "${escapedOutputTemplate}"`;
+
+            await execAsync(command, {
+                timeout: 300000, // 5 minute timeout for large videos
+                maxBuffer: 10 * 1024 * 1024
+            });
+
+            // Collect all extracted frame files
+            const frames: Array<{ source: string; frameNumber: number; time: number }> = [];
+            let frameIndex = 0;
+            let currentTime = startTime;
+
+            while (true) {
+                const frameNumber = frameIndex + 1;
+                const framePath = path.join(outputDir, `${prefix}-${String(frameNumber).padStart(6, '0')}.${outputFormat}`);
+
+                if (fs.existsSync(framePath)) {
+                    frames.push({
+                        source: framePath,
+                        frameNumber: frameIndex,
+                        time: currentTime
+                    });
+                    currentTime += 1 / videoInfo.fps; // Increment by frame duration
+                    frameIndex++;
+                } else {
+                    break; // No more frames
+                }
+            }
+
+            // Cleanup temp video if created
+            if (shouldCleanupVideo && fs.existsSync(videoPath)) {
+                fs.unlinkSync(videoPath);
+            }
+
+            console.log(`✅ Extracted ${frames.length} frames from video`);
+            return frames;
+        } catch (error) {
+            const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
+            if (errorMessage.includes('FFMPEG NOT FOUND') || errorMessage.includes('FFmpeg')) {
+                throw error;
+            }
+            throw new Error(`extractAllFrames failed: ${errorMessage}`);
+        }
+    }
+
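And a usage sketch for extractAllFrames, which shells out to ffmpeg with -fps_mode passthrough and then walks the numbered output files. The path and option values below are placeholders; the ApexPainter import is assumed as in the earlier sketches:

```ts
import { ApexPainter } from "apexify.js";

const painter = new ApexPainter();

async function dumpFrames(): Promise<void> {
    // Dump the first two seconds of a clip as PNGs into ./frames.
    const frames = await painter.extractAllFrames("./input.mp4", {
        outputFormat: "png",
        outputDirectory: "./frames",
        prefix: "shot",
        startTime: 0,
        endTime: 2
    });
    // e.g. { source: 'frames/shot-000001.png', frameNumber: 0, time: 0 }
    console.log(frames[0]);
}

dumpFrames().catch(console.error);
```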