@effing/ffs 0.4.0 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-5SGOYTM2.js +341 -0
- package/dist/chunk-5SGOYTM2.js.map +1 -0
- package/dist/{chunk-J64HSZNQ.js → chunk-N3D6I2BD.js} +179 -499
- package/dist/chunk-N3D6I2BD.js.map +1 -0
- package/dist/chunk-QPZEAH3J.js +342 -0
- package/dist/{chunk-7FMPCMLO.js → chunk-ZERUSI5T.js} +10 -5
- package/dist/chunk-ZERUSI5T.js.map +1 -0
- package/dist/handlers/index.d.ts +1 -1
- package/dist/handlers/index.js +2 -2
- package/dist/index.js +2 -1
- package/dist/render-NEDCS65O.js +8 -0
- package/dist/render-NEDCS65O.js.map +1 -0
- package/dist/render-VWBOR3Y2.js +936 -0
- package/dist/server.js +22 -1259
- package/dist/server.js.map +1 -1
- package/package.json +5 -3
- package/dist/chunk-7FMPCMLO.js.map +0 -1
- package/dist/chunk-J64HSZNQ.js.map +0 -1
|
@@ -0,0 +1,936 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import {
|
|
3
|
+
ffsFetch,
|
|
4
|
+
storeKeys
|
|
5
|
+
} from "./chunk-QPZEAH3J.js";
|
|
6
|
+
|
|
7
|
+
// src/render.ts
|
|
8
|
+
import { Readable } from "stream";
|
|
9
|
+
import { createReadStream } from "fs";
|
|
10
|
+
|
|
11
|
+
// src/motion.ts
|
|
12
|
+
/**
 * Translates an easing name into an FFmpeg eval expression over a
 * normalized-time sub-expression (expected to evaluate in [0,1]).
 * Unknown easing names fall back to linear.
 */
function getEasingExpression(tNormExpr, easingType) {
  const builders = {
    "ease-in": (t) => `pow(${t},2)`,
    "ease-out": (t) => `(1-pow(1-(${t}),2))`,
    "ease-in-out": (t) => `if(lt(${t},0.5),2*pow(${t},2),1-pow(-2*(${t})+2,2)/2)`
  };
  const build = builders[easingType] ?? ((t) => `(${t})`);
  return build(tNormExpr);
}
|
|
25
|
+
/**
 * Builds overlay x/y expression components for a "slide" motion.
 *
 * The layer slides from an off-screen offset to its resting position
 * (or the reverse when `motion.reverse` is set), eased per `motion.easing`.
 * Offsets are expressed in FFmpeg overlay variables (W/H = main frame size).
 *
 * Fix: the original direction switch had no default, so an unrecognized
 * direction produced `undefined` components that were later interpolated
 * as the literal string "undefined" into the filter graph. Now throws.
 */
function processSlideMotion(motion, relativeTimeExpr) {
  const duration = motion.duration ?? 1;
  const distance = motion.distance ?? 1;
  const reverse = motion.reverse ?? false;
  const easing = motion.easing ?? "linear";
  const tNormExpr = `(${relativeTimeExpr})/${duration}`;
  const easedProgressExpr = getEasingExpression(tNormExpr, easing);
  // Forward slides move from offset toward 0, so invert the eased progress.
  const finalTimeFactorExpr = reverse ? easedProgressExpr : `(1-(${easedProgressExpr}))`;
  // Axis and signed off-screen offset per direction (W/H are overlay vars).
  const directionConfig = {
    left: { axis: "x", offset: `${distance}*W` },
    right: { axis: "x", offset: `-${distance}*W` },
    up: { axis: "y", offset: `${distance}*H` },
    down: { axis: "y", offset: `-${distance}*H` }
  };
  const config = directionConfig[motion.direction];
  if (!config) {
    throw new Error(
      `Unsupported slide direction: ${motion.direction}`
    );
  }
  const active = `(${config.offset})*${finalTimeFactorExpr}`;
  const initial = reverse ? "0" : config.offset;
  const final = reverse ? config.offset : "0";
  if (config.axis === "x") {
    return {
      initialX: initial,
      initialY: "0",
      activeX: active,
      activeY: "0",
      finalX: final,
      finalY: "0",
      duration
    };
  }
  return {
    initialX: "0",
    initialY: initial,
    activeX: "0",
    activeY: active,
    finalX: "0",
    finalY: final,
    duration
  };
}
|
|
83
|
+
/**
 * Builds overlay x/y expression components for a "bounce" motion.
 *
 * The y expression is a four-piece quadratic matching the classic
 * easeOutBounce curve (coefficient 7.5625, breakpoints ~1/2.75, 2/2.75,
 * 2.5/2.75 of normalized time), scaled by `amplitude * overlay_h` and
 * starting from an off-screen offset of `-overlay_h*amplitude`.
 * NOTE(review): past the last breakpoint (t >= ~0.954545) the expression
 * snaps to finalY rather than finishing the last quadratic lobe —
 * presumably intentional in the original build; confirm before changing.
 */
function processBounceMotion(motion, relativeTimeExpr) {
  const amplitude = motion.amplitude ?? 0.5;
  const duration = motion.duration ?? 1;
  // Start fully above the resting position by amplitude * layer height.
  const initialY = `-overlay_h*${amplitude}`;
  const finalY = "0";
  // Normalize elapsed time to [0,1] over the motion duration.
  const tNormExpr = `(${relativeTimeExpr})/${duration}`;
  const activeBounceExpression = `if(lt(${tNormExpr},0.363636),${initialY}+overlay_h*${amplitude}*(7.5625*${tNormExpr}*${tNormExpr}),if(lt(${tNormExpr},0.727273),${initialY}+overlay_h*${amplitude}*(7.5625*(${tNormExpr}-0.545455)*(${tNormExpr}-0.545455)+0.75),if(lt(${tNormExpr},0.909091),${initialY}+overlay_h*${amplitude}*(7.5625*(${tNormExpr}-0.818182)*(${tNormExpr}-0.818182)+0.9375),if(lt(${tNormExpr},0.954545),${initialY}+overlay_h*${amplitude}*(7.5625*(${tNormExpr}-0.954545)*(${tNormExpr}-0.954545)+0.984375),${finalY}))))`;
  return {
    initialX: "0",
    initialY,
    activeX: "0",
    activeY: activeBounceExpression,
    // This expression now scales with duration
    finalX: "0",
    finalY,
    duration
    // Return the actual duration used
  };
}
|
|
102
|
+
/**
 * Builds overlay x/y expression components for a "shake" motion:
 * a circular jitter of `intensity` pixels at `frequency` half-cycles
 * per second (sin on x, cos on y). The layer rests at 0 before and
 * after the motion window.
 */
function processShakeMotion(motion, relativeTimeExpr) {
  const intensity = motion.intensity ?? 10;
  const frequency = motion.frequency ?? 4;
  const duration = motion.duration ?? 1;
  return {
    initialX: "0",
    initialY: "0",
    activeX: `${intensity}*sin(${relativeTimeExpr}*PI*${frequency})`,
    activeY: `${intensity}*cos(${relativeTimeExpr}*PI*${frequency})`,
    finalX: "0",
    finalY: "0",
    duration
  };
}
|
|
118
|
+
/**
 * Converts a layer motion spec into overlay `x='…':y='…'` arguments.
 * Before the motion's start time the initial position is held, during
 * the motion the active expression runs, and afterwards the final
 * position is held. Returns a static "x=0:y=0" when no motion is given.
 * @throws when the motion type is not bounce, shake, or slide.
 */
function processMotion(delay, motion) {
  if (!motion) return "x=0:y=0";
  const start = delay + (motion.start ?? 0);
  const relativeTimeExpr = `(t-${start})`;
  const processors = {
    bounce: processBounceMotion,
    shake: processShakeMotion,
    slide: processSlideMotion
  };
  const processor = processors[motion.type];
  if (!processor) {
    throw new Error(
      `Unsupported motion type: ${motion.type}`
    );
  }
  const components = processor(motion, relativeTimeExpr);
  const motionEndTime = start + components.duration;
  // Piecewise hold-before / animate / hold-after expression for one axis.
  const piecewise = (initial, active, final) =>
    `if(lt(t,${start}),${initial},if(lt(t,${motionEndTime}),${active},${final}))`;
  const xArg = piecewise(components.initialX, components.activeX, components.finalX);
  const yArg = piecewise(components.initialY, components.activeY, components.finalY);
  return `x='${xArg}':y='${yArg}'`;
}
|
|
144
|
+
|
|
145
|
+
// src/effect.ts
|
|
146
|
+
/** Builds an FFmpeg alpha fade-in filter starting at `effect.start`. */
function processFadeIn(effect, _frameRate, _frameWidth, _frameHeight) {
  const { start, duration } = effect;
  return `fade=t=in:st=${start}:d=${duration}:alpha=1`;
}
|
|
149
|
+
/** Builds an FFmpeg alpha fade-out filter starting at `effect.start`. */
function processFadeOut(effect, _frameRate, _frameWidth, _frameHeight) {
  const { start, duration } = effect;
  return `fade=t=out:st=${start}:d=${duration}:alpha=1`;
}
|
|
152
|
+
/** Builds a hue filter ramping saturation 0→1 over the effect window. */
function processSaturateIn(effect, _frameRate, _frameWidth, _frameHeight) {
  const { start, duration } = effect;
  return `hue='s=max(0,min(1,(t-${start})/${duration}))'`;
}
|
|
155
|
+
/** Builds a hue filter ramping saturation 1→0, ending at start+duration. */
function processSaturateOut(effect, _frameRate, _frameWidth, _frameHeight) {
  const endTime = effect.start + effect.duration;
  return `hue='s=max(0,min(1,(${endTime}-t)/${effect.duration}))'`;
}
|
|
158
|
+
/**
 * Builds an FFmpeg scroll filter for the effect's direction.
 *
 * `scroll` is the fraction of the frame traversed (distance/(1+distance)),
 * and `speed` is that fraction spread across duration*frameRate frames.
 * Right/down scroll negatively from a shifted start position.
 *
 * Fix: the original switch had no default, so an unknown direction
 * silently returned `undefined`; now throws, matching `processEffect`'s
 * error style for unsupported values.
 */
function processScroll(effect, frameRate, _frameWidth, _frameHeight) {
  const distance = effect.distance ?? 1;
  const scroll = distance / (1 + distance);
  const speed = scroll / (effect.duration * frameRate);
  switch (effect.direction) {
    case "left":
      return `scroll=h=${speed}`;
    case "right":
      return `scroll=hpos=${1 - scroll}:h=-${speed}`;
    case "up":
      return `scroll=v=${speed}`;
    case "down":
      return `scroll=vpos=${1 - scroll}:v=-${speed}`;
    default:
      throw new Error(
        `Unsupported scroll direction: ${effect.direction}`
      );
  }
}
|
|
173
|
+
/**
 * Dispatches a single effect spec to its filter builder.
 * @throws when the effect type is not one of the supported kinds.
 */
function processEffect(effect, frameRate, frameWidth, frameHeight) {
  const handlers = {
    "fade-in": processFadeIn,
    "fade-out": processFadeOut,
    "saturate-in": processSaturateIn,
    "saturate-out": processSaturateOut,
    scroll: processScroll
  };
  const handler = handlers[effect.type];
  if (!handler) {
    throw new Error(
      `Unsupported effect type: ${effect.type}`
    );
  }
  return handler(effect, frameRate, frameWidth, frameHeight);
}
|
|
192
|
+
/**
 * Builds a comma-joined filter chain for a layer's effects.
 * Returns "" when there are no effects.
 */
function processEffects(effects, frameRate, frameWidth, frameHeight) {
  if (!effects?.length) return "";
  return effects
    .map((effect) => processEffect(effect, frameRate, frameWidth, frameHeight))
    .join(",");
}
|
|
201
|
+
|
|
202
|
+
// src/ffmpeg.ts
|
|
203
|
+
import { spawn } from "child_process";
|
|
204
|
+
import { pipeline } from "stream";
|
|
205
|
+
import fs from "fs/promises";
|
|
206
|
+
import os from "os";
|
|
207
|
+
import path from "path";
|
|
208
|
+
import tar from "tar-stream";
|
|
209
|
+
import { createWriteStream } from "fs";
|
|
210
|
+
import { promisify } from "util";
|
|
211
|
+
// Promise-returning stream.pipeline, used to await stream completion.
var pump = promisify(pipeline);
// Memoized ffmpeg binary path; populated lazily by getFFmpegBin().
var resolvedBin;
|
|
213
|
+
/**
 * Resolves the ffmpeg binary to spawn, memoized in module-level
 * `resolvedBin`. Resolution order:
 *   1. the FFMPEG environment variable,
 *   2. the optional @effing/ffmpeg package's bundled binary,
 *   3. plain "ffmpeg" from PATH.
 */
async function getFFmpegBin() {
  if (resolvedBin) return resolvedBin;
  if (process.env.FFMPEG) {
    resolvedBin = process.env.FFMPEG;
    return resolvedBin;
  }
  try {
    // Optional dependency: fall through silently when not installed.
    const { pathToFFmpeg } = await import("@effing/ffmpeg");
    if (pathToFFmpeg) {
      resolvedBin = pathToFFmpeg;
      return resolvedBin;
    }
  } catch {
  }
  resolvedBin = "ffmpeg";
  return resolvedBin;
}
|
|
230
|
+
/**
 * Immutable description of an ffmpeg invocation: global args, inputs,
 * a filter_complex graph, and output args. `buildArgs` materializes
 * the full argv once input sources have been resolved to paths/URLs.
 */
var FFmpegCommand = class {
  globalArgs;
  inputs;
  filterComplex;
  outputArgs;
  constructor(globalArgs, inputs, filterComplex, outputArgs) {
    this.globalArgs = globalArgs;
    this.inputs = inputs;
    this.filterComplex = filterComplex;
    this.outputArgs = outputArgs;
  }
  /**
   * Builds the final argv. Color inputs carry their own lavfi `-i` in
   * preArgs; animation inputs point ffmpeg at an extracted frame
   * sequence pattern; everything else is a plain `-i <resolved source>`.
   */
  buildArgs(inputResolver) {
    const inputArgs = this.inputs.flatMap((input) => {
      if (input.type === "color") {
        return [...input.preArgs];
      }
      if (input.type === "animation") {
        return [...input.preArgs, "-i", path.join(inputResolver(input), "frame_%05d")];
      }
      return [...input.preArgs, "-i", inputResolver(input)];
    });
    return [
      ...this.globalArgs,
      ...inputArgs,
      "-filter_complex",
      this.filterComplex,
      ...this.outputArgs
    ];
  }
};
|
|
266
|
+
/**
 * Executes an FFmpegCommand: stages every non-color input into a temp
 * directory (or resolves it to a direct URL), spawns ffmpeg, and
 * returns its stdout stream. The temp directory is cleaned up when the
 * process closes.
 */
var FFmpegRunner = class {
  command;
  ffmpegProc;
  constructor(command) {
    this.command = command;
  }
  /**
   * Stages inputs and spawns ffmpeg.
   * @param sourceFetcher - async ({type, src}) => Readable for a source
   * @param imageTransformer - optional async stream transform for images/frames
   * @param referenceResolver - optional mapping of "#ref" sources to URLs
   * @param urlTransformer - optional rewrite applied to direct http(s) URLs
   * @returns ffmpeg's stdout stream
   */
  async run(sourceFetcher, imageTransformer, referenceResolver, urlTransformer) {
    const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "ffs-"));
    const fileMapping = /* @__PURE__ */ new Map();
    const fetchCache = /* @__PURE__ */ new Map();
    // Fetches one input and stages it on disk; returns the path ffmpeg reads.
    const fetchAndSaveSource = async (input, sourceUrl, inputName) => {
      const stream = await sourceFetcher({
        type: input.type,
        src: sourceUrl
      });
      if (input.type === "animation") {
        // Animations arrive as a tar of "frame_*" images; extract them
        // into a per-input directory for image2 sequence reading.
        const extractionDir = path.join(tempDir, inputName);
        await fs.mkdir(extractionDir, { recursive: true });
        const extract = tar.extract();
        const extractPromise = new Promise((resolve, reject) => {
          extract.on("entry", async (header, stream2, next) => {
            try {
              if (header.name.startsWith("frame_")) {
                const transformedStream = imageTransformer ? await imageTransformer(stream2) : stream2;
                const outputPath = path.join(extractionDir, header.name);
                const writeStream = createWriteStream(outputPath);
                transformedStream.pipe(writeStream);
                writeStream.on("finish", next);
                writeStream.on("error", reject);
              } else {
                // BUG FIX: non-"frame_" entries previously neither drained
                // the entry stream nor called next(), which stalls
                // tar-stream extraction forever. Drain and advance.
                stream2.resume();
                stream2.on("end", next);
              }
            } catch (err) {
              // Surface imageTransformer failures instead of leaving an
              // unhandled rejection inside the async event handler.
              reject(err);
            }
          });
          extract.on("finish", resolve);
          extract.on("error", reject);
        });
        stream.pipe(extract);
        await extractPromise;
        return extractionDir;
      } else if (input.type === "image" && imageTransformer) {
        const tempFile = path.join(tempDir, inputName);
        const transformedStream = await imageTransformer(stream);
        const writeStream = createWriteStream(tempFile);
        transformedStream.on("error", (e) => writeStream.destroy(e));
        await pump(transformedStream, writeStream);
        return tempFile;
      } else {
        const tempFile = path.join(tempDir, inputName);
        const writeStream = createWriteStream(tempFile);
        stream.on("error", (e) => writeStream.destroy(e));
        await pump(stream, writeStream);
        return tempFile;
      }
    };
    await Promise.all(
      this.command.inputs.map(async (input) => {
        if (input.type === "color") return;
        const inputName = `ffmpeg_input_${input.index.toString().padStart(3, "0")}`;
        const sourceUrl = referenceResolver ? referenceResolver(input.source) : input.source;
        if ((input.type === "video" || input.type === "audio") && (sourceUrl.startsWith("http://") || sourceUrl.startsWith("https://"))) {
          // ffmpeg can read http(s) video/audio directly; skip staging.
          const finalUrl = urlTransformer ? urlTransformer(sourceUrl) : sourceUrl;
          fileMapping.set(input.index, finalUrl);
          return;
        }
        // "#"-prefixed sources are shared references: fetch each only once
        // even when several inputs point at the same reference.
        const shouldCache = input.source.startsWith("#");
        if (shouldCache) {
          let fetchPromise = fetchCache.get(input.source);
          if (!fetchPromise) {
            fetchPromise = fetchAndSaveSource(input, sourceUrl, inputName);
            fetchCache.set(input.source, fetchPromise);
          }
          const filePath = await fetchPromise;
          fileMapping.set(input.index, filePath);
        } else {
          const filePath = await fetchAndSaveSource(
            input,
            sourceUrl,
            inputName
          );
          fileMapping.set(input.index, filePath);
        }
      })
    );
    const finalArgs = this.command.buildArgs((input) => {
      const filePath = fileMapping.get(input.index);
      if (!filePath)
        throw new Error(`File for input index ${input.index} not found`);
      return filePath;
    });
    const ffmpegProc = spawn(await getFFmpegBin(), finalArgs);
    ffmpegProc.stderr.on("data", (data) => {
      console.error(data.toString());
    });
    // Best-effort temp cleanup once ffmpeg exits.
    ffmpegProc.on("close", async () => {
      try {
        await fs.rm(tempDir, { recursive: true, force: true });
      } catch (err) {
        console.error("Error removing temp directory:", err);
      }
    });
    this.ffmpegProc = ffmpegProc;
    return ffmpegProc.stdout;
  }
  /** Terminates a running ffmpeg process, if any. */
  close() {
    if (this.ffmpegProc) {
      this.ffmpegProc.kill("SIGTERM");
      this.ffmpegProc = void 0;
    }
  }
};
|
|
373
|
+
|
|
374
|
+
// src/transition.ts
|
|
375
|
+
/**
 * Maps a transition spec to an FFmpeg xfade transition name.
 * @throws when the transition type is not supported.
 */
function processTransition(transition) {
  // Types whose spec maps directly to the xfade name.
  if (transition.type === "dissolve" || transition.type === "pixelize" || transition.type === "radial") {
    return transition.type;
  }
  if (transition.type === "zoom") {
    return "zoomin";
  }
  if (transition.type === "fade") {
    // A "through" color (e.g. black/white) takes precedence over easing.
    if ("through" in transition) {
      return `fade${transition.through}`;
    }
    const byEasing = {
      linear: "fade",
      "ease-in": "fadeslow",
      "ease-out": "fadefast"
    };
    return byEasing[transition.easing ?? "linear"];
  }
  if (transition.type === "barn") {
    const orientation = transition.orientation ?? "horizontal";
    const mode = transition.mode ?? "open";
    return `${orientation === "vertical" ? "vert" : "horz"}${mode}`;
  }
  if (transition.type === "circle") {
    return `circle${transition.mode ?? "open"}`;
  }
  if (transition.type === "wipe" || transition.type === "slide" || transition.type === "smooth") {
    return `${transition.type}${transition.direction ?? "left"}`;
  }
  if (transition.type === "slice") {
    const prefixByDirection = {
      left: "hl",
      right: "hr",
      up: "vu",
      down: "vd"
    };
    return `${prefixByDirection[transition.direction ?? "left"]}${transition.type}`;
  }
  throw new Error(
    `Unsupported transition type: ${transition.type}`
  );
}
|
|
428
|
+
|
|
429
|
+
// src/render.ts
|
|
430
|
+
import sharp from "sharp";
|
|
431
|
+
import { fileURLToPath } from "url";
|
|
432
|
+
var EffieRenderer = class {
|
|
433
|
+
effieData;
|
|
434
|
+
ffmpegRunner;
|
|
435
|
+
allowLocalFiles;
|
|
436
|
+
transientStore;
|
|
437
|
+
httpProxy;
|
|
438
|
+
/**
 * @param effieData - the render document (fps, dimensions, segments, audio…)
 * @param options - optional flags: allowLocalFiles (default false) gates
 *   file: URLs in fetchSource; transientStore enables source caching;
 *   httpProxy is stored for downstream use.
 */
constructor(effieData, options) {
  this.effieData = effieData;
  this.allowLocalFiles = options?.allowLocalFiles ?? false;
  this.transientStore = options?.transientStore;
  this.httpProxy = options?.httpProxy;
}
|
|
444
|
+
/**
 * Resolves a source reference to a readable stream.
 * Supports, in order: data: URLs (base64 or percent-encoded),
 * file: URLs (only when allowLocalFiles is set), a transient-store
 * cache lookup, and finally an HTTP fetch via ffsFetch.
 * @throws on malformed data URLs, disallowed file URLs, or failed fetches.
 */
async fetchSource(src) {
  if (src.startsWith("data:")) {
    const commaIndex = src.indexOf(",");
    if (commaIndex === -1) {
      throw new Error("Invalid data URL");
    }
    // meta is everything between "data:" and the comma, e.g. "image/png;base64".
    const meta = src.slice(5, commaIndex);
    const isBase64 = meta.endsWith(";base64");
    const data = src.slice(commaIndex + 1);
    const buffer = isBase64 ? Buffer.from(data, "base64") : Buffer.from(decodeURIComponent(data));
    return Readable.from(buffer);
  }
  if (src.startsWith("file:")) {
    // Local file access is opt-in to avoid path traversal from untrusted docs.
    if (!this.allowLocalFiles) {
      throw new Error(
        "Local file paths are not allowed. Use allowLocalFiles option for trusted operations."
      );
    }
    return createReadStream(fileURLToPath(src));
  }
  if (this.transientStore) {
    // Serve from the transient cache when a prior fetch stored this source.
    const cachedStream = await this.transientStore.getStream(
      storeKeys.source(src)
    );
    if (cachedStream) {
      return cachedStream;
    }
  }
  const response = await ffsFetch(src, {
    headersTimeout: 10 * 60 * 1e3,
    // 10 minutes
    bodyTimeout: 20 * 60 * 1e3
    // 20 minutes
  });
  if (!response.ok) {
    throw new Error(
      `Failed to fetch ${src}: ${response.status} ${response.statusText}`
    );
  }
  if (!response.body) {
    throw new Error(`No body for ${src}`);
  }
  // Convert the WHATWG stream from fetch into a Node Readable.
  return Readable.fromWeb(response.body);
}
|
|
488
|
+
buildAudioFilter({
|
|
489
|
+
duration,
|
|
490
|
+
volume,
|
|
491
|
+
fadeIn,
|
|
492
|
+
fadeOut
|
|
493
|
+
}) {
|
|
494
|
+
const filters = [];
|
|
495
|
+
if (volume !== void 0) {
|
|
496
|
+
filters.push(`volume=${volume}`);
|
|
497
|
+
}
|
|
498
|
+
if (fadeIn !== void 0) {
|
|
499
|
+
filters.push(`afade=type=in:start_time=0:duration=${fadeIn}`);
|
|
500
|
+
}
|
|
501
|
+
if (fadeOut !== void 0) {
|
|
502
|
+
filters.push(
|
|
503
|
+
`afade=type=out:start_time=${duration - fadeOut}:duration=${fadeOut}`
|
|
504
|
+
);
|
|
505
|
+
}
|
|
506
|
+
return filters.length ? filters.join(",") : "anull";
|
|
507
|
+
}
|
|
508
|
+
getFrameDimensions(scaleFactor) {
|
|
509
|
+
return {
|
|
510
|
+
frameWidth: Math.floor(this.effieData.width * scaleFactor / 2) * 2,
|
|
511
|
+
frameHeight: Math.floor(this.effieData.height * scaleFactor / 2) * 2
|
|
512
|
+
};
|
|
513
|
+
}
|
|
514
|
+
/**
|
|
515
|
+
* Builds an FFmpeg input for a background (global or segment).
|
|
516
|
+
*/
|
|
517
|
+
buildBackgroundInput(background, inputIndex, frameWidth, frameHeight) {
|
|
518
|
+
if (background.type === "image") {
|
|
519
|
+
return {
|
|
520
|
+
index: inputIndex,
|
|
521
|
+
source: background.source,
|
|
522
|
+
preArgs: ["-loop", "1", "-framerate", this.effieData.fps.toString()],
|
|
523
|
+
type: "image"
|
|
524
|
+
};
|
|
525
|
+
} else if (background.type === "video") {
|
|
526
|
+
return {
|
|
527
|
+
index: inputIndex,
|
|
528
|
+
source: background.source,
|
|
529
|
+
preArgs: ["-stream_loop", "-1"],
|
|
530
|
+
type: "video"
|
|
531
|
+
};
|
|
532
|
+
}
|
|
533
|
+
return {
|
|
534
|
+
index: inputIndex,
|
|
535
|
+
source: "",
|
|
536
|
+
preArgs: [
|
|
537
|
+
"-f",
|
|
538
|
+
"lavfi",
|
|
539
|
+
"-i",
|
|
540
|
+
`color=${background.color}:size=${frameWidth}x${frameHeight}:rate=${this.effieData.fps}`
|
|
541
|
+
],
|
|
542
|
+
type: "color"
|
|
543
|
+
};
|
|
544
|
+
}
|
|
545
|
+
buildOutputArgs(outputFilename) {
|
|
546
|
+
return [
|
|
547
|
+
"-map",
|
|
548
|
+
"[outv]",
|
|
549
|
+
"-map",
|
|
550
|
+
"[outa]",
|
|
551
|
+
"-c:v",
|
|
552
|
+
"libx264",
|
|
553
|
+
"-r",
|
|
554
|
+
this.effieData.fps.toString(),
|
|
555
|
+
"-pix_fmt",
|
|
556
|
+
"yuv420p",
|
|
557
|
+
"-preset",
|
|
558
|
+
"fast",
|
|
559
|
+
"-crf",
|
|
560
|
+
"28",
|
|
561
|
+
"-c:a",
|
|
562
|
+
"aac",
|
|
563
|
+
"-movflags",
|
|
564
|
+
"frag_keyframe+empty_moov",
|
|
565
|
+
"-f",
|
|
566
|
+
"mp4",
|
|
567
|
+
outputFilename
|
|
568
|
+
];
|
|
569
|
+
}
|
|
570
|
+
buildLayerInput(layer, duration, inputIndex) {
|
|
571
|
+
let preArgs = [];
|
|
572
|
+
if (layer.type === "image") {
|
|
573
|
+
preArgs = [
|
|
574
|
+
"-loop",
|
|
575
|
+
"1",
|
|
576
|
+
"-t",
|
|
577
|
+
duration.toString(),
|
|
578
|
+
"-framerate",
|
|
579
|
+
this.effieData.fps.toString()
|
|
580
|
+
];
|
|
581
|
+
} else if (layer.type === "animation") {
|
|
582
|
+
preArgs = ["-f", "image2", "-framerate", this.effieData.fps.toString()];
|
|
583
|
+
}
|
|
584
|
+
return {
|
|
585
|
+
index: inputIndex,
|
|
586
|
+
source: layer.source,
|
|
587
|
+
preArgs,
|
|
588
|
+
type: layer.type
|
|
589
|
+
};
|
|
590
|
+
}
|
|
591
|
+
/**
 * Builds filter chain for all layers in a segment.
 * Each layer is trimmed, gets its effect chain, is optionally delayed by
 * prepending a blank clip, and is overlaid (with optional motion
 * expressions) onto the running composite, bottom-up.
 * @param segment - The segment containing layers
 * @param bgLabel - Label for the background input (e.g., "bg_seg0" or "bg_seg")
 * @param labelPrefix - Prefix for generated labels (e.g., "seg0_" or "")
 * @param layerInputOffset - Starting input index for layers
 * @param frameWidth - Frame width for nullsrc
 * @param frameHeight - Frame height for nullsrc
 * @param outputLabel - Label for the final video output
 * @returns Array of filter parts to add to the filter chain
 */
buildLayerFilters(segment, bgLabel, labelPrefix, layerInputOffset, frameWidth, frameHeight, outputLabel) {
  const filterParts = [];
  let currentVidLabel = bgLabel;
  for (let l = 0; l < segment.layers.length; l++) {
    const inputIdx = layerInputOffset + l;
    const layerLabel = `${labelPrefix}layer${l}`;
    const layer = segment.layers[l];
    const effectChain = layer.effects ? processEffects(
      layer.effects,
      this.effieData.fps,
      frameWidth,
      frameHeight
    ) : "";
    // Trim to segment length, apply effects, normalize SAR and timestamps.
    filterParts.push(
      `[${inputIdx}:v]trim=start=0:duration=${segment.duration},${effectChain ? effectChain + "," : ""}setsar=1,setpts=PTS-STARTPTS[${layerLabel}]`
    );
    let overlayInputLabel = layerLabel;
    const delay = layer.delay ?? 0;
    if (delay > 0) {
      // Prepend `delay` seconds of blank video and concat so the layer
      // starts partway into the segment.
      filterParts.push(
        `nullsrc=size=${frameWidth}x${frameHeight}:duration=${delay},setpts=PTS-STARTPTS[null_${layerLabel}]`
      );
      filterParts.push(
        `[null_${layerLabel}][${layerLabel}]concat=n=2:v=1:a=0[delayed_${layerLabel}]`
      );
      overlayInputLabel = `delayed_${layerLabel}`;
    }
    const overlayOutputLabel = `${labelPrefix}tmp${l}`;
    // Motion yields time-dependent x/y expressions; otherwise pin to 0:0.
    const offset = layer.motion ? processMotion(delay, layer.motion) : "0:0";
    const fromTime = layer.from ?? 0;
    const untilTime = layer.until ?? segment.duration;
    filterParts.push(
      `[${currentVidLabel}][${overlayInputLabel}]overlay=${offset}:enable='between(t,${fromTime},${untilTime})',fps=${this.effieData.fps}[${overlayOutputLabel}]`
    );
    currentVidLabel = overlayOutputLabel;
  }
  // Rename the last composite to the requested output label.
  filterParts.push(`[${currentVidLabel}]null[${outputLabel}]`);
  return filterParts;
}
|
|
641
|
+
/**
 * Applies xfade/concat transitions between video segments.
 * Modifies videoSegmentLabels in place to update labels after transitions.
 * `transitionOffset` tracks the accumulated timeline position; an xfade
 * overlaps the two segments, so its duration is subtracted from the
 * offset before use and the next segment's duration is added after.
 * @param filterParts - Array to append filter parts to
 * @param videoSegmentLabels - Array of video segment labels (modified in place)
 */
applyTransitions(filterParts, videoSegmentLabels) {
  let transitionOffset = 0;
  this.effieData.segments.forEach((segment, i) => {
    if (i === 0) {
      // First segment just seeds the timeline; nothing to join yet.
      transitionOffset = segment.duration;
      return;
    }
    const combineLabel = `[vid_com${i}]`;
    if (!segment.transition) {
      // No transition: butt-join the segments with concat.
      transitionOffset += segment.duration;
      filterParts.push(
        `${videoSegmentLabels[i - 1]}${videoSegmentLabels[i]}concat=n=2:v=1:a=0,fps=${this.effieData.fps}${combineLabel}`
      );
      videoSegmentLabels[i] = combineLabel;
      return;
    }
    const transitionName = processTransition(segment.transition);
    const transitionDuration = segment.transition.duration;
    // The crossfade begins transitionDuration before the previous
    // segment's end, shortening the overall timeline by that amount.
    transitionOffset -= transitionDuration;
    filterParts.push(
      `${videoSegmentLabels[i - 1]}${videoSegmentLabels[i]}xfade=transition=${transitionName}:duration=${transitionDuration}:offset=${transitionOffset}${combineLabel}`
    );
    videoSegmentLabels[i] = combineLabel;
    transitionOffset += segment.duration;
  });
  // The last combined label becomes the final video output [outv].
  filterParts.push(`${videoSegmentLabels.at(-1)}null[outv]`);
}
|
|
674
|
+
/**
 * Applies general audio mixing: concats segment audio and mixes with global audio if present.
 * @param filterParts - Array to append filter parts to
 * @param audioSegmentLabels - Array of audio segment labels to concat
 * @param totalDuration - Total duration for audio trimming
 * @param generalAudioInputIndex - Input index for general audio (if present)
 */
applyGeneralAudio(filterParts, audioSegmentLabels, totalDuration, generalAudioInputIndex) {
  if (this.effieData.audio) {
    const audioSeek = this.effieData.audio.seek ?? 0;
    const generalAudioFilter = this.buildAudioFilter({
      duration: totalDuration,
      volume: this.effieData.audio.volume,
      fadeIn: this.effieData.audio.fadeIn,
      fadeOut: this.effieData.audio.fadeOut
    });
    // Global track: seek in, clip to the video length, apply volume/fades.
    filterParts.push(
      `[${generalAudioInputIndex}:a]atrim=start=${audioSeek}:duration=${totalDuration},${generalAudioFilter},asetpts=PTS-STARTPTS[general_audio]`
    );
    // Per-segment audio joined end-to-end, also clipped to the video length.
    filterParts.push(
      `${audioSegmentLabels.join("")}concat=n=${this.effieData.segments.length}:v=0:a=1,atrim=start=0:duration=${totalDuration}[segments_audio]`
    );
    // Mix the global track with the segment track into [outa].
    filterParts.push(
      `[general_audio][segments_audio]amix=inputs=2:duration=longest[outa]`
    );
  } else {
    // No global audio: segment audio alone becomes [outa].
    filterParts.push(
      `${audioSegmentLabels.join("")}concat=n=${this.effieData.segments.length}:v=0:a=1[outa]`
    );
  }
}
|
|
705
|
+
buildFFmpegCommand(outputFilename, scaleFactor = 1) {
|
|
706
|
+
const globalArgs = ["-y", "-loglevel", "error"];
|
|
707
|
+
const inputs = [];
|
|
708
|
+
let inputIndex = 0;
|
|
709
|
+
const { frameWidth, frameHeight } = this.getFrameDimensions(scaleFactor);
|
|
710
|
+
const backgroundSeek = this.effieData.background.type === "video" ? this.effieData.background.seek ?? 0 : 0;
|
|
711
|
+
inputs.push(
|
|
712
|
+
this.buildBackgroundInput(
|
|
713
|
+
this.effieData.background,
|
|
714
|
+
inputIndex,
|
|
715
|
+
frameWidth,
|
|
716
|
+
frameHeight
|
|
717
|
+
)
|
|
718
|
+
);
|
|
719
|
+
const globalBgInputIdx = inputIndex;
|
|
720
|
+
inputIndex++;
|
|
721
|
+
const segmentBgInputIndices = [];
|
|
722
|
+
for (const segment of this.effieData.segments) {
|
|
723
|
+
if (segment.background) {
|
|
724
|
+
inputs.push(
|
|
725
|
+
this.buildBackgroundInput(
|
|
726
|
+
segment.background,
|
|
727
|
+
inputIndex,
|
|
728
|
+
frameWidth,
|
|
729
|
+
frameHeight
|
|
730
|
+
)
|
|
731
|
+
);
|
|
732
|
+
segmentBgInputIndices.push(inputIndex);
|
|
733
|
+
inputIndex++;
|
|
734
|
+
} else {
|
|
735
|
+
segmentBgInputIndices.push(null);
|
|
736
|
+
}
|
|
737
|
+
}
|
|
738
|
+
const globalBgSegmentIndices = [];
|
|
739
|
+
for (let i = 0; i < this.effieData.segments.length; i++) {
|
|
740
|
+
if (segmentBgInputIndices[i] === null) {
|
|
741
|
+
globalBgSegmentIndices.push(i);
|
|
742
|
+
}
|
|
743
|
+
}
|
|
744
|
+
for (const segment of this.effieData.segments) {
|
|
745
|
+
for (const layer of segment.layers) {
|
|
746
|
+
inputs.push(this.buildLayerInput(layer, segment.duration, inputIndex));
|
|
747
|
+
inputIndex++;
|
|
748
|
+
}
|
|
749
|
+
}
|
|
750
|
+
for (const segment of this.effieData.segments) {
|
|
751
|
+
if (segment.audio) {
|
|
752
|
+
inputs.push({
|
|
753
|
+
index: inputIndex,
|
|
754
|
+
source: segment.audio.source,
|
|
755
|
+
preArgs: [],
|
|
756
|
+
type: "audio"
|
|
757
|
+
});
|
|
758
|
+
inputIndex++;
|
|
759
|
+
}
|
|
760
|
+
}
|
|
761
|
+
if (this.effieData.audio) {
|
|
762
|
+
inputs.push({
|
|
763
|
+
index: inputIndex,
|
|
764
|
+
source: this.effieData.audio.source,
|
|
765
|
+
preArgs: [],
|
|
766
|
+
type: "audio"
|
|
767
|
+
});
|
|
768
|
+
inputIndex++;
|
|
769
|
+
}
|
|
770
|
+
const numSegmentBgInputs = segmentBgInputIndices.filter(
|
|
771
|
+
(i) => i !== null
|
|
772
|
+
).length;
|
|
773
|
+
const numVideoInputs = 1 + numSegmentBgInputs + this.effieData.segments.reduce((sum, seg) => sum + seg.layers.length, 0);
|
|
774
|
+
let audioCounter = 0;
|
|
775
|
+
let currentTime = 0;
|
|
776
|
+
let layerInputOffset = 1 + numSegmentBgInputs;
|
|
777
|
+
const filterParts = [];
|
|
778
|
+
const videoSegmentLabels = [];
|
|
779
|
+
const audioSegmentLabels = [];
|
|
780
|
+
const globalBgFifoLabels = /* @__PURE__ */ new Map();
|
|
781
|
+
const bgFilter = `fps=${this.effieData.fps},scale=${frameWidth}x${frameHeight}:force_original_aspect_ratio=increase,crop=${frameWidth}:${frameHeight}`;
|
|
782
|
+
if (globalBgSegmentIndices.length === 1) {
|
|
783
|
+
const fifoLabel = `bg_fifo_0`;
|
|
784
|
+
filterParts.push(`[${globalBgInputIdx}:v]${bgFilter},fifo[${fifoLabel}]`);
|
|
785
|
+
globalBgFifoLabels.set(globalBgSegmentIndices[0], fifoLabel);
|
|
786
|
+
} else if (globalBgSegmentIndices.length > 1) {
|
|
787
|
+
const splitCount = globalBgSegmentIndices.length;
|
|
788
|
+
const splitOutputLabels = globalBgSegmentIndices.map(
|
|
789
|
+
(_, i) => `bg_split_${i}`
|
|
790
|
+
);
|
|
791
|
+
filterParts.push(
|
|
792
|
+
`[${globalBgInputIdx}:v]${bgFilter},split=${splitCount}${splitOutputLabels.map((l) => `[${l}]`).join("")}`
|
|
793
|
+
);
|
|
794
|
+
for (let i = 0; i < splitCount; i++) {
|
|
795
|
+
const fifoLabel = `bg_fifo_${i}`;
|
|
796
|
+
filterParts.push(`[${splitOutputLabels[i]}]fifo[${fifoLabel}]`);
|
|
797
|
+
globalBgFifoLabels.set(globalBgSegmentIndices[i], fifoLabel);
|
|
798
|
+
}
|
|
799
|
+
}
|
|
800
|
+
for (let segIdx = 0; segIdx < this.effieData.segments.length; segIdx++) {
|
|
801
|
+
const segment = this.effieData.segments[segIdx];
|
|
802
|
+
const bgLabel = `bg_seg${segIdx}`;
|
|
803
|
+
if (segment.background) {
|
|
804
|
+
const segBgInputIdx = segmentBgInputIndices[segIdx];
|
|
805
|
+
const segBgSeek = segment.background.type === "video" ? segment.background.seek ?? 0 : 0;
|
|
806
|
+
filterParts.push(
|
|
807
|
+
`[${segBgInputIdx}:v]fps=${this.effieData.fps},scale=${frameWidth}x${frameHeight},trim=start=${segBgSeek}:duration=${segment.duration},setpts=PTS-STARTPTS[${bgLabel}]`
|
|
808
|
+
);
|
|
809
|
+
} else {
|
|
810
|
+
const fifoLabel = globalBgFifoLabels.get(segIdx);
|
|
811
|
+
if (fifoLabel) {
|
|
812
|
+
filterParts.push(
|
|
813
|
+
`[${fifoLabel}]trim=start=${backgroundSeek + currentTime}:duration=${segment.duration},setpts=PTS-STARTPTS[${bgLabel}]`
|
|
814
|
+
);
|
|
815
|
+
}
|
|
816
|
+
}
|
|
817
|
+
const vidLabel = `vid_seg${segIdx}`;
|
|
818
|
+
filterParts.push(
|
|
819
|
+
...this.buildLayerFilters(
|
|
820
|
+
segment,
|
|
821
|
+
bgLabel,
|
|
822
|
+
`seg${segIdx}_`,
|
|
823
|
+
layerInputOffset,
|
|
824
|
+
frameWidth,
|
|
825
|
+
frameHeight,
|
|
826
|
+
vidLabel
|
|
827
|
+
)
|
|
828
|
+
);
|
|
829
|
+
layerInputOffset += segment.layers.length;
|
|
830
|
+
videoSegmentLabels.push(`[${vidLabel}]`);
|
|
831
|
+
const nextSegment = this.effieData.segments[segIdx + 1];
|
|
832
|
+
const transitionDuration = nextSegment?.transition?.duration ?? 0;
|
|
833
|
+
const realDuration = Math.max(
|
|
834
|
+
1e-3,
|
|
835
|
+
segment.duration - transitionDuration
|
|
836
|
+
);
|
|
837
|
+
if (segment.audio) {
|
|
838
|
+
const audioInputIndex = numVideoInputs + audioCounter;
|
|
839
|
+
const audioFilter = this.buildAudioFilter({
|
|
840
|
+
duration: realDuration,
|
|
841
|
+
volume: segment.audio.volume,
|
|
842
|
+
fadeIn: segment.audio.fadeIn,
|
|
843
|
+
fadeOut: segment.audio.fadeOut
|
|
844
|
+
});
|
|
845
|
+
filterParts.push(
|
|
846
|
+
`[${audioInputIndex}:a]atrim=start=0:duration=${realDuration},${audioFilter},asetpts=PTS-STARTPTS[aud_seg${segIdx}]`
|
|
847
|
+
);
|
|
848
|
+
audioCounter++;
|
|
849
|
+
} else {
|
|
850
|
+
filterParts.push(
|
|
851
|
+
`anullsrc=r=44100:cl=stereo,atrim=start=0:duration=${realDuration},asetpts=PTS-STARTPTS[aud_seg${segIdx}]`
|
|
852
|
+
);
|
|
853
|
+
}
|
|
854
|
+
audioSegmentLabels.push(`[aud_seg${segIdx}]`);
|
|
855
|
+
currentTime += realDuration;
|
|
856
|
+
}
|
|
857
|
+
this.applyGeneralAudio(
|
|
858
|
+
filterParts,
|
|
859
|
+
audioSegmentLabels,
|
|
860
|
+
currentTime,
|
|
861
|
+
numVideoInputs + audioCounter
|
|
862
|
+
);
|
|
863
|
+
this.applyTransitions(filterParts, videoSegmentLabels);
|
|
864
|
+
const filterComplex = filterParts.join(";");
|
|
865
|
+
const outputArgs = this.buildOutputArgs(outputFilename);
|
|
866
|
+
return new FFmpegCommand(globalArgs, inputs, filterComplex, outputArgs);
|
|
867
|
+
}
|
|
868
|
+
createImageTransformer(scaleFactor) {
|
|
869
|
+
return async (imageStream) => {
|
|
870
|
+
if (scaleFactor === 1) return imageStream;
|
|
871
|
+
const sharpTransformer = sharp();
|
|
872
|
+
imageStream.on("error", (err) => {
|
|
873
|
+
if (!sharpTransformer.destroyed) {
|
|
874
|
+
sharpTransformer.destroy(err);
|
|
875
|
+
}
|
|
876
|
+
});
|
|
877
|
+
sharpTransformer.on("error", (err) => {
|
|
878
|
+
if (!imageStream.destroyed) {
|
|
879
|
+
imageStream.destroy(err);
|
|
880
|
+
}
|
|
881
|
+
});
|
|
882
|
+
imageStream.pipe(sharpTransformer);
|
|
883
|
+
try {
|
|
884
|
+
const metadata = await sharpTransformer.metadata();
|
|
885
|
+
const imageWidth = metadata.width ?? this.effieData.width;
|
|
886
|
+
const imageHeight = metadata.height ?? this.effieData.height;
|
|
887
|
+
return sharpTransformer.resize({
|
|
888
|
+
width: Math.floor(imageWidth * scaleFactor),
|
|
889
|
+
height: Math.floor(imageHeight * scaleFactor)
|
|
890
|
+
});
|
|
891
|
+
} catch (error) {
|
|
892
|
+
if (!sharpTransformer.destroyed) {
|
|
893
|
+
sharpTransformer.destroy(error);
|
|
894
|
+
}
|
|
895
|
+
throw error;
|
|
896
|
+
}
|
|
897
|
+
};
|
|
898
|
+
}
|
|
899
|
+
/**
|
|
900
|
+
* Resolves a source reference to its actual URL.
|
|
901
|
+
* If the source is a #reference, returns the resolved URL.
|
|
902
|
+
* Otherwise, returns the source as-is.
|
|
903
|
+
*/
|
|
904
|
+
resolveReference(src) {
|
|
905
|
+
if (src.startsWith("#")) {
|
|
906
|
+
const sourceName = src.slice(1);
|
|
907
|
+
if (sourceName in this.effieData.sources) {
|
|
908
|
+
return this.effieData.sources[sourceName];
|
|
909
|
+
}
|
|
910
|
+
}
|
|
911
|
+
return src;
|
|
912
|
+
}
|
|
913
|
+
/**
|
|
914
|
+
* Renders the effie data to a video stream.
|
|
915
|
+
* @param scaleFactor - Scale factor for output dimensions
|
|
916
|
+
*/
|
|
917
|
+
async render(scaleFactor = 1) {
|
|
918
|
+
const ffmpegCommand = this.buildFFmpegCommand("-", scaleFactor);
|
|
919
|
+
this.ffmpegRunner = new FFmpegRunner(ffmpegCommand);
|
|
920
|
+
const urlTransformer = this.httpProxy ? (url) => this.httpProxy.transformUrl(url) : void 0;
|
|
921
|
+
return this.ffmpegRunner.run(
|
|
922
|
+
async ({ src }) => this.fetchSource(src),
|
|
923
|
+
this.createImageTransformer(scaleFactor),
|
|
924
|
+
(src) => this.resolveReference(src),
|
|
925
|
+
urlTransformer
|
|
926
|
+
);
|
|
927
|
+
}
|
|
928
|
+
close() {
|
|
929
|
+
if (this.ffmpegRunner) {
|
|
930
|
+
this.ffmpegRunner.close();
|
|
931
|
+
}
|
|
932
|
+
}
|
|
933
|
+
};
|
|
934
|
+
// Public module surface: the renderer class is the only named export.
export {
  EffieRenderer
};
|