@effing/ffs 0.4.1 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +36 -20
- package/dist/chunk-4N2GLGC5.js +341 -0
- package/dist/chunk-4N2GLGC5.js.map +1 -0
- package/dist/{chunk-JDRYI7SR.js → chunk-7KHGAMSG.js} +89 -74
- package/dist/chunk-7KHGAMSG.js.map +1 -0
- package/dist/{chunk-3SM6XYCZ.js → chunk-O7Z6DV2I.js} +179 -504
- package/dist/chunk-O7Z6DV2I.js.map +1 -0
- package/dist/chunk-PERB3C4S.js +342 -0
- package/dist/handlers/index.d.ts +28 -11
- package/dist/handlers/index.js +2 -2
- package/dist/index.d.ts +1 -1
- package/dist/index.js +2 -1
- package/dist/{proxy-qTA69nOV.d.ts → proxy-CsZ5h2Ya.d.ts} +3 -3
- package/dist/render-IKGZZOBP.js +8 -0
- package/dist/render-IKGZZOBP.js.map +1 -0
- package/dist/render-MUKKTCF6.js +936 -0
- package/dist/server.js +101 -1333
- package/dist/server.js.map +1 -1
- package/package.json +5 -3
- package/dist/chunk-3SM6XYCZ.js.map +0 -1
- package/dist/chunk-JDRYI7SR.js.map +0 -1
package/dist/server.js
CHANGED
|
@@ -1,511 +1,20 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
+
import {
|
|
3
|
+
createTransientStore,
|
|
4
|
+
ffsFetch,
|
|
5
|
+
storeKeys
|
|
6
|
+
} from "./chunk-PERB3C4S.js";
|
|
2
7
|
|
|
3
8
|
// src/server.ts
|
|
4
9
|
import express5 from "express";
|
|
5
10
|
import bodyParser from "body-parser";
|
|
6
11
|
|
|
7
|
-
// src/ffmpeg.ts
|
|
8
|
-
import { execFileSync, spawn } from "child_process";
|
|
9
|
-
import { pipeline } from "stream";
|
|
10
|
-
import fs from "fs/promises";
|
|
11
|
-
import os from "os";
|
|
12
|
-
import path from "path";
|
|
13
|
-
import pathToFFmpeg from "ffmpeg-static";
|
|
14
|
-
import tar from "tar-stream";
|
|
15
|
-
import { createWriteStream } from "fs";
|
|
16
|
-
import { promisify } from "util";
|
|
17
|
-
var pump = promisify(pipeline);
|
|
18
|
-
var ffmpegBin = process.env.FFMPEG ?? pathToFFmpeg;
|
|
19
|
-
function getFFmpegVersion() {
|
|
20
|
-
return execFileSync(ffmpegBin, ["-version"], { encoding: "utf8" }).split("\n")[0].trim();
|
|
21
|
-
}
|
|
22
|
-
var FFmpegCommand = class {
|
|
23
|
-
globalArgs;
|
|
24
|
-
inputs;
|
|
25
|
-
filterComplex;
|
|
26
|
-
outputArgs;
|
|
27
|
-
constructor(globalArgs, inputs, filterComplex, outputArgs) {
|
|
28
|
-
this.globalArgs = globalArgs;
|
|
29
|
-
this.inputs = inputs;
|
|
30
|
-
this.filterComplex = filterComplex;
|
|
31
|
-
this.outputArgs = outputArgs;
|
|
32
|
-
}
|
|
33
|
-
buildArgs(inputResolver) {
|
|
34
|
-
const inputArgs = [];
|
|
35
|
-
for (const input of this.inputs) {
|
|
36
|
-
if (input.type === "color") {
|
|
37
|
-
inputArgs.push(...input.preArgs);
|
|
38
|
-
} else if (input.type === "animation") {
|
|
39
|
-
inputArgs.push(
|
|
40
|
-
...input.preArgs,
|
|
41
|
-
"-i",
|
|
42
|
-
path.join(inputResolver(input), "frame_%05d")
|
|
43
|
-
);
|
|
44
|
-
} else {
|
|
45
|
-
inputArgs.push(...input.preArgs, "-i", inputResolver(input));
|
|
46
|
-
}
|
|
47
|
-
}
|
|
48
|
-
const args = [
|
|
49
|
-
...this.globalArgs,
|
|
50
|
-
...inputArgs,
|
|
51
|
-
"-filter_complex",
|
|
52
|
-
this.filterComplex,
|
|
53
|
-
...this.outputArgs
|
|
54
|
-
];
|
|
55
|
-
return args;
|
|
56
|
-
}
|
|
57
|
-
};
|
|
58
|
-
var FFmpegRunner = class {
|
|
59
|
-
command;
|
|
60
|
-
ffmpegProc;
|
|
61
|
-
constructor(command) {
|
|
62
|
-
this.command = command;
|
|
63
|
-
}
|
|
64
|
-
async run(sourceFetcher, imageTransformer, referenceResolver, urlTransformer) {
|
|
65
|
-
const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "ffs-"));
|
|
66
|
-
const fileMapping = /* @__PURE__ */ new Map();
|
|
67
|
-
const fetchCache = /* @__PURE__ */ new Map();
|
|
68
|
-
const fetchAndSaveSource = async (input, sourceUrl, inputName) => {
|
|
69
|
-
const stream = await sourceFetcher({
|
|
70
|
-
type: input.type,
|
|
71
|
-
src: sourceUrl
|
|
72
|
-
});
|
|
73
|
-
if (input.type === "animation") {
|
|
74
|
-
const extractionDir = path.join(tempDir, inputName);
|
|
75
|
-
await fs.mkdir(extractionDir, { recursive: true });
|
|
76
|
-
const extract = tar.extract();
|
|
77
|
-
const extractPromise = new Promise((resolve, reject) => {
|
|
78
|
-
extract.on("entry", async (header, stream2, next) => {
|
|
79
|
-
if (header.name.startsWith("frame_")) {
|
|
80
|
-
const transformedStream = imageTransformer ? await imageTransformer(stream2) : stream2;
|
|
81
|
-
const outputPath = path.join(extractionDir, header.name);
|
|
82
|
-
const writeStream = createWriteStream(outputPath);
|
|
83
|
-
transformedStream.pipe(writeStream);
|
|
84
|
-
writeStream.on("finish", next);
|
|
85
|
-
writeStream.on("error", reject);
|
|
86
|
-
}
|
|
87
|
-
});
|
|
88
|
-
extract.on("finish", resolve);
|
|
89
|
-
extract.on("error", reject);
|
|
90
|
-
});
|
|
91
|
-
stream.pipe(extract);
|
|
92
|
-
await extractPromise;
|
|
93
|
-
return extractionDir;
|
|
94
|
-
} else if (input.type === "image" && imageTransformer) {
|
|
95
|
-
const tempFile = path.join(tempDir, inputName);
|
|
96
|
-
const transformedStream = await imageTransformer(stream);
|
|
97
|
-
const writeStream = createWriteStream(tempFile);
|
|
98
|
-
transformedStream.on("error", (e) => writeStream.destroy(e));
|
|
99
|
-
await pump(transformedStream, writeStream);
|
|
100
|
-
return tempFile;
|
|
101
|
-
} else {
|
|
102
|
-
const tempFile = path.join(tempDir, inputName);
|
|
103
|
-
const writeStream = createWriteStream(tempFile);
|
|
104
|
-
stream.on("error", (e) => writeStream.destroy(e));
|
|
105
|
-
await pump(stream, writeStream);
|
|
106
|
-
return tempFile;
|
|
107
|
-
}
|
|
108
|
-
};
|
|
109
|
-
await Promise.all(
|
|
110
|
-
this.command.inputs.map(async (input) => {
|
|
111
|
-
if (input.type === "color") return;
|
|
112
|
-
const inputName = `ffmpeg_input_${input.index.toString().padStart(3, "0")}`;
|
|
113
|
-
const sourceUrl = referenceResolver ? referenceResolver(input.source) : input.source;
|
|
114
|
-
if ((input.type === "video" || input.type === "audio") && (sourceUrl.startsWith("http://") || sourceUrl.startsWith("https://"))) {
|
|
115
|
-
const finalUrl = urlTransformer ? urlTransformer(sourceUrl) : sourceUrl;
|
|
116
|
-
fileMapping.set(input.index, finalUrl);
|
|
117
|
-
return;
|
|
118
|
-
}
|
|
119
|
-
const shouldCache = input.source.startsWith("#");
|
|
120
|
-
if (shouldCache) {
|
|
121
|
-
let fetchPromise = fetchCache.get(input.source);
|
|
122
|
-
if (!fetchPromise) {
|
|
123
|
-
fetchPromise = fetchAndSaveSource(input, sourceUrl, inputName);
|
|
124
|
-
fetchCache.set(input.source, fetchPromise);
|
|
125
|
-
}
|
|
126
|
-
const filePath = await fetchPromise;
|
|
127
|
-
fileMapping.set(input.index, filePath);
|
|
128
|
-
} else {
|
|
129
|
-
const filePath = await fetchAndSaveSource(
|
|
130
|
-
input,
|
|
131
|
-
sourceUrl,
|
|
132
|
-
inputName
|
|
133
|
-
);
|
|
134
|
-
fileMapping.set(input.index, filePath);
|
|
135
|
-
}
|
|
136
|
-
})
|
|
137
|
-
);
|
|
138
|
-
const finalArgs = this.command.buildArgs((input) => {
|
|
139
|
-
const filePath = fileMapping.get(input.index);
|
|
140
|
-
if (!filePath)
|
|
141
|
-
throw new Error(`File for input index ${input.index} not found`);
|
|
142
|
-
return filePath;
|
|
143
|
-
});
|
|
144
|
-
const ffmpegProc = spawn(ffmpegBin, finalArgs);
|
|
145
|
-
ffmpegProc.stderr.on("data", (data) => {
|
|
146
|
-
console.error(data.toString());
|
|
147
|
-
});
|
|
148
|
-
ffmpegProc.on("close", async () => {
|
|
149
|
-
try {
|
|
150
|
-
await fs.rm(tempDir, { recursive: true, force: true });
|
|
151
|
-
} catch (err) {
|
|
152
|
-
console.error("Error removing temp directory:", err);
|
|
153
|
-
}
|
|
154
|
-
});
|
|
155
|
-
this.ffmpegProc = ffmpegProc;
|
|
156
|
-
return ffmpegProc.stdout;
|
|
157
|
-
}
|
|
158
|
-
close() {
|
|
159
|
-
if (this.ffmpegProc) {
|
|
160
|
-
this.ffmpegProc.kill("SIGTERM");
|
|
161
|
-
this.ffmpegProc = void 0;
|
|
162
|
-
}
|
|
163
|
-
}
|
|
164
|
-
};
|
|
165
|
-
|
|
166
12
|
// src/handlers/shared.ts
|
|
167
13
|
import "express";
|
|
168
14
|
|
|
169
|
-
// src/storage.ts
|
|
170
|
-
import {
|
|
171
|
-
S3Client,
|
|
172
|
-
PutObjectCommand,
|
|
173
|
-
GetObjectCommand,
|
|
174
|
-
HeadObjectCommand,
|
|
175
|
-
DeleteObjectCommand
|
|
176
|
-
} from "@aws-sdk/client-s3";
|
|
177
|
-
import { Upload } from "@aws-sdk/lib-storage";
|
|
178
|
-
import fs2 from "fs/promises";
|
|
179
|
-
import { createReadStream, createWriteStream as createWriteStream2, existsSync } from "fs";
|
|
180
|
-
import { pipeline as pipeline2 } from "stream/promises";
|
|
181
|
-
import path2 from "path";
|
|
182
|
-
import os2 from "os";
|
|
183
|
-
import crypto from "crypto";
|
|
184
|
-
var DEFAULT_SOURCE_TTL_MS = 60 * 60 * 1e3;
|
|
185
|
-
var DEFAULT_JOB_METADATA_TTL_MS = 8 * 60 * 60 * 1e3;
|
|
186
|
-
var S3TransientStore = class {
|
|
187
|
-
client;
|
|
188
|
-
bucket;
|
|
189
|
-
prefix;
|
|
190
|
-
sourceTtlMs;
|
|
191
|
-
jobMetadataTtlMs;
|
|
192
|
-
constructor(options) {
|
|
193
|
-
this.client = new S3Client({
|
|
194
|
-
endpoint: options.endpoint,
|
|
195
|
-
region: options.region ?? "auto",
|
|
196
|
-
credentials: options.accessKeyId ? {
|
|
197
|
-
accessKeyId: options.accessKeyId,
|
|
198
|
-
secretAccessKey: options.secretAccessKey
|
|
199
|
-
} : void 0,
|
|
200
|
-
forcePathStyle: !!options.endpoint
|
|
201
|
-
});
|
|
202
|
-
this.bucket = options.bucket;
|
|
203
|
-
this.prefix = options.prefix ?? "";
|
|
204
|
-
this.sourceTtlMs = options.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;
|
|
205
|
-
this.jobMetadataTtlMs = options.jobMetadataTtlMs ?? DEFAULT_JOB_METADATA_TTL_MS;
|
|
206
|
-
}
|
|
207
|
-
getExpires(ttlMs) {
|
|
208
|
-
return new Date(Date.now() + ttlMs);
|
|
209
|
-
}
|
|
210
|
-
getFullKey(key) {
|
|
211
|
-
return `${this.prefix}${key}`;
|
|
212
|
-
}
|
|
213
|
-
async put(key, stream, ttlMs) {
|
|
214
|
-
const upload = new Upload({
|
|
215
|
-
client: this.client,
|
|
216
|
-
params: {
|
|
217
|
-
Bucket: this.bucket,
|
|
218
|
-
Key: this.getFullKey(key),
|
|
219
|
-
Body: stream,
|
|
220
|
-
Expires: this.getExpires(ttlMs ?? this.sourceTtlMs)
|
|
221
|
-
}
|
|
222
|
-
});
|
|
223
|
-
await upload.done();
|
|
224
|
-
}
|
|
225
|
-
async getStream(key) {
|
|
226
|
-
try {
|
|
227
|
-
const response = await this.client.send(
|
|
228
|
-
new GetObjectCommand({
|
|
229
|
-
Bucket: this.bucket,
|
|
230
|
-
Key: this.getFullKey(key)
|
|
231
|
-
})
|
|
232
|
-
);
|
|
233
|
-
return response.Body;
|
|
234
|
-
} catch (err) {
|
|
235
|
-
const error = err;
|
|
236
|
-
if (error.name === "NoSuchKey" || error.$metadata?.httpStatusCode === 404) {
|
|
237
|
-
return null;
|
|
238
|
-
}
|
|
239
|
-
throw err;
|
|
240
|
-
}
|
|
241
|
-
}
|
|
242
|
-
async exists(key) {
|
|
243
|
-
try {
|
|
244
|
-
await this.client.send(
|
|
245
|
-
new HeadObjectCommand({
|
|
246
|
-
Bucket: this.bucket,
|
|
247
|
-
Key: this.getFullKey(key)
|
|
248
|
-
})
|
|
249
|
-
);
|
|
250
|
-
return true;
|
|
251
|
-
} catch (err) {
|
|
252
|
-
const error = err;
|
|
253
|
-
if (error.name === "NotFound" || error.$metadata?.httpStatusCode === 404) {
|
|
254
|
-
return false;
|
|
255
|
-
}
|
|
256
|
-
throw err;
|
|
257
|
-
}
|
|
258
|
-
}
|
|
259
|
-
async existsMany(keys) {
|
|
260
|
-
const results = await Promise.all(
|
|
261
|
-
keys.map(async (key) => [key, await this.exists(key)])
|
|
262
|
-
);
|
|
263
|
-
return new Map(results);
|
|
264
|
-
}
|
|
265
|
-
async delete(key) {
|
|
266
|
-
try {
|
|
267
|
-
await this.client.send(
|
|
268
|
-
new DeleteObjectCommand({
|
|
269
|
-
Bucket: this.bucket,
|
|
270
|
-
Key: this.getFullKey(key)
|
|
271
|
-
})
|
|
272
|
-
);
|
|
273
|
-
} catch (err) {
|
|
274
|
-
const error = err;
|
|
275
|
-
if (error.name === "NoSuchKey" || error.$metadata?.httpStatusCode === 404) {
|
|
276
|
-
return;
|
|
277
|
-
}
|
|
278
|
-
throw err;
|
|
279
|
-
}
|
|
280
|
-
}
|
|
281
|
-
async putJson(key, data, ttlMs) {
|
|
282
|
-
await this.client.send(
|
|
283
|
-
new PutObjectCommand({
|
|
284
|
-
Bucket: this.bucket,
|
|
285
|
-
Key: this.getFullKey(key),
|
|
286
|
-
Body: JSON.stringify(data),
|
|
287
|
-
ContentType: "application/json",
|
|
288
|
-
Expires: this.getExpires(ttlMs ?? this.jobMetadataTtlMs)
|
|
289
|
-
})
|
|
290
|
-
);
|
|
291
|
-
}
|
|
292
|
-
async getJson(key) {
|
|
293
|
-
try {
|
|
294
|
-
const response = await this.client.send(
|
|
295
|
-
new GetObjectCommand({
|
|
296
|
-
Bucket: this.bucket,
|
|
297
|
-
Key: this.getFullKey(key)
|
|
298
|
-
})
|
|
299
|
-
);
|
|
300
|
-
const body = await response.Body?.transformToString();
|
|
301
|
-
if (!body) return null;
|
|
302
|
-
return JSON.parse(body);
|
|
303
|
-
} catch (err) {
|
|
304
|
-
const error = err;
|
|
305
|
-
if (error.name === "NoSuchKey" || error.$metadata?.httpStatusCode === 404) {
|
|
306
|
-
return null;
|
|
307
|
-
}
|
|
308
|
-
throw err;
|
|
309
|
-
}
|
|
310
|
-
}
|
|
311
|
-
close() {
|
|
312
|
-
}
|
|
313
|
-
};
|
|
314
|
-
var LocalTransientStore = class {
|
|
315
|
-
baseDir;
|
|
316
|
-
initialized = false;
|
|
317
|
-
cleanupInterval;
|
|
318
|
-
sourceTtlMs;
|
|
319
|
-
jobMetadataTtlMs;
|
|
320
|
-
/** For cleanup, use the longer of the two TTLs */
|
|
321
|
-
maxTtlMs;
|
|
322
|
-
constructor(options) {
|
|
323
|
-
this.baseDir = options?.baseDir ?? path2.join(os2.tmpdir(), "ffs-transient");
|
|
324
|
-
this.sourceTtlMs = options?.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;
|
|
325
|
-
this.jobMetadataTtlMs = options?.jobMetadataTtlMs ?? DEFAULT_JOB_METADATA_TTL_MS;
|
|
326
|
-
this.maxTtlMs = Math.max(this.sourceTtlMs, this.jobMetadataTtlMs);
|
|
327
|
-
this.cleanupInterval = setInterval(() => {
|
|
328
|
-
this.cleanupExpired().catch(console.error);
|
|
329
|
-
}, 3e5);
|
|
330
|
-
}
|
|
331
|
-
/**
|
|
332
|
-
* Remove files older than max TTL
|
|
333
|
-
*/
|
|
334
|
-
async cleanupExpired() {
|
|
335
|
-
if (!this.initialized) return;
|
|
336
|
-
const now = Date.now();
|
|
337
|
-
await this.cleanupDir(this.baseDir, now);
|
|
338
|
-
}
|
|
339
|
-
async cleanupDir(dir, now) {
|
|
340
|
-
let entries;
|
|
341
|
-
try {
|
|
342
|
-
entries = await fs2.readdir(dir, { withFileTypes: true });
|
|
343
|
-
} catch {
|
|
344
|
-
return;
|
|
345
|
-
}
|
|
346
|
-
for (const entry of entries) {
|
|
347
|
-
const fullPath = path2.join(dir, entry.name);
|
|
348
|
-
if (entry.isDirectory()) {
|
|
349
|
-
await this.cleanupDir(fullPath, now);
|
|
350
|
-
try {
|
|
351
|
-
await fs2.rmdir(fullPath);
|
|
352
|
-
} catch {
|
|
353
|
-
}
|
|
354
|
-
} else if (entry.isFile()) {
|
|
355
|
-
try {
|
|
356
|
-
const stat = await fs2.stat(fullPath);
|
|
357
|
-
if (now - stat.mtimeMs > this.maxTtlMs) {
|
|
358
|
-
await fs2.rm(fullPath, { force: true });
|
|
359
|
-
}
|
|
360
|
-
} catch {
|
|
361
|
-
}
|
|
362
|
-
}
|
|
363
|
-
}
|
|
364
|
-
}
|
|
365
|
-
async ensureDir(filePath) {
|
|
366
|
-
await fs2.mkdir(path2.dirname(filePath), { recursive: true });
|
|
367
|
-
this.initialized = true;
|
|
368
|
-
}
|
|
369
|
-
filePath(key) {
|
|
370
|
-
return path2.join(this.baseDir, key);
|
|
371
|
-
}
|
|
372
|
-
tmpPathFor(finalPath) {
|
|
373
|
-
const rand = crypto.randomBytes(8).toString("hex");
|
|
374
|
-
return `${finalPath}.tmp-${process.pid}-${rand}`;
|
|
375
|
-
}
|
|
376
|
-
async put(key, stream, _ttlMs) {
|
|
377
|
-
const fp = this.filePath(key);
|
|
378
|
-
await this.ensureDir(fp);
|
|
379
|
-
const tmpPath = this.tmpPathFor(fp);
|
|
380
|
-
try {
|
|
381
|
-
const writeStream = createWriteStream2(tmpPath);
|
|
382
|
-
await pipeline2(stream, writeStream);
|
|
383
|
-
await fs2.rename(tmpPath, fp);
|
|
384
|
-
} catch (err) {
|
|
385
|
-
await fs2.rm(tmpPath, { force: true }).catch(() => {
|
|
386
|
-
});
|
|
387
|
-
throw err;
|
|
388
|
-
}
|
|
389
|
-
}
|
|
390
|
-
async getStream(key) {
|
|
391
|
-
const fp = this.filePath(key);
|
|
392
|
-
if (!existsSync(fp)) return null;
|
|
393
|
-
return createReadStream(fp);
|
|
394
|
-
}
|
|
395
|
-
async exists(key) {
|
|
396
|
-
try {
|
|
397
|
-
await fs2.access(this.filePath(key));
|
|
398
|
-
return true;
|
|
399
|
-
} catch {
|
|
400
|
-
return false;
|
|
401
|
-
}
|
|
402
|
-
}
|
|
403
|
-
async existsMany(keys) {
|
|
404
|
-
const results = await Promise.all(
|
|
405
|
-
keys.map(async (key) => [key, await this.exists(key)])
|
|
406
|
-
);
|
|
407
|
-
return new Map(results);
|
|
408
|
-
}
|
|
409
|
-
async delete(key) {
|
|
410
|
-
await fs2.rm(this.filePath(key), { force: true });
|
|
411
|
-
}
|
|
412
|
-
async putJson(key, data, _ttlMs) {
|
|
413
|
-
const fp = this.filePath(key);
|
|
414
|
-
await this.ensureDir(fp);
|
|
415
|
-
const tmpPath = this.tmpPathFor(fp);
|
|
416
|
-
try {
|
|
417
|
-
await fs2.writeFile(tmpPath, JSON.stringify(data));
|
|
418
|
-
await fs2.rename(tmpPath, fp);
|
|
419
|
-
} catch (err) {
|
|
420
|
-
await fs2.rm(tmpPath, { force: true }).catch(() => {
|
|
421
|
-
});
|
|
422
|
-
throw err;
|
|
423
|
-
}
|
|
424
|
-
}
|
|
425
|
-
async getJson(key) {
|
|
426
|
-
try {
|
|
427
|
-
const content = await fs2.readFile(this.filePath(key), "utf-8");
|
|
428
|
-
return JSON.parse(content);
|
|
429
|
-
} catch {
|
|
430
|
-
return null;
|
|
431
|
-
}
|
|
432
|
-
}
|
|
433
|
-
close() {
|
|
434
|
-
if (this.cleanupInterval) {
|
|
435
|
-
clearInterval(this.cleanupInterval);
|
|
436
|
-
this.cleanupInterval = void 0;
|
|
437
|
-
}
|
|
438
|
-
}
|
|
439
|
-
};
|
|
440
|
-
function createTransientStore() {
|
|
441
|
-
const sourceTtlMs = process.env.FFS_SOURCE_CACHE_TTL_MS ? parseInt(process.env.FFS_SOURCE_CACHE_TTL_MS, 10) : DEFAULT_SOURCE_TTL_MS;
|
|
442
|
-
const jobMetadataTtlMs = process.env.FFS_JOB_METADATA_TTL_MS ? parseInt(process.env.FFS_JOB_METADATA_TTL_MS, 10) : DEFAULT_JOB_METADATA_TTL_MS;
|
|
443
|
-
if (process.env.FFS_TRANSIENT_STORE_BUCKET) {
|
|
444
|
-
return new S3TransientStore({
|
|
445
|
-
endpoint: process.env.FFS_TRANSIENT_STORE_ENDPOINT,
|
|
446
|
-
region: process.env.FFS_TRANSIENT_STORE_REGION ?? "auto",
|
|
447
|
-
bucket: process.env.FFS_TRANSIENT_STORE_BUCKET,
|
|
448
|
-
prefix: process.env.FFS_TRANSIENT_STORE_PREFIX,
|
|
449
|
-
accessKeyId: process.env.FFS_TRANSIENT_STORE_ACCESS_KEY,
|
|
450
|
-
secretAccessKey: process.env.FFS_TRANSIENT_STORE_SECRET_KEY,
|
|
451
|
-
sourceTtlMs,
|
|
452
|
-
jobMetadataTtlMs
|
|
453
|
-
});
|
|
454
|
-
}
|
|
455
|
-
return new LocalTransientStore({
|
|
456
|
-
baseDir: process.env.FFS_TRANSIENT_STORE_LOCAL_DIR,
|
|
457
|
-
sourceTtlMs,
|
|
458
|
-
jobMetadataTtlMs
|
|
459
|
-
});
|
|
460
|
-
}
|
|
461
|
-
function hashUrl(url) {
|
|
462
|
-
return crypto.createHash("sha256").update(url).digest("hex").slice(0, 16);
|
|
463
|
-
}
|
|
464
|
-
function sourceStoreKey(url) {
|
|
465
|
-
return `sources/${hashUrl(url)}`;
|
|
466
|
-
}
|
|
467
|
-
function warmupJobStoreKey(jobId) {
|
|
468
|
-
return `jobs/warmup/${jobId}.json`;
|
|
469
|
-
}
|
|
470
|
-
function renderJobStoreKey(jobId) {
|
|
471
|
-
return `jobs/render/${jobId}.json`;
|
|
472
|
-
}
|
|
473
|
-
function warmupAndRenderJobStoreKey(jobId) {
|
|
474
|
-
return `jobs/warmup-and-render/${jobId}.json`;
|
|
475
|
-
}
|
|
476
|
-
var storeKeys = {
|
|
477
|
-
source: sourceStoreKey,
|
|
478
|
-
warmupJob: warmupJobStoreKey,
|
|
479
|
-
renderJob: renderJobStoreKey,
|
|
480
|
-
warmupAndRenderJob: warmupAndRenderJobStoreKey
|
|
481
|
-
};
|
|
482
|
-
|
|
483
15
|
// src/proxy.ts
|
|
484
16
|
import http from "http";
|
|
485
17
|
import { Readable } from "stream";
|
|
486
|
-
|
|
487
|
-
// src/fetch.ts
|
|
488
|
-
import { fetch, Agent } from "undici";
|
|
489
|
-
async function ffsFetch(url, options) {
|
|
490
|
-
const {
|
|
491
|
-
method,
|
|
492
|
-
body,
|
|
493
|
-
headers,
|
|
494
|
-
headersTimeout = 3e5,
|
|
495
|
-
// 5 minutes
|
|
496
|
-
bodyTimeout = 3e5
|
|
497
|
-
// 5 minutes
|
|
498
|
-
} = options ?? {};
|
|
499
|
-
const agent = new Agent({ headersTimeout, bodyTimeout });
|
|
500
|
-
return fetch(url, {
|
|
501
|
-
method,
|
|
502
|
-
body,
|
|
503
|
-
headers: { "User-Agent": "FFS (+https://effing.dev/ffs)", ...headers },
|
|
504
|
-
dispatcher: agent
|
|
505
|
-
});
|
|
506
|
-
}
|
|
507
|
-
|
|
508
|
-
// src/proxy.ts
|
|
509
18
|
var HttpProxy = class {
|
|
510
19
|
server = null;
|
|
511
20
|
_port = null;
|
|
@@ -584,11 +93,11 @@ var HttpProxy = class {
|
|
|
584
93
|
* Parse the proxy path to extract the original URL.
|
|
585
94
|
* Path format: /{originalUrl}
|
|
586
95
|
*/
|
|
587
|
-
parseProxyPath(
|
|
588
|
-
if (!
|
|
96
|
+
parseProxyPath(path) {
|
|
97
|
+
if (!path.startsWith("/http://") && !path.startsWith("/https://")) {
|
|
589
98
|
return null;
|
|
590
99
|
}
|
|
591
|
-
return
|
|
100
|
+
return path.slice(1);
|
|
592
101
|
}
|
|
593
102
|
/**
|
|
594
103
|
* Filter headers to forward to the upstream server.
|
|
@@ -627,20 +136,22 @@ var HttpProxy = class {
|
|
|
627
136
|
|
|
628
137
|
// src/handlers/shared.ts
|
|
629
138
|
import { effieDataSchema } from "@effing/effie";
|
|
630
|
-
async function createServerContext() {
|
|
139
|
+
async function createServerContext(options) {
|
|
631
140
|
const port2 = process.env.FFS_PORT || process.env.PORT || 2e3;
|
|
632
|
-
const
|
|
633
|
-
|
|
141
|
+
const enableHttpProxy = options?.httpProxy ?? !options?.renderBackendResolver;
|
|
142
|
+
let httpProxy;
|
|
143
|
+
if (enableHttpProxy) {
|
|
144
|
+
httpProxy = new HttpProxy();
|
|
145
|
+
await httpProxy.start();
|
|
146
|
+
}
|
|
634
147
|
return {
|
|
635
148
|
transientStore: createTransientStore(),
|
|
636
149
|
httpProxy,
|
|
637
150
|
baseUrl: process.env.FFS_BASE_URL || `http://localhost:${port2}`,
|
|
638
151
|
skipValidation: !!process.env.FFS_SKIP_VALIDATION && process.env.FFS_SKIP_VALIDATION !== "false",
|
|
639
152
|
warmupConcurrency: parseInt(process.env.FFS_WARMUP_CONCURRENCY || "4", 10),
|
|
640
|
-
|
|
641
|
-
|
|
642
|
-
warmupBackendApiKey: process.env.FFS_WARMUP_BACKEND_API_KEY,
|
|
643
|
-
renderBackendApiKey: process.env.FFS_RENDER_BACKEND_API_KEY
|
|
153
|
+
warmupBackendResolver: options?.warmupBackendResolver,
|
|
154
|
+
renderBackendResolver: options?.renderBackendResolver
|
|
644
155
|
};
|
|
645
156
|
}
|
|
646
157
|
function parseEffieData(body, skipValidation) {
|
|
@@ -687,7 +198,7 @@ data: ${JSON.stringify(data)}
|
|
|
687
198
|
|
|
688
199
|
// src/handlers/caching.ts
|
|
689
200
|
import "express";
|
|
690
|
-
import { Readable as
|
|
201
|
+
import { Readable as Readable2, Transform } from "stream";
|
|
691
202
|
import { randomUUID as randomUUID3 } from "crypto";
|
|
692
203
|
import {
|
|
693
204
|
extractEffieSources,
|
|
@@ -702,775 +213,17 @@ import { extractEffieSourcesWithTypes, effieDataSchema as effieDataSchema3 } fro
|
|
|
702
213
|
// src/handlers/rendering.ts
|
|
703
214
|
import "express";
|
|
704
215
|
import { randomUUID } from "crypto";
|
|
705
|
-
|
|
706
|
-
// src/render.ts
|
|
707
|
-
import { Readable as Readable2 } from "stream";
|
|
708
|
-
import { createReadStream as createReadStream2 } from "fs";
|
|
709
|
-
|
|
710
|
-
// src/motion.ts
|
|
711
|
-
function getEasingExpression(tNormExpr, easingType) {
|
|
712
|
-
switch (easingType) {
|
|
713
|
-
case "ease-in":
|
|
714
|
-
return `pow(${tNormExpr},2)`;
|
|
715
|
-
case "ease-out":
|
|
716
|
-
return `(1-pow(1-(${tNormExpr}),2))`;
|
|
717
|
-
case "ease-in-out":
|
|
718
|
-
return `if(lt(${tNormExpr},0.5),2*pow(${tNormExpr},2),1-pow(-2*(${tNormExpr})+2,2)/2)`;
|
|
719
|
-
case "linear":
|
|
720
|
-
default:
|
|
721
|
-
return `(${tNormExpr})`;
|
|
722
|
-
}
|
|
723
|
-
}
|
|
724
|
-
function processSlideMotion(motion, relativeTimeExpr) {
|
|
725
|
-
const duration = motion.duration ?? 1;
|
|
726
|
-
const distance = motion.distance ?? 1;
|
|
727
|
-
const reverse = motion.reverse ?? false;
|
|
728
|
-
const easing = motion.easing ?? "linear";
|
|
729
|
-
const tNormExpr = `(${relativeTimeExpr})/${duration}`;
|
|
730
|
-
const easedProgressExpr = getEasingExpression(tNormExpr, easing);
|
|
731
|
-
const finalTimeFactorExpr = reverse ? easedProgressExpr : `(1-(${easedProgressExpr}))`;
|
|
732
|
-
let activeX;
|
|
733
|
-
let activeY;
|
|
734
|
-
let initialX;
|
|
735
|
-
let initialY;
|
|
736
|
-
let finalX;
|
|
737
|
-
let finalY;
|
|
738
|
-
switch (motion.direction) {
|
|
739
|
-
case "left": {
|
|
740
|
-
const offsetXLeft = `${distance}*W`;
|
|
741
|
-
activeX = `(${offsetXLeft})*${finalTimeFactorExpr}`;
|
|
742
|
-
activeY = "0";
|
|
743
|
-
initialX = reverse ? "0" : offsetXLeft;
|
|
744
|
-
initialY = "0";
|
|
745
|
-
finalX = reverse ? offsetXLeft : "0";
|
|
746
|
-
finalY = "0";
|
|
747
|
-
break;
|
|
748
|
-
}
|
|
749
|
-
case "right": {
|
|
750
|
-
const offsetXRight = `-${distance}*W`;
|
|
751
|
-
activeX = `(${offsetXRight})*${finalTimeFactorExpr}`;
|
|
752
|
-
activeY = "0";
|
|
753
|
-
initialX = reverse ? "0" : offsetXRight;
|
|
754
|
-
initialY = "0";
|
|
755
|
-
finalX = reverse ? offsetXRight : "0";
|
|
756
|
-
finalY = "0";
|
|
757
|
-
break;
|
|
758
|
-
}
|
|
759
|
-
case "up": {
|
|
760
|
-
const offsetYUp = `${distance}*H`;
|
|
761
|
-
activeX = "0";
|
|
762
|
-
activeY = `(${offsetYUp})*${finalTimeFactorExpr}`;
|
|
763
|
-
initialX = "0";
|
|
764
|
-
initialY = reverse ? "0" : offsetYUp;
|
|
765
|
-
finalX = "0";
|
|
766
|
-
finalY = reverse ? offsetYUp : "0";
|
|
767
|
-
break;
|
|
768
|
-
}
|
|
769
|
-
case "down": {
|
|
770
|
-
const offsetYDown = `-${distance}*H`;
|
|
771
|
-
activeX = "0";
|
|
772
|
-
activeY = `(${offsetYDown})*${finalTimeFactorExpr}`;
|
|
773
|
-
initialX = "0";
|
|
774
|
-
initialY = reverse ? "0" : offsetYDown;
|
|
775
|
-
finalX = "0";
|
|
776
|
-
finalY = reverse ? offsetYDown : "0";
|
|
777
|
-
break;
|
|
778
|
-
}
|
|
779
|
-
}
|
|
780
|
-
return { initialX, initialY, activeX, activeY, finalX, finalY, duration };
|
|
781
|
-
}
|
|
782
|
-
function processBounceMotion(motion, relativeTimeExpr) {
|
|
783
|
-
const amplitude = motion.amplitude ?? 0.5;
|
|
784
|
-
const duration = motion.duration ?? 1;
|
|
785
|
-
const initialY = `-overlay_h*${amplitude}`;
|
|
786
|
-
const finalY = "0";
|
|
787
|
-
const tNormExpr = `(${relativeTimeExpr})/${duration}`;
|
|
788
|
-
const activeBounceExpression = `if(lt(${tNormExpr},0.363636),${initialY}+overlay_h*${amplitude}*(7.5625*${tNormExpr}*${tNormExpr}),if(lt(${tNormExpr},0.727273),${initialY}+overlay_h*${amplitude}*(7.5625*(${tNormExpr}-0.545455)*(${tNormExpr}-0.545455)+0.75),if(lt(${tNormExpr},0.909091),${initialY}+overlay_h*${amplitude}*(7.5625*(${tNormExpr}-0.818182)*(${tNormExpr}-0.818182)+0.9375),if(lt(${tNormExpr},0.954545),${initialY}+overlay_h*${amplitude}*(7.5625*(${tNormExpr}-0.954545)*(${tNormExpr}-0.954545)+0.984375),${finalY}))))`;
|
|
789
|
-
return {
|
|
790
|
-
initialX: "0",
|
|
791
|
-
initialY,
|
|
792
|
-
activeX: "0",
|
|
793
|
-
activeY: activeBounceExpression,
|
|
794
|
-
// This expression now scales with duration
|
|
795
|
-
finalX: "0",
|
|
796
|
-
finalY,
|
|
797
|
-
duration
|
|
798
|
-
// Return the actual duration used
|
|
799
|
-
};
|
|
800
|
-
}
|
|
801
|
-
function processShakeMotion(motion, relativeTimeExpr) {
|
|
802
|
-
const intensity = motion.intensity ?? 10;
|
|
803
|
-
const frequency = motion.frequency ?? 4;
|
|
804
|
-
const duration = motion.duration ?? 1;
|
|
805
|
-
const activeX = `${intensity}*sin(${relativeTimeExpr}*PI*${frequency})`;
|
|
806
|
-
const activeY = `${intensity}*cos(${relativeTimeExpr}*PI*${frequency})`;
|
|
807
|
-
return {
|
|
808
|
-
initialX: "0",
|
|
809
|
-
initialY: "0",
|
|
810
|
-
activeX,
|
|
811
|
-
activeY,
|
|
812
|
-
finalX: "0",
|
|
813
|
-
finalY: "0",
|
|
814
|
-
duration
|
|
815
|
-
};
|
|
816
|
-
}
|
|
817
|
-
function processMotion(delay, motion) {
|
|
818
|
-
if (!motion) return "x=0:y=0";
|
|
819
|
-
const start = delay + (motion.start ?? 0);
|
|
820
|
-
const relativeTimeExpr = `(t-${start})`;
|
|
821
|
-
let components;
|
|
822
|
-
switch (motion.type) {
|
|
823
|
-
case "bounce":
|
|
824
|
-
components = processBounceMotion(motion, relativeTimeExpr);
|
|
825
|
-
break;
|
|
826
|
-
case "shake":
|
|
827
|
-
components = processShakeMotion(motion, relativeTimeExpr);
|
|
828
|
-
break;
|
|
829
|
-
case "slide":
|
|
830
|
-
components = processSlideMotion(motion, relativeTimeExpr);
|
|
831
|
-
break;
|
|
832
|
-
default:
|
|
833
|
-
motion;
|
|
834
|
-
throw new Error(
|
|
835
|
-
`Unsupported motion type: ${motion.type}`
|
|
836
|
-
);
|
|
837
|
-
}
|
|
838
|
-
const motionEndTime = start + components.duration;
|
|
839
|
-
const xArg = `if(lt(t,${start}),${components.initialX},if(lt(t,${motionEndTime}),${components.activeX},${components.finalX}))`;
|
|
840
|
-
const yArg = `if(lt(t,${start}),${components.initialY},if(lt(t,${motionEndTime}),${components.activeY},${components.finalY}))`;
|
|
841
|
-
return `x='${xArg}':y='${yArg}'`;
|
|
842
|
-
}
|
|
843
|
-
|
|
844
|
-
// src/effect.ts
|
|
845
|
-
function processFadeIn(effect, _frameRate, _frameWidth, _frameHeight) {
|
|
846
|
-
return `fade=t=in:st=${effect.start}:d=${effect.duration}:alpha=1`;
|
|
847
|
-
}
|
|
848
|
-
function processFadeOut(effect, _frameRate, _frameWidth, _frameHeight) {
|
|
849
|
-
return `fade=t=out:st=${effect.start}:d=${effect.duration}:alpha=1`;
|
|
850
|
-
}
|
|
851
|
-
function processSaturateIn(effect, _frameRate, _frameWidth, _frameHeight) {
|
|
852
|
-
return `hue='s=max(0,min(1,(t-${effect.start})/${effect.duration}))'`;
|
|
853
|
-
}
|
|
854
|
-
function processSaturateOut(effect, _frameRate, _frameWidth, _frameHeight) {
|
|
855
|
-
return `hue='s=max(0,min(1,(${effect.start + effect.duration}-t)/${effect.duration}))'`;
|
|
856
|
-
}
|
|
857
|
-
function processScroll(effect, frameRate, _frameWidth, _frameHeight) {
|
|
858
|
-
const distance = effect.distance ?? 1;
|
|
859
|
-
const scroll = distance / (1 + distance);
|
|
860
|
-
const speed = scroll / (effect.duration * frameRate);
|
|
861
|
-
switch (effect.direction) {
|
|
862
|
-
case "left":
|
|
863
|
-
return `scroll=h=${speed}`;
|
|
864
|
-
case "right":
|
|
865
|
-
return `scroll=hpos=${1 - scroll}:h=-${speed}`;
|
|
866
|
-
case "up":
|
|
867
|
-
return `scroll=v=${speed}`;
|
|
868
|
-
case "down":
|
|
869
|
-
return `scroll=vpos=${1 - scroll}:v=-${speed}`;
|
|
870
|
-
}
|
|
871
|
-
}
|
|
872
|
-
function processEffect(effect, frameRate, frameWidth, frameHeight) {
|
|
873
|
-
switch (effect.type) {
|
|
874
|
-
case "fade-in":
|
|
875
|
-
return processFadeIn(effect, frameRate, frameWidth, frameHeight);
|
|
876
|
-
case "fade-out":
|
|
877
|
-
return processFadeOut(effect, frameRate, frameWidth, frameHeight);
|
|
878
|
-
case "saturate-in":
|
|
879
|
-
return processSaturateIn(effect, frameRate, frameWidth, frameHeight);
|
|
880
|
-
case "saturate-out":
|
|
881
|
-
return processSaturateOut(effect, frameRate, frameWidth, frameHeight);
|
|
882
|
-
case "scroll":
|
|
883
|
-
return processScroll(effect, frameRate, frameWidth, frameHeight);
|
|
884
|
-
default:
|
|
885
|
-
effect;
|
|
886
|
-
throw new Error(
|
|
887
|
-
`Unsupported effect type: ${effect.type}`
|
|
888
|
-
);
|
|
889
|
-
}
|
|
890
|
-
}
|
|
891
|
-
function processEffects(effects, frameRate, frameWidth, frameHeight) {
|
|
892
|
-
if (!effects || effects.length === 0) return "";
|
|
893
|
-
const filters = [];
|
|
894
|
-
for (const effect of effects) {
|
|
895
|
-
const filter = processEffect(effect, frameRate, frameWidth, frameHeight);
|
|
896
|
-
filters.push(filter);
|
|
897
|
-
}
|
|
898
|
-
return filters.join(",");
|
|
899
|
-
}
|
|
900
|
-
|
|
901
|
-
// src/transition.ts
|
|
902
|
-
function processTransition(transition) {
|
|
903
|
-
switch (transition.type) {
|
|
904
|
-
case "fade": {
|
|
905
|
-
if ("through" in transition) {
|
|
906
|
-
return `fade${transition.through}`;
|
|
907
|
-
}
|
|
908
|
-
const easing = transition.easing ?? "linear";
|
|
909
|
-
return {
|
|
910
|
-
linear: "fade",
|
|
911
|
-
"ease-in": "fadeslow",
|
|
912
|
-
"ease-out": "fadefast"
|
|
913
|
-
}[easing];
|
|
914
|
-
}
|
|
915
|
-
case "barn": {
|
|
916
|
-
const orientation = transition.orientation ?? "horizontal";
|
|
917
|
-
const mode = transition.mode ?? "open";
|
|
918
|
-
const prefix = orientation === "vertical" ? "vert" : "horz";
|
|
919
|
-
return `${prefix}${mode}`;
|
|
920
|
-
}
|
|
921
|
-
case "circle": {
|
|
922
|
-
const mode = transition.mode ?? "open";
|
|
923
|
-
return `circle${mode}`;
|
|
924
|
-
}
|
|
925
|
-
case "wipe":
|
|
926
|
-
case "slide":
|
|
927
|
-
case "smooth": {
|
|
928
|
-
const direction = transition.direction ?? "left";
|
|
929
|
-
return `${transition.type}${direction}`;
|
|
930
|
-
}
|
|
931
|
-
case "slice": {
|
|
932
|
-
const direction = transition.direction ?? "left";
|
|
933
|
-
const prefix = {
|
|
934
|
-
left: "hl",
|
|
935
|
-
right: "hr",
|
|
936
|
-
up: "vu",
|
|
937
|
-
down: "vd"
|
|
938
|
-
}[direction];
|
|
939
|
-
return `${prefix}${transition.type}`;
|
|
940
|
-
}
|
|
941
|
-
case "zoom": {
|
|
942
|
-
return "zoomin";
|
|
943
|
-
}
|
|
944
|
-
case "dissolve":
|
|
945
|
-
case "pixelize":
|
|
946
|
-
case "radial":
|
|
947
|
-
return transition.type;
|
|
948
|
-
default:
|
|
949
|
-
transition;
|
|
950
|
-
throw new Error(
|
|
951
|
-
`Unsupported transition type: ${transition.type}`
|
|
952
|
-
);
|
|
953
|
-
}
|
|
954
|
-
}
|
|
955
|
-
|
|
956
|
-
// src/render.ts
|
|
957
|
-
import sharp from "sharp";
|
|
958
|
-
import { fileURLToPath } from "url";
|
|
959
|
-
// Renders an "effie" video description to an MP4 stream by assembling and
// running a single FFmpeg command (inputs + filter_complex + output args).
var EffieRenderer = class {
  // Parsed effie document (width/height/fps, background, segments, sources, audio).
  effieData;
  // FFmpegRunner created by render(); used by close() to tear the process down.
  ffmpegRunner;
  // When false (default), file: URLs are rejected in fetchSource().
  allowLocalFiles;
  // Optional cache used to serve previously warmed-up source streams.
  transientStore;
  // Optional proxy whose transformUrl() rewrites source URLs before fetching.
  httpProxy;
  constructor(effieData, options) {
    this.effieData = effieData;
    this.allowLocalFiles = options?.allowLocalFiles ?? false;
    this.transientStore = options?.transientStore;
    this.httpProxy = options?.httpProxy;
  }
  /**
   * Resolves a source URL to a readable stream.
   * Supports data: URLs (base64 or percent-encoded), file: URLs (only when
   * allowLocalFiles is set), the transient-store cache, and finally HTTP(S)
   * via ffsFetch. Throws on malformed data URLs, disallowed file URLs, and
   * failed/bodyless HTTP responses.
   */
  async fetchSource(src) {
    if (src.startsWith("data:")) {
      const commaIndex = src.indexOf(",");
      if (commaIndex === -1) {
        throw new Error("Invalid data URL");
      }
      // meta is everything between "data:" and the comma, e.g. "image/png;base64".
      const meta = src.slice(5, commaIndex);
      const isBase64 = meta.endsWith(";base64");
      const data = src.slice(commaIndex + 1);
      const buffer = isBase64 ? Buffer.from(data, "base64") : Buffer.from(decodeURIComponent(data));
      return Readable2.from(buffer);
    }
    if (src.startsWith("file:")) {
      if (!this.allowLocalFiles) {
        throw new Error(
          "Local file paths are not allowed. Use allowLocalFiles option for trusted operations."
        );
      }
      return createReadStream2(fileURLToPath(src));
    }
    // Prefer a cached (warmed-up) stream when a transient store is configured.
    if (this.transientStore) {
      const cachedStream = await this.transientStore.getStream(
        storeKeys.source(src)
      );
      if (cachedStream) {
        return cachedStream;
      }
    }
    // Generous timeouts: media sources can be large and slow to serve.
    const response = await ffsFetch(src, {
      headersTimeout: 10 * 60 * 1e3,
      // 10 minutes
      bodyTimeout: 20 * 60 * 1e3
      // 20 minutes
    });
    if (!response.ok) {
      throw new Error(
        `Failed to fetch ${src}: ${response.status} ${response.statusText}`
      );
    }
    if (!response.body) {
      throw new Error(`No body for ${src}`);
    }
    return Readable2.fromWeb(response.body);
  }
  /**
   * Builds an FFmpeg audio filter string from optional volume/fade settings.
   * Returns "anull" (pass-through) when no filter applies.
   */
  buildAudioFilter({
    duration,
    volume,
    fadeIn,
    fadeOut
  }) {
    const filters = [];
    if (volume !== void 0) {
      filters.push(`volume=${volume}`);
    }
    if (fadeIn !== void 0) {
      filters.push(`afade=type=in:start_time=0:duration=${fadeIn}`);
    }
    if (fadeOut !== void 0) {
      // Fade out ends exactly at the clip's end.
      filters.push(
        `afade=type=out:start_time=${duration - fadeOut}:duration=${fadeOut}`
      );
    }
    return filters.length ? filters.join(",") : "anull";
  }
  /**
   * Computes the output frame size for a scale factor, rounded down to even
   * values (the yuv420p/libx264 output configured below needs even dimensions).
   */
  getFrameDimensions(scaleFactor) {
    return {
      frameWidth: Math.floor(this.effieData.width * scaleFactor / 2) * 2,
      frameHeight: Math.floor(this.effieData.height * scaleFactor / 2) * 2
    };
  }
  /**
   * Builds an FFmpeg input for a background (global or segment).
   */
  buildBackgroundInput(background, inputIndex, frameWidth, frameHeight) {
    if (background.type === "image") {
      // Loop the still image at the project frame rate.
      return {
        index: inputIndex,
        source: background.source,
        preArgs: ["-loop", "1", "-framerate", this.effieData.fps.toString()],
        type: "image"
      };
    } else if (background.type === "video") {
      // Loop the video indefinitely; it is trimmed later in the filter graph.
      return {
        index: inputIndex,
        source: background.source,
        preArgs: ["-stream_loop", "-1"],
        type: "video"
      };
    }
    // Solid-color background generated by lavfi; no real source file.
    return {
      index: inputIndex,
      source: "",
      preArgs: [
        "-f",
        "lavfi",
        "-i",
        `color=${background.color}:size=${frameWidth}x${frameHeight}:rate=${this.effieData.fps}`
      ],
      type: "color"
    };
  }
  // Output side of the command: map the composed [outv]/[outa] streams into a
  // fragmented MP4 (frag_keyframe+empty_moov makes the output streamable).
  buildOutputArgs(outputFilename) {
    return [
      "-map",
      "[outv]",
      "-map",
      "[outa]",
      "-c:v",
      "libx264",
      "-r",
      this.effieData.fps.toString(),
      "-pix_fmt",
      "yuv420p",
      "-preset",
      "fast",
      "-crf",
      "28",
      "-c:a",
      "aac",
      "-movflags",
      "frag_keyframe+empty_moov",
      "-f",
      "mp4",
      outputFilename
    ];
  }
  // Builds an FFmpeg input for a segment layer. Still images are looped for the
  // segment duration; animations are read as an image2 sequence at project fps.
  buildLayerInput(layer, duration, inputIndex) {
    let preArgs = [];
    if (layer.type === "image") {
      preArgs = [
        "-loop",
        "1",
        "-t",
        duration.toString(),
        "-framerate",
        this.effieData.fps.toString()
      ];
    } else if (layer.type === "animation") {
      preArgs = ["-f", "image2", "-framerate", this.effieData.fps.toString()];
    }
    return {
      index: inputIndex,
      source: layer.source,
      preArgs,
      type: layer.type
    };
  }
  /**
   * Builds filter chain for all layers in a segment.
   * @param segment - The segment containing layers
   * @param bgLabel - Label for the background input (e.g., "bg_seg0" or "bg_seg")
   * @param labelPrefix - Prefix for generated labels (e.g., "seg0_" or "")
   * @param layerInputOffset - Starting input index for layers
   * @param frameWidth - Frame width for nullsrc
   * @param frameHeight - Frame height for nullsrc
   * @param outputLabel - Label for the final video output
   * @returns Array of filter parts to add to the filter chain
   */
  buildLayerFilters(segment, bgLabel, labelPrefix, layerInputOffset, frameWidth, frameHeight, outputLabel) {
    const filterParts = [];
    // Layers are overlaid in order; this label tracks the composited result so far.
    let currentVidLabel = bgLabel;
    for (let l = 0; l < segment.layers.length; l++) {
      const inputIdx = layerInputOffset + l;
      const layerLabel = `${labelPrefix}layer${l}`;
      const layer = segment.layers[l];
      const effectChain = layer.effects ? processEffects(
        layer.effects,
        this.effieData.fps,
        frameWidth,
        frameHeight
      ) : "";
      filterParts.push(
        `[${inputIdx}:v]trim=start=0:duration=${segment.duration},${effectChain ? effectChain + "," : ""}setsar=1,setpts=PTS-STARTPTS[${layerLabel}]`
      );
      let overlayInputLabel = layerLabel;
      const delay = layer.delay ?? 0;
      if (delay > 0) {
        // Delay the layer by prepending `delay` seconds of empty frames.
        filterParts.push(
          `nullsrc=size=${frameWidth}x${frameHeight}:duration=${delay},setpts=PTS-STARTPTS[null_${layerLabel}]`
        );
        filterParts.push(
          `[null_${layerLabel}][${layerLabel}]concat=n=2:v=1:a=0[delayed_${layerLabel}]`
        );
        overlayInputLabel = `delayed_${layerLabel}`;
      }
      const overlayOutputLabel = `${labelPrefix}tmp${l}`;
      // processMotion yields the overlay x:y expression; static layers sit at 0:0.
      const offset = layer.motion ? processMotion(delay, layer.motion) : "0:0";
      const fromTime = layer.from ?? 0;
      const untilTime = layer.until ?? segment.duration;
      filterParts.push(
        `[${currentVidLabel}][${overlayInputLabel}]overlay=${offset}:enable='between(t,${fromTime},${untilTime})',fps=${this.effieData.fps}[${overlayOutputLabel}]`
      );
      currentVidLabel = overlayOutputLabel;
    }
    // Pass the last composite through `null` so it carries the requested label.
    filterParts.push(`[${currentVidLabel}]null[${outputLabel}]`);
    return filterParts;
  }
  /**
   * Applies xfade/concat transitions between video segments.
   * Modifies videoSegmentLabels in place to update labels after transitions.
   * @param filterParts - Array to append filter parts to
   * @param videoSegmentLabels - Array of video segment labels (modified in place)
   */
  applyTransitions(filterParts, videoSegmentLabels) {
    // Running xfade offset: where (in output time) the next transition starts.
    let transitionOffset = 0;
    this.effieData.segments.forEach((segment, i) => {
      if (i === 0) {
        transitionOffset = segment.duration;
        return;
      }
      const combineLabel = `[vid_com${i}]`;
      if (!segment.transition) {
        // No transition: hard cut via concat.
        transitionOffset += segment.duration;
        filterParts.push(
          `${videoSegmentLabels[i - 1]}${videoSegmentLabels[i]}concat=n=2:v=1:a=0,fps=${this.effieData.fps}${combineLabel}`
        );
        videoSegmentLabels[i] = combineLabel;
        return;
      }
      const transitionName = processTransition(segment.transition);
      const transitionDuration = segment.transition.duration;
      // The crossfade overlaps the tail of the previous segment.
      transitionOffset -= transitionDuration;
      filterParts.push(
        `${videoSegmentLabels[i - 1]}${videoSegmentLabels[i]}xfade=transition=${transitionName}:duration=${transitionDuration}:offset=${transitionOffset}${combineLabel}`
      );
      videoSegmentLabels[i] = combineLabel;
      transitionOffset += segment.duration;
    });
    filterParts.push(`${videoSegmentLabels.at(-1)}null[outv]`);
  }
  /**
   * Applies general audio mixing: concats segment audio and mixes with global audio if present.
   * @param filterParts - Array to append filter parts to
   * @param audioSegmentLabels - Array of audio segment labels to concat
   * @param totalDuration - Total duration for audio trimming
   * @param generalAudioInputIndex - Input index for general audio (if present)
   */
  applyGeneralAudio(filterParts, audioSegmentLabels, totalDuration, generalAudioInputIndex) {
    if (this.effieData.audio) {
      const audioSeek = this.effieData.audio.seek ?? 0;
      const generalAudioFilter = this.buildAudioFilter({
        duration: totalDuration,
        volume: this.effieData.audio.volume,
        fadeIn: this.effieData.audio.fadeIn,
        fadeOut: this.effieData.audio.fadeOut
      });
      filterParts.push(
        `[${generalAudioInputIndex}:a]atrim=start=${audioSeek}:duration=${totalDuration},${generalAudioFilter},asetpts=PTS-STARTPTS[general_audio]`
      );
      filterParts.push(
        `${audioSegmentLabels.join("")}concat=n=${this.effieData.segments.length}:v=0:a=1,atrim=start=0:duration=${totalDuration}[segments_audio]`
      );
      filterParts.push(
        `[general_audio][segments_audio]amix=inputs=2:duration=longest[outa]`
      );
    } else {
      filterParts.push(
        `${audioSegmentLabels.join("")}concat=n=${this.effieData.segments.length}:v=0:a=1[outa]`
      );
    }
  }
  /**
   * Assembles the full FFmpeg command for the document: registers inputs in a
   * fixed order (global background, per-segment backgrounds, layers, segment
   * audio, global audio), then builds the filter_complex graph and output args.
   * Input-index bookkeeping here must stay in sync with the filter labels below.
   */
  buildFFmpegCommand(outputFilename, scaleFactor = 1) {
    const globalArgs = ["-y", "-loglevel", "error"];
    const inputs = [];
    let inputIndex = 0;
    const { frameWidth, frameHeight } = this.getFrameDimensions(scaleFactor);
    const backgroundSeek = this.effieData.background.type === "video" ? this.effieData.background.seek ?? 0 : 0;
    // Input 0: the global background.
    inputs.push(
      this.buildBackgroundInput(
        this.effieData.background,
        inputIndex,
        frameWidth,
        frameHeight
      )
    );
    const globalBgInputIdx = inputIndex;
    inputIndex++;
    // One input per segment that overrides the background; null = uses global bg.
    const segmentBgInputIndices = [];
    for (const segment of this.effieData.segments) {
      if (segment.background) {
        inputs.push(
          this.buildBackgroundInput(
            segment.background,
            inputIndex,
            frameWidth,
            frameHeight
          )
        );
        segmentBgInputIndices.push(inputIndex);
        inputIndex++;
      } else {
        segmentBgInputIndices.push(null);
      }
    }
    // Segment indices that fall back to the global background.
    const globalBgSegmentIndices = [];
    for (let i = 0; i < this.effieData.segments.length; i++) {
      if (segmentBgInputIndices[i] === null) {
        globalBgSegmentIndices.push(i);
      }
    }
    // Layer inputs, in segment order.
    for (const segment of this.effieData.segments) {
      for (const layer of segment.layers) {
        inputs.push(this.buildLayerInput(layer, segment.duration, inputIndex));
        inputIndex++;
      }
    }
    // Per-segment audio inputs follow all video inputs.
    for (const segment of this.effieData.segments) {
      if (segment.audio) {
        inputs.push({
          index: inputIndex,
          source: segment.audio.source,
          preArgs: [],
          type: "audio"
        });
        inputIndex++;
      }
    }
    // Global audio, if any, is the last input.
    if (this.effieData.audio) {
      inputs.push({
        index: inputIndex,
        source: this.effieData.audio.source,
        preArgs: [],
        type: "audio"
      });
      inputIndex++;
    }
    const numSegmentBgInputs = segmentBgInputIndices.filter(
      (i) => i !== null
    ).length;
    // First audio input index = total count of video inputs.
    const numVideoInputs = 1 + numSegmentBgInputs + this.effieData.segments.reduce((sum, seg) => sum + seg.layers.length, 0);
    let audioCounter = 0;
    let currentTime = 0;
    let layerInputOffset = 1 + numSegmentBgInputs;
    const filterParts = [];
    const videoSegmentLabels = [];
    const audioSegmentLabels = [];
    // segment index -> fifo label feeding that segment's slice of the global bg.
    const globalBgFifoLabels = /* @__PURE__ */ new Map();
    const bgFilter = `fps=${this.effieData.fps},scale=${frameWidth}x${frameHeight}:force_original_aspect_ratio=increase,crop=${frameWidth}:${frameHeight}`;
    if (globalBgSegmentIndices.length === 1) {
      const fifoLabel = `bg_fifo_0`;
      filterParts.push(`[${globalBgInputIdx}:v]${bgFilter},fifo[${fifoLabel}]`);
      globalBgFifoLabels.set(globalBgSegmentIndices[0], fifoLabel);
    } else if (globalBgSegmentIndices.length > 1) {
      // Multiple consumers of one input stream need split + fifo buffering.
      const splitCount = globalBgSegmentIndices.length;
      const splitOutputLabels = globalBgSegmentIndices.map(
        (_, i) => `bg_split_${i}`
      );
      filterParts.push(
        `[${globalBgInputIdx}:v]${bgFilter},split=${splitCount}${splitOutputLabels.map((l) => `[${l}]`).join("")}`
      );
      for (let i = 0; i < splitCount; i++) {
        const fifoLabel = `bg_fifo_${i}`;
        filterParts.push(`[${splitOutputLabels[i]}]fifo[${fifoLabel}]`);
        globalBgFifoLabels.set(globalBgSegmentIndices[i], fifoLabel);
      }
    }
    for (let segIdx = 0; segIdx < this.effieData.segments.length; segIdx++) {
      const segment = this.effieData.segments[segIdx];
      const bgLabel = `bg_seg${segIdx}`;
      if (segment.background) {
        const segBgInputIdx = segmentBgInputIndices[segIdx];
        const segBgSeek = segment.background.type === "video" ? segment.background.seek ?? 0 : 0;
        filterParts.push(
          `[${segBgInputIdx}:v]fps=${this.effieData.fps},scale=${frameWidth}x${frameHeight},trim=start=${segBgSeek}:duration=${segment.duration},setpts=PTS-STARTPTS[${bgLabel}]`
        );
      } else {
        // Carve this segment's time window out of the shared global background.
        const fifoLabel = globalBgFifoLabels.get(segIdx);
        if (fifoLabel) {
          filterParts.push(
            `[${fifoLabel}]trim=start=${backgroundSeek + currentTime}:duration=${segment.duration},setpts=PTS-STARTPTS[${bgLabel}]`
          );
        }
      }
      const vidLabel = `vid_seg${segIdx}`;
      filterParts.push(
        ...this.buildLayerFilters(
          segment,
          bgLabel,
          `seg${segIdx}_`,
          layerInputOffset,
          frameWidth,
          frameHeight,
          vidLabel
        )
      );
      layerInputOffset += segment.layers.length;
      videoSegmentLabels.push(`[${vidLabel}]`);
      // Effective on-screen time is shortened by the upcoming transition overlap
      // (clamped above zero so trims never get a non-positive duration).
      const nextSegment = this.effieData.segments[segIdx + 1];
      const transitionDuration = nextSegment?.transition?.duration ?? 0;
      const realDuration = Math.max(
        1e-3,
        segment.duration - transitionDuration
      );
      if (segment.audio) {
        const audioInputIndex = numVideoInputs + audioCounter;
        const audioFilter = this.buildAudioFilter({
          duration: realDuration,
          volume: segment.audio.volume,
          fadeIn: segment.audio.fadeIn,
          fadeOut: segment.audio.fadeOut
        });
        filterParts.push(
          `[${audioInputIndex}:a]atrim=start=0:duration=${realDuration},${audioFilter},asetpts=PTS-STARTPTS[aud_seg${segIdx}]`
        );
        audioCounter++;
      } else {
        // Silent filler keeps segment audio streams aligned for concat.
        filterParts.push(
          `anullsrc=r=44100:cl=stereo,atrim=start=0:duration=${realDuration},asetpts=PTS-STARTPTS[aud_seg${segIdx}]`
        );
      }
      audioSegmentLabels.push(`[aud_seg${segIdx}]`);
      currentTime += realDuration;
    }
    this.applyGeneralAudio(
      filterParts,
      audioSegmentLabels,
      currentTime,
      numVideoInputs + audioCounter
    );
    this.applyTransitions(filterParts, videoSegmentLabels);
    const filterComplex = filterParts.join(";");
    const outputArgs = this.buildOutputArgs(outputFilename);
    return new FFmpegCommand(globalArgs, inputs, filterComplex, outputArgs);
  }
  /**
   * Returns an async transformer that scales image streams via sharp.
   * At scaleFactor 1 it is a no-op pass-through. Errors on either side of the
   * pipe destroy the other side so neither stream leaks.
   */
  createImageTransformer(scaleFactor) {
    return async (imageStream) => {
      if (scaleFactor === 1) return imageStream;
      const sharpTransformer = sharp();
      imageStream.on("error", (err) => {
        if (!sharpTransformer.destroyed) {
          sharpTransformer.destroy(err);
        }
      });
      sharpTransformer.on("error", (err) => {
        if (!imageStream.destroyed) {
          imageStream.destroy(err);
        }
      });
      imageStream.pipe(sharpTransformer);
      try {
        const metadata = await sharpTransformer.metadata();
        // Fall back to document dimensions when metadata lacks a size.
        const imageWidth = metadata.width ?? this.effieData.width;
        const imageHeight = metadata.height ?? this.effieData.height;
        return sharpTransformer.resize({
          width: Math.floor(imageWidth * scaleFactor),
          height: Math.floor(imageHeight * scaleFactor)
        });
      } catch (error) {
        if (!sharpTransformer.destroyed) {
          sharpTransformer.destroy(error);
        }
        throw error;
      }
    };
  }
  /**
   * Resolves a source reference to its actual URL.
   * If the source is a #reference, returns the resolved URL.
   * Otherwise, returns the source as-is.
   */
  resolveReference(src) {
    if (src.startsWith("#")) {
      const sourceName = src.slice(1);
      if (sourceName in this.effieData.sources) {
        return this.effieData.sources[sourceName];
      }
    }
    return src;
  }
  /**
   * Renders the effie data to a video stream.
   * @param scaleFactor - Scale factor for output dimensions
   */
  async render(scaleFactor = 1) {
    // "-" writes the MP4 to stdout, which FFmpegRunner exposes as a stream.
    const ffmpegCommand = this.buildFFmpegCommand("-", scaleFactor);
    this.ffmpegRunner = new FFmpegRunner(ffmpegCommand);
    const urlTransformer = this.httpProxy ? (url) => this.httpProxy.transformUrl(url) : void 0;
    return this.ffmpegRunner.run(
      async ({ src }) => this.fetchSource(src),
      this.createImageTransformer(scaleFactor),
      (src) => this.resolveReference(src),
      urlTransformer
    );
  }
  // Stops the underlying FFmpeg process, if a render was started.
  close() {
    if (this.ffmpegRunner) {
      this.ffmpegRunner.close();
    }
  }
};
|
|
1461
|
-
|
|
1462
|
-
// src/handlers/rendering.ts
|
|
1463
216
|
import { effieDataSchema as effieDataSchema2 } from "@effing/effie";
|
|
1464
|
-
async function createRenderJob(req, res, ctx2) {
|
|
217
|
+
async function createRenderJob(req, res, ctx2, options) {
|
|
1465
218
|
try {
|
|
1466
219
|
const isWrapped = "effie" in req.body;
|
|
1467
220
|
let rawEffieData;
|
|
1468
221
|
let scale;
|
|
1469
222
|
let upload;
|
|
1470
223
|
if (isWrapped) {
|
|
1471
|
-
const
|
|
1472
|
-
if (typeof
|
|
1473
|
-
const response = await ffsFetch(
|
|
224
|
+
const options2 = req.body;
|
|
225
|
+
if (typeof options2.effie === "string") {
|
|
226
|
+
const response = await ffsFetch(options2.effie);
|
|
1474
227
|
if (!response.ok) {
|
|
1475
228
|
throw new Error(
|
|
1476
229
|
`Failed to fetch Effie data: ${response.status} ${response.statusText}`
|
|
@@ -1478,10 +231,10 @@ async function createRenderJob(req, res, ctx2) {
|
|
|
1478
231
|
}
|
|
1479
232
|
rawEffieData = await response.json();
|
|
1480
233
|
} else {
|
|
1481
|
-
rawEffieData =
|
|
234
|
+
rawEffieData = options2.effie;
|
|
1482
235
|
}
|
|
1483
|
-
scale =
|
|
1484
|
-
upload =
|
|
236
|
+
scale = options2.scale ?? 1;
|
|
237
|
+
upload = options2.upload;
|
|
1485
238
|
} else {
|
|
1486
239
|
rawEffieData = req.body;
|
|
1487
240
|
scale = parseFloat(req.query.scale?.toString() || "1");
|
|
@@ -1513,12 +266,13 @@ async function createRenderJob(req, res, ctx2) {
|
|
|
1513
266
|
effie,
|
|
1514
267
|
scale,
|
|
1515
268
|
upload,
|
|
1516
|
-
createdAt: Date.now()
|
|
269
|
+
createdAt: Date.now(),
|
|
270
|
+
metadata: options?.metadata
|
|
1517
271
|
};
|
|
1518
272
|
await ctx2.transientStore.putJson(
|
|
1519
273
|
storeKeys.renderJob(jobId),
|
|
1520
274
|
job,
|
|
1521
|
-
ctx2.transientStore.
|
|
275
|
+
ctx2.transientStore.jobDataTtlMs
|
|
1522
276
|
);
|
|
1523
277
|
res.json({
|
|
1524
278
|
id: jobId,
|
|
@@ -1533,17 +287,20 @@ async function streamRenderJob(req, res, ctx2) {
|
|
|
1533
287
|
try {
|
|
1534
288
|
setupCORSHeaders(res);
|
|
1535
289
|
const jobId = req.params.id;
|
|
1536
|
-
|
|
1537
|
-
|
|
1538
|
-
return;
|
|
1539
|
-
}
|
|
1540
|
-
const jobCacheKey = storeKeys.renderJob(jobId);
|
|
1541
|
-
const job = await ctx2.transientStore.getJson(jobCacheKey);
|
|
1542
|
-
ctx2.transientStore.delete(jobCacheKey);
|
|
290
|
+
const jobStoreKey = storeKeys.renderJob(jobId);
|
|
291
|
+
const job = await ctx2.transientStore.getJson(jobStoreKey);
|
|
1543
292
|
if (!job) {
|
|
1544
293
|
res.status(404).json({ error: "Job not found or expired" });
|
|
1545
294
|
return;
|
|
1546
295
|
}
|
|
296
|
+
if (ctx2.renderBackendResolver) {
|
|
297
|
+
const backend = ctx2.renderBackendResolver(job.effie, job.metadata);
|
|
298
|
+
if (backend) {
|
|
299
|
+
await proxyRenderFromBackend(res, jobId, backend);
|
|
300
|
+
return;
|
|
301
|
+
}
|
|
302
|
+
}
|
|
303
|
+
ctx2.transientStore.delete(jobStoreKey);
|
|
1547
304
|
if (job.upload) {
|
|
1548
305
|
await streamRenderWithUpload(res, job, ctx2);
|
|
1549
306
|
} else {
|
|
@@ -1559,6 +316,7 @@ async function streamRenderJob(req, res, ctx2) {
|
|
|
1559
316
|
}
|
|
1560
317
|
}
|
|
1561
318
|
async function streamRenderDirect(res, job, ctx2) {
|
|
319
|
+
const { EffieRenderer } = await import("./render-MUKKTCF6.js");
|
|
1562
320
|
const renderer = new EffieRenderer(job.effie, {
|
|
1563
321
|
transientStore: ctx2.transientStore,
|
|
1564
322
|
httpProxy: ctx2.httpProxy
|
|
@@ -1623,6 +381,7 @@ async function renderAndUploadInternal(effie, scale, upload, sendEvent, ctx2) {
|
|
|
1623
381
|
timings.uploadCoverTime = Date.now() - uploadCoverStartTime;
|
|
1624
382
|
}
|
|
1625
383
|
const renderStartTime = Date.now();
|
|
384
|
+
const { EffieRenderer } = await import("./render-MUKKTCF6.js");
|
|
1626
385
|
const renderer = new EffieRenderer(effie, {
|
|
1627
386
|
transientStore: ctx2.transientStore,
|
|
1628
387
|
httpProxy: ctx2.httpProxy
|
|
@@ -1652,10 +411,10 @@ async function renderAndUploadInternal(effie, scale, upload, sendEvent, ctx2) {
|
|
|
1652
411
|
timings.uploadTime = Date.now() - uploadStartTime;
|
|
1653
412
|
return timings;
|
|
1654
413
|
}
|
|
1655
|
-
async function proxyRenderFromBackend(res, jobId,
|
|
1656
|
-
const backendUrl = `${
|
|
414
|
+
async function proxyRenderFromBackend(res, jobId, backend) {
|
|
415
|
+
const backendUrl = `${backend.baseUrl}/render/${jobId}`;
|
|
1657
416
|
const response = await ffsFetch(backendUrl, {
|
|
1658
|
-
headers:
|
|
417
|
+
headers: backend.apiKey ? { Authorization: `Bearer ${backend.apiKey}` } : void 0
|
|
1659
418
|
});
|
|
1660
419
|
if (!response.ok) {
|
|
1661
420
|
res.status(response.status).json({ error: "Backend render failed" });
|
|
@@ -1712,12 +471,12 @@ async function proxyRenderFromBackend(res, jobId, ctx2) {
|
|
|
1712
471
|
}
|
|
1713
472
|
|
|
1714
473
|
// src/handlers/orchestrating.ts
|
|
1715
|
-
async function createWarmupAndRenderJob(req, res, ctx2) {
|
|
474
|
+
async function createWarmupAndRenderJob(req, res, ctx2, options) {
|
|
1716
475
|
try {
|
|
1717
|
-
const
|
|
476
|
+
const body = req.body;
|
|
1718
477
|
let rawEffieData;
|
|
1719
|
-
if (typeof
|
|
1720
|
-
const response = await ffsFetch(
|
|
478
|
+
if (typeof body.effie === "string") {
|
|
479
|
+
const response = await ffsFetch(body.effie);
|
|
1721
480
|
if (!response.ok) {
|
|
1722
481
|
throw new Error(
|
|
1723
482
|
`Failed to fetch Effie data: ${response.status} ${response.statusText}`
|
|
@@ -1725,7 +484,7 @@ async function createWarmupAndRenderJob(req, res, ctx2) {
|
|
|
1725
484
|
}
|
|
1726
485
|
rawEffieData = await response.json();
|
|
1727
486
|
} else {
|
|
1728
|
-
rawEffieData =
|
|
487
|
+
rawEffieData = body.effie;
|
|
1729
488
|
}
|
|
1730
489
|
let effie;
|
|
1731
490
|
if (!ctx2.skipValidation) {
|
|
@@ -1750,8 +509,8 @@ async function createWarmupAndRenderJob(req, res, ctx2) {
|
|
|
1750
509
|
effie = data;
|
|
1751
510
|
}
|
|
1752
511
|
const sources = extractEffieSourcesWithTypes(effie);
|
|
1753
|
-
const scale =
|
|
1754
|
-
const upload =
|
|
512
|
+
const scale = body.scale ?? 1;
|
|
513
|
+
const upload = body.upload;
|
|
1755
514
|
const jobId = randomUUID2();
|
|
1756
515
|
const warmupJobId = randomUUID2();
|
|
1757
516
|
const renderJobId = randomUUID2();
|
|
@@ -1762,17 +521,18 @@ async function createWarmupAndRenderJob(req, res, ctx2) {
|
|
|
1762
521
|
upload,
|
|
1763
522
|
warmupJobId,
|
|
1764
523
|
renderJobId,
|
|
1765
|
-
createdAt: Date.now()
|
|
524
|
+
createdAt: Date.now(),
|
|
525
|
+
metadata: options?.metadata
|
|
1766
526
|
};
|
|
1767
527
|
await ctx2.transientStore.putJson(
|
|
1768
528
|
storeKeys.warmupAndRenderJob(jobId),
|
|
1769
529
|
job,
|
|
1770
|
-
ctx2.transientStore.
|
|
530
|
+
ctx2.transientStore.jobDataTtlMs
|
|
1771
531
|
);
|
|
1772
532
|
await ctx2.transientStore.putJson(
|
|
1773
533
|
storeKeys.warmupJob(warmupJobId),
|
|
1774
|
-
{ sources },
|
|
1775
|
-
ctx2.transientStore.
|
|
534
|
+
{ sources, metadata: options?.metadata },
|
|
535
|
+
ctx2.transientStore.jobDataTtlMs
|
|
1776
536
|
);
|
|
1777
537
|
await ctx2.transientStore.putJson(
|
|
1778
538
|
storeKeys.renderJob(renderJobId),
|
|
@@ -1780,9 +540,10 @@ async function createWarmupAndRenderJob(req, res, ctx2) {
|
|
|
1780
540
|
effie,
|
|
1781
541
|
scale,
|
|
1782
542
|
upload,
|
|
1783
|
-
createdAt: Date.now()
|
|
543
|
+
createdAt: Date.now(),
|
|
544
|
+
metadata: options?.metadata
|
|
1784
545
|
},
|
|
1785
|
-
ctx2.transientStore.
|
|
546
|
+
ctx2.transientStore.jobDataTtlMs
|
|
1786
547
|
);
|
|
1787
548
|
res.json({
|
|
1788
549
|
id: jobId,
|
|
@@ -1797,13 +558,15 @@ async function streamWarmupAndRenderJob(req, res, ctx2) {
|
|
|
1797
558
|
try {
|
|
1798
559
|
setupCORSHeaders(res);
|
|
1799
560
|
const jobId = req.params.id;
|
|
1800
|
-
const
|
|
1801
|
-
const job = await ctx2.transientStore.getJson(
|
|
1802
|
-
ctx2.transientStore.delete(
|
|
561
|
+
const jobStoreKey = storeKeys.warmupAndRenderJob(jobId);
|
|
562
|
+
const job = await ctx2.transientStore.getJson(jobStoreKey);
|
|
563
|
+
ctx2.transientStore.delete(jobStoreKey);
|
|
1803
564
|
if (!job) {
|
|
1804
565
|
res.status(404).json({ error: "Job not found" });
|
|
1805
566
|
return;
|
|
1806
567
|
}
|
|
568
|
+
const warmupBackend = ctx2.warmupBackendResolver ? ctx2.warmupBackendResolver(job.sources, job.metadata) : null;
|
|
569
|
+
const renderBackend = ctx2.renderBackendResolver ? ctx2.renderBackendResolver(job.effie, job.metadata) : null;
|
|
1807
570
|
setupSSEResponse(res);
|
|
1808
571
|
const sendEvent = createSSEEventSender(res);
|
|
1809
572
|
let keepalivePhase = "warmup";
|
|
@@ -1811,13 +574,13 @@ async function streamWarmupAndRenderJob(req, res, ctx2) {
|
|
|
1811
574
|
sendEvent("keepalive", { phase: keepalivePhase });
|
|
1812
575
|
}, 25e3);
|
|
1813
576
|
try {
|
|
1814
|
-
if (
|
|
577
|
+
if (warmupBackend) {
|
|
1815
578
|
await proxyRemoteSSE(
|
|
1816
|
-
`${
|
|
579
|
+
`${warmupBackend.baseUrl}/warmup/${job.warmupJobId}`,
|
|
1817
580
|
sendEvent,
|
|
1818
581
|
"warmup:",
|
|
1819
582
|
res,
|
|
1820
|
-
|
|
583
|
+
warmupBackend.apiKey ? { Authorization: `Bearer ${warmupBackend.apiKey}` } : void 0
|
|
1821
584
|
);
|
|
1822
585
|
} else {
|
|
1823
586
|
const warmupSender = prefixEventSender(sendEvent, "warmup:");
|
|
@@ -1825,13 +588,13 @@ async function streamWarmupAndRenderJob(req, res, ctx2) {
|
|
|
1825
588
|
warmupSender("complete", { status: "ready" });
|
|
1826
589
|
}
|
|
1827
590
|
keepalivePhase = "render";
|
|
1828
|
-
if (
|
|
591
|
+
if (renderBackend) {
|
|
1829
592
|
await proxyRemoteSSE(
|
|
1830
|
-
`${
|
|
593
|
+
`${renderBackend.baseUrl}/render/${job.renderJobId}`,
|
|
1831
594
|
sendEvent,
|
|
1832
595
|
"render:",
|
|
1833
596
|
res,
|
|
1834
|
-
|
|
597
|
+
renderBackend.apiKey ? { Authorization: `Bearer ${renderBackend.apiKey}` } : void 0
|
|
1835
598
|
);
|
|
1836
599
|
} else {
|
|
1837
600
|
const renderSender = prefixEventSender(sendEvent, "render:");
|
|
@@ -1850,7 +613,7 @@ async function streamWarmupAndRenderJob(req, res, ctx2) {
|
|
|
1850
613
|
sendEvent("complete", { status: "ready", videoUrl });
|
|
1851
614
|
}
|
|
1852
615
|
}
|
|
1853
|
-
if (job.upload && !
|
|
616
|
+
if (job.upload && !renderBackend) {
|
|
1854
617
|
sendEvent("complete", { status: "done" });
|
|
1855
618
|
}
|
|
1856
619
|
} catch (error) {
|
|
@@ -1955,7 +718,7 @@ function shouldSkipWarmup(source) {
|
|
|
1955
718
|
return source.type === "video" || source.type === "audio";
|
|
1956
719
|
}
|
|
1957
720
|
var inFlightFetches = /* @__PURE__ */ new Map();
|
|
1958
|
-
async function createWarmupJob(req, res, ctx2) {
|
|
721
|
+
async function createWarmupJob(req, res, ctx2, options) {
|
|
1959
722
|
try {
|
|
1960
723
|
const parseResult = parseEffieData(req.body, ctx2.skipValidation);
|
|
1961
724
|
if ("error" in parseResult) {
|
|
@@ -1964,10 +727,11 @@ async function createWarmupJob(req, res, ctx2) {
|
|
|
1964
727
|
}
|
|
1965
728
|
const sources = extractEffieSourcesWithTypes2(parseResult.effie);
|
|
1966
729
|
const jobId = randomUUID3();
|
|
730
|
+
const job = { sources, metadata: options?.metadata };
|
|
1967
731
|
await ctx2.transientStore.putJson(
|
|
1968
732
|
storeKeys.warmupJob(jobId),
|
|
1969
|
-
|
|
1970
|
-
ctx2.transientStore.
|
|
733
|
+
job,
|
|
734
|
+
ctx2.transientStore.jobDataTtlMs
|
|
1971
735
|
);
|
|
1972
736
|
res.json({
|
|
1973
737
|
id: jobId,
|
|
@@ -1982,29 +746,32 @@ async function streamWarmupJob(req, res, ctx2) {
|
|
|
1982
746
|
try {
|
|
1983
747
|
setupCORSHeaders(res);
|
|
1984
748
|
const jobId = req.params.id;
|
|
1985
|
-
|
|
1986
|
-
|
|
1987
|
-
const sendEvent2 = createSSEEventSender(res);
|
|
1988
|
-
try {
|
|
1989
|
-
await proxyRemoteSSE(
|
|
1990
|
-
`${ctx2.warmupBackendBaseUrl}/warmup/${jobId}`,
|
|
1991
|
-
sendEvent2,
|
|
1992
|
-
"",
|
|
1993
|
-
res,
|
|
1994
|
-
ctx2.warmupBackendApiKey ? { Authorization: `Bearer ${ctx2.warmupBackendApiKey}` } : void 0
|
|
1995
|
-
);
|
|
1996
|
-
} finally {
|
|
1997
|
-
res.end();
|
|
1998
|
-
}
|
|
1999
|
-
return;
|
|
2000
|
-
}
|
|
2001
|
-
const jobCacheKey = storeKeys.warmupJob(jobId);
|
|
2002
|
-
const job = await ctx2.transientStore.getJson(jobCacheKey);
|
|
2003
|
-
ctx2.transientStore.delete(jobCacheKey);
|
|
749
|
+
const jobStoreKey = storeKeys.warmupJob(jobId);
|
|
750
|
+
const job = await ctx2.transientStore.getJson(jobStoreKey);
|
|
2004
751
|
if (!job) {
|
|
2005
752
|
res.status(404).json({ error: "Job not found" });
|
|
2006
753
|
return;
|
|
2007
754
|
}
|
|
755
|
+
if (ctx2.warmupBackendResolver) {
|
|
756
|
+
const backend = ctx2.warmupBackendResolver(job.sources, job.metadata);
|
|
757
|
+
if (backend) {
|
|
758
|
+
setupSSEResponse(res);
|
|
759
|
+
const sendEvent2 = createSSEEventSender(res);
|
|
760
|
+
try {
|
|
761
|
+
await proxyRemoteSSE(
|
|
762
|
+
`${backend.baseUrl}/warmup/${jobId}`,
|
|
763
|
+
sendEvent2,
|
|
764
|
+
"",
|
|
765
|
+
res,
|
|
766
|
+
backend.apiKey ? { Authorization: `Bearer ${backend.apiKey}` } : void 0
|
|
767
|
+
);
|
|
768
|
+
} finally {
|
|
769
|
+
res.end();
|
|
770
|
+
}
|
|
771
|
+
return;
|
|
772
|
+
}
|
|
773
|
+
}
|
|
774
|
+
ctx2.transientStore.delete(jobStoreKey);
|
|
2008
775
|
setupSSEResponse(res);
|
|
2009
776
|
const sendEvent = createSSEEventSender(res);
|
|
2010
777
|
try {
|
|
@@ -2152,7 +919,7 @@ async function fetchAndCache(url, cacheKey, sendEvent, ctx2) {
|
|
|
2152
919
|
throw new Error(`${response.status} ${response.statusText}`);
|
|
2153
920
|
}
|
|
2154
921
|
sendEvent("downloading", { url, status: "started", bytesReceived: 0 });
|
|
2155
|
-
const sourceStream =
|
|
922
|
+
const sourceStream = Readable2.fromWeb(
|
|
2156
923
|
response.body
|
|
2157
924
|
);
|
|
2158
925
|
let totalBytes = 0;
|
|
@@ -2182,11 +949,12 @@ async function fetchAndCache(url, cacheKey, sendEvent, ctx2) {
|
|
|
2182
949
|
}
|
|
2183
950
|
|
|
2184
951
|
// src/server.ts
|
|
2185
|
-
console.log("FFS", getFFmpegVersion());
|
|
2186
952
|
var app = express5();
|
|
2187
953
|
app.use(bodyParser.json({ limit: "50mb" }));
|
|
2188
954
|
var ctx = await createServerContext();
|
|
2189
|
-
|
|
955
|
+
if (ctx.httpProxy) {
|
|
956
|
+
console.log(`FFS HTTP proxy listening on port ${ctx.httpProxy.port}`);
|
|
957
|
+
}
|
|
2190
958
|
function validateAuth(req, res) {
|
|
2191
959
|
const apiKey = process.env.FFS_API_KEY;
|
|
2192
960
|
if (!apiKey) return true;
|
|
@@ -2225,7 +993,7 @@ var server = app.listen(port, () => {
|
|
|
2225
993
|
});
|
|
2226
994
|
function shutdown() {
|
|
2227
995
|
console.log("Shutting down FFS server...");
|
|
2228
|
-
ctx.httpProxy
|
|
996
|
+
ctx.httpProxy?.close();
|
|
2229
997
|
ctx.transientStore.close();
|
|
2230
998
|
server.close(() => {
|
|
2231
999
|
console.log("FFS server stopped");
|