@effing/ffs 0.2.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +140 -16
- package/dist/{chunk-6YHSYHDY.js → chunk-7FMPCMLO.js} +559 -215
- package/dist/chunk-7FMPCMLO.js.map +1 -0
- package/dist/{chunk-A7BAW24L.js → chunk-J64HSZNQ.js} +65 -46
- package/dist/chunk-J64HSZNQ.js.map +1 -0
- package/dist/handlers/index.d.ts +38 -4
- package/dist/handlers/index.js +10 -2
- package/dist/index.d.ts +5 -5
- package/dist/index.js +1 -1
- package/dist/{proxy-BI8OMQl0.d.ts → proxy-qTA69nOV.d.ts} +11 -7
- package/dist/server.js +660 -293
- package/dist/server.js.map +1 -1
- package/package.json +2 -2
- package/dist/chunk-6YHSYHDY.js.map +0 -1
- package/dist/chunk-A7BAW24L.js.map +0 -1
package/dist/server.js
CHANGED
|
@@ -1,13 +1,13 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
|
|
3
3
|
// src/server.ts
|
|
4
|
-
import
|
|
4
|
+
import express5 from "express";
|
|
5
5
|
import bodyParser from "body-parser";
|
|
6
6
|
|
|
7
7
|
// src/handlers/shared.ts
|
|
8
8
|
import "express";
|
|
9
9
|
|
|
10
|
-
// src/
|
|
10
|
+
// src/storage.ts
|
|
11
11
|
import {
|
|
12
12
|
S3Client,
|
|
13
13
|
PutObjectCommand,
|
|
@@ -22,11 +22,14 @@ import { pipeline } from "stream/promises";
|
|
|
22
22
|
import path from "path";
|
|
23
23
|
import os from "os";
|
|
24
24
|
import crypto from "crypto";
|
|
25
|
-
var
|
|
25
|
+
var DEFAULT_SOURCE_TTL_MS = 60 * 60 * 1e3;
|
|
26
|
+
var DEFAULT_JOB_METADATA_TTL_MS = 8 * 60 * 60 * 1e3;
|
|
27
|
+
var S3TransientStore = class {
|
|
26
28
|
client;
|
|
27
29
|
bucket;
|
|
28
30
|
prefix;
|
|
29
|
-
|
|
31
|
+
sourceTtlMs;
|
|
32
|
+
jobMetadataTtlMs;
|
|
30
33
|
constructor(options) {
|
|
31
34
|
this.client = new S3Client({
|
|
32
35
|
endpoint: options.endpoint,
|
|
@@ -39,22 +42,23 @@ var S3CacheStorage = class {
|
|
|
39
42
|
});
|
|
40
43
|
this.bucket = options.bucket;
|
|
41
44
|
this.prefix = options.prefix ?? "";
|
|
42
|
-
this.
|
|
45
|
+
this.sourceTtlMs = options.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;
|
|
46
|
+
this.jobMetadataTtlMs = options.jobMetadataTtlMs ?? DEFAULT_JOB_METADATA_TTL_MS;
|
|
43
47
|
}
|
|
44
|
-
getExpires() {
|
|
45
|
-
return new Date(Date.now() +
|
|
48
|
+
getExpires(ttlMs) {
|
|
49
|
+
return new Date(Date.now() + ttlMs);
|
|
46
50
|
}
|
|
47
51
|
getFullKey(key) {
|
|
48
52
|
return `${this.prefix}${key}`;
|
|
49
53
|
}
|
|
50
|
-
async put(key, stream) {
|
|
54
|
+
async put(key, stream, ttlMs) {
|
|
51
55
|
const upload = new Upload({
|
|
52
56
|
client: this.client,
|
|
53
57
|
params: {
|
|
54
58
|
Bucket: this.bucket,
|
|
55
59
|
Key: this.getFullKey(key),
|
|
56
60
|
Body: stream,
|
|
57
|
-
Expires: this.getExpires()
|
|
61
|
+
Expires: this.getExpires(ttlMs ?? this.sourceTtlMs)
|
|
58
62
|
}
|
|
59
63
|
});
|
|
60
64
|
await upload.done();
|
|
@@ -115,14 +119,14 @@ var S3CacheStorage = class {
|
|
|
115
119
|
throw err;
|
|
116
120
|
}
|
|
117
121
|
}
|
|
118
|
-
async putJson(key, data) {
|
|
122
|
+
async putJson(key, data, ttlMs) {
|
|
119
123
|
await this.client.send(
|
|
120
124
|
new PutObjectCommand({
|
|
121
125
|
Bucket: this.bucket,
|
|
122
126
|
Key: this.getFullKey(key),
|
|
123
127
|
Body: JSON.stringify(data),
|
|
124
128
|
ContentType: "application/json",
|
|
125
|
-
Expires: this.getExpires()
|
|
129
|
+
Expires: this.getExpires(ttlMs ?? this.jobMetadataTtlMs)
|
|
126
130
|
})
|
|
127
131
|
);
|
|
128
132
|
}
|
|
@@ -148,20 +152,25 @@ var S3CacheStorage = class {
|
|
|
148
152
|
close() {
|
|
149
153
|
}
|
|
150
154
|
};
|
|
151
|
-
var
|
|
155
|
+
var LocalTransientStore = class {
|
|
152
156
|
baseDir;
|
|
153
157
|
initialized = false;
|
|
154
158
|
cleanupInterval;
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
+
sourceTtlMs;
|
|
160
|
+
jobMetadataTtlMs;
|
|
161
|
+
/** For cleanup, use the longer of the two TTLs */
|
|
162
|
+
maxTtlMs;
|
|
163
|
+
constructor(options) {
|
|
164
|
+
this.baseDir = options?.baseDir ?? path.join(os.tmpdir(), "ffs-transient");
|
|
165
|
+
this.sourceTtlMs = options?.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;
|
|
166
|
+
this.jobMetadataTtlMs = options?.jobMetadataTtlMs ?? DEFAULT_JOB_METADATA_TTL_MS;
|
|
167
|
+
this.maxTtlMs = Math.max(this.sourceTtlMs, this.jobMetadataTtlMs);
|
|
159
168
|
this.cleanupInterval = setInterval(() => {
|
|
160
169
|
this.cleanupExpired().catch(console.error);
|
|
161
170
|
}, 3e5);
|
|
162
171
|
}
|
|
163
172
|
/**
|
|
164
|
-
* Remove files older than TTL
|
|
173
|
+
* Remove files older than max TTL
|
|
165
174
|
*/
|
|
166
175
|
async cleanupExpired() {
|
|
167
176
|
if (!this.initialized) return;
|
|
@@ -186,7 +195,7 @@ var LocalCacheStorage = class {
|
|
|
186
195
|
} else if (entry.isFile()) {
|
|
187
196
|
try {
|
|
188
197
|
const stat = await fs.stat(fullPath);
|
|
189
|
-
if (now - stat.mtimeMs > this.
|
|
198
|
+
if (now - stat.mtimeMs > this.maxTtlMs) {
|
|
190
199
|
await fs.rm(fullPath, { force: true });
|
|
191
200
|
}
|
|
192
201
|
} catch {
|
|
@@ -205,7 +214,7 @@ var LocalCacheStorage = class {
|
|
|
205
214
|
const rand = crypto.randomBytes(8).toString("hex");
|
|
206
215
|
return `${finalPath}.tmp-${process.pid}-${rand}`;
|
|
207
216
|
}
|
|
208
|
-
async put(key, stream) {
|
|
217
|
+
async put(key, stream, _ttlMs) {
|
|
209
218
|
const fp = this.filePath(key);
|
|
210
219
|
await this.ensureDir(fp);
|
|
211
220
|
const tmpPath = this.tmpPathFor(fp);
|
|
@@ -241,7 +250,7 @@ var LocalCacheStorage = class {
|
|
|
241
250
|
async delete(key) {
|
|
242
251
|
await fs.rm(this.filePath(key), { force: true });
|
|
243
252
|
}
|
|
244
|
-
async putJson(key, data) {
|
|
253
|
+
async putJson(key, data, _ttlMs) {
|
|
245
254
|
const fp = this.filePath(key);
|
|
246
255
|
await this.ensureDir(fp);
|
|
247
256
|
const tmpPath = this.tmpPathFor(fp);
|
|
@@ -269,37 +278,47 @@ var LocalCacheStorage = class {
|
|
|
269
278
|
}
|
|
270
279
|
}
|
|
271
280
|
};
|
|
272
|
-
function
|
|
273
|
-
const
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
281
|
+
function createTransientStore() {
|
|
282
|
+
const sourceTtlMs = process.env.FFS_SOURCE_CACHE_TTL_MS ? parseInt(process.env.FFS_SOURCE_CACHE_TTL_MS, 10) : DEFAULT_SOURCE_TTL_MS;
|
|
283
|
+
const jobMetadataTtlMs = process.env.FFS_JOB_METADATA_TTL_MS ? parseInt(process.env.FFS_JOB_METADATA_TTL_MS, 10) : DEFAULT_JOB_METADATA_TTL_MS;
|
|
284
|
+
if (process.env.FFS_TRANSIENT_STORE_BUCKET) {
|
|
285
|
+
return new S3TransientStore({
|
|
286
|
+
endpoint: process.env.FFS_TRANSIENT_STORE_ENDPOINT,
|
|
287
|
+
region: process.env.FFS_TRANSIENT_STORE_REGION ?? "auto",
|
|
288
|
+
bucket: process.env.FFS_TRANSIENT_STORE_BUCKET,
|
|
289
|
+
prefix: process.env.FFS_TRANSIENT_STORE_PREFIX,
|
|
290
|
+
accessKeyId: process.env.FFS_TRANSIENT_STORE_ACCESS_KEY,
|
|
291
|
+
secretAccessKey: process.env.FFS_TRANSIENT_STORE_SECRET_KEY,
|
|
292
|
+
sourceTtlMs,
|
|
293
|
+
jobMetadataTtlMs
|
|
283
294
|
});
|
|
284
295
|
}
|
|
285
|
-
return new
|
|
296
|
+
return new LocalTransientStore({
|
|
297
|
+
baseDir: process.env.FFS_TRANSIENT_STORE_LOCAL_DIR,
|
|
298
|
+
sourceTtlMs,
|
|
299
|
+
jobMetadataTtlMs
|
|
300
|
+
});
|
|
286
301
|
}
|
|
287
302
|
function hashUrl(url) {
|
|
288
303
|
return crypto.createHash("sha256").update(url).digest("hex").slice(0, 16);
|
|
289
304
|
}
|
|
290
|
-
function
|
|
305
|
+
function sourceStoreKey(url) {
|
|
291
306
|
return `sources/${hashUrl(url)}`;
|
|
292
307
|
}
|
|
293
|
-
function
|
|
308
|
+
function warmupJobStoreKey(jobId) {
|
|
294
309
|
return `jobs/warmup/${jobId}.json`;
|
|
295
310
|
}
|
|
296
|
-
function
|
|
311
|
+
function renderJobStoreKey(jobId) {
|
|
297
312
|
return `jobs/render/${jobId}.json`;
|
|
298
313
|
}
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
314
|
+
function warmupAndRenderJobStoreKey(jobId) {
|
|
315
|
+
return `jobs/warmup-and-render/${jobId}.json`;
|
|
316
|
+
}
|
|
317
|
+
var storeKeys = {
|
|
318
|
+
source: sourceStoreKey,
|
|
319
|
+
warmupJob: warmupJobStoreKey,
|
|
320
|
+
renderJob: renderJobStoreKey,
|
|
321
|
+
warmupAndRenderJob: warmupAndRenderJobStoreKey
|
|
303
322
|
};
|
|
304
323
|
|
|
305
324
|
// src/proxy.ts
|
|
@@ -450,15 +469,19 @@ var HttpProxy = class {
|
|
|
450
469
|
// src/handlers/shared.ts
|
|
451
470
|
import { effieDataSchema } from "@effing/effie";
|
|
452
471
|
async function createServerContext() {
|
|
453
|
-
const port2 = process.env.FFS_PORT || 2e3;
|
|
472
|
+
const port2 = process.env.FFS_PORT || process.env.PORT || 2e3;
|
|
454
473
|
const httpProxy = new HttpProxy();
|
|
455
474
|
await httpProxy.start();
|
|
456
475
|
return {
|
|
457
|
-
|
|
476
|
+
transientStore: createTransientStore(),
|
|
458
477
|
httpProxy,
|
|
459
478
|
baseUrl: process.env.FFS_BASE_URL || `http://localhost:${port2}`,
|
|
460
479
|
skipValidation: !!process.env.FFS_SKIP_VALIDATION && process.env.FFS_SKIP_VALIDATION !== "false",
|
|
461
|
-
|
|
480
|
+
warmupConcurrency: parseInt(process.env.FFS_WARMUP_CONCURRENCY || "4", 10),
|
|
481
|
+
warmupBackendBaseUrl: process.env.FFS_WARMUP_BACKEND_BASE_URL,
|
|
482
|
+
renderBackendBaseUrl: process.env.FFS_RENDER_BACKEND_BASE_URL,
|
|
483
|
+
warmupBackendApiKey: process.env.FFS_WARMUP_BACKEND_API_KEY,
|
|
484
|
+
renderBackendApiKey: process.env.FFS_RENDER_BACKEND_API_KEY
|
|
462
485
|
};
|
|
463
486
|
}
|
|
464
487
|
function parseEffieData(body, skipValidation) {
|
|
@@ -505,224 +528,24 @@ data: ${JSON.stringify(data)}
|
|
|
505
528
|
|
|
506
529
|
// src/handlers/caching.ts
|
|
507
530
|
import "express";
|
|
508
|
-
import { Readable as
|
|
509
|
-
import { randomUUID } from "crypto";
|
|
531
|
+
import { Readable as Readable3, Transform } from "stream";
|
|
532
|
+
import { randomUUID as randomUUID3 } from "crypto";
|
|
510
533
|
import {
|
|
511
534
|
extractEffieSources,
|
|
512
|
-
extractEffieSourcesWithTypes
|
|
535
|
+
extractEffieSourcesWithTypes as extractEffieSourcesWithTypes2
|
|
513
536
|
} from "@effing/effie";
|
|
514
|
-
function shouldSkipWarmup(source) {
|
|
515
|
-
return source.type === "video" || source.type === "audio";
|
|
516
|
-
}
|
|
517
|
-
var inFlightFetches = /* @__PURE__ */ new Map();
|
|
518
|
-
async function createWarmupJob(req, res, ctx2) {
|
|
519
|
-
try {
|
|
520
|
-
const parseResult = parseEffieData(req.body, ctx2.skipValidation);
|
|
521
|
-
if ("error" in parseResult) {
|
|
522
|
-
res.status(400).json(parseResult);
|
|
523
|
-
return;
|
|
524
|
-
}
|
|
525
|
-
const sources = extractEffieSourcesWithTypes(parseResult.effie);
|
|
526
|
-
const jobId = randomUUID();
|
|
527
|
-
await ctx2.cacheStorage.putJson(cacheKeys.warmupJob(jobId), { sources });
|
|
528
|
-
res.json({
|
|
529
|
-
id: jobId,
|
|
530
|
-
url: `${ctx2.baseUrl}/warmup/${jobId}`
|
|
531
|
-
});
|
|
532
|
-
} catch (error) {
|
|
533
|
-
console.error("Error creating warmup job:", error);
|
|
534
|
-
res.status(500).json({ error: "Failed to create warmup job" });
|
|
535
|
-
}
|
|
536
|
-
}
|
|
537
|
-
async function streamWarmupJob(req, res, ctx2) {
|
|
538
|
-
try {
|
|
539
|
-
setupCORSHeaders(res);
|
|
540
|
-
const jobId = req.params.id;
|
|
541
|
-
const jobCacheKey = cacheKeys.warmupJob(jobId);
|
|
542
|
-
const job = await ctx2.cacheStorage.getJson(jobCacheKey);
|
|
543
|
-
ctx2.cacheStorage.delete(jobCacheKey);
|
|
544
|
-
if (!job) {
|
|
545
|
-
res.status(404).json({ error: "Job not found" });
|
|
546
|
-
return;
|
|
547
|
-
}
|
|
548
|
-
setupSSEResponse(res);
|
|
549
|
-
const sendEvent = createSSEEventSender(res);
|
|
550
|
-
try {
|
|
551
|
-
await warmupSources(job.sources, sendEvent, ctx2);
|
|
552
|
-
sendEvent("complete", { status: "ready" });
|
|
553
|
-
} catch (error) {
|
|
554
|
-
sendEvent("error", { message: String(error) });
|
|
555
|
-
} finally {
|
|
556
|
-
res.end();
|
|
557
|
-
}
|
|
558
|
-
} catch (error) {
|
|
559
|
-
console.error("Error in warmup streaming:", error);
|
|
560
|
-
if (!res.headersSent) {
|
|
561
|
-
res.status(500).json({ error: "Warmup streaming failed" });
|
|
562
|
-
} else {
|
|
563
|
-
res.end();
|
|
564
|
-
}
|
|
565
|
-
}
|
|
566
|
-
}
|
|
567
|
-
async function purgeCache(req, res, ctx2) {
|
|
568
|
-
try {
|
|
569
|
-
const parseResult = parseEffieData(req.body, ctx2.skipValidation);
|
|
570
|
-
if ("error" in parseResult) {
|
|
571
|
-
res.status(400).json(parseResult);
|
|
572
|
-
return;
|
|
573
|
-
}
|
|
574
|
-
const sources = extractEffieSources(parseResult.effie);
|
|
575
|
-
let purged = 0;
|
|
576
|
-
for (const url of sources) {
|
|
577
|
-
const ck = cacheKeys.source(url);
|
|
578
|
-
if (await ctx2.cacheStorage.exists(ck)) {
|
|
579
|
-
await ctx2.cacheStorage.delete(ck);
|
|
580
|
-
purged++;
|
|
581
|
-
}
|
|
582
|
-
}
|
|
583
|
-
res.json({ purged, total: sources.length });
|
|
584
|
-
} catch (error) {
|
|
585
|
-
console.error("Error purging cache:", error);
|
|
586
|
-
res.status(500).json({ error: "Failed to purge cache" });
|
|
587
|
-
}
|
|
588
|
-
}
|
|
589
|
-
async function warmupSources(sources, sendEvent, ctx2) {
|
|
590
|
-
const total = sources.length;
|
|
591
|
-
sendEvent("start", { total });
|
|
592
|
-
let cached = 0;
|
|
593
|
-
let failed = 0;
|
|
594
|
-
let skipped = 0;
|
|
595
|
-
const sourcesToCache = [];
|
|
596
|
-
for (const source of sources) {
|
|
597
|
-
if (shouldSkipWarmup(source)) {
|
|
598
|
-
skipped++;
|
|
599
|
-
sendEvent("progress", {
|
|
600
|
-
url: source.url,
|
|
601
|
-
status: "skipped",
|
|
602
|
-
reason: "http-video-audio-passthrough",
|
|
603
|
-
cached,
|
|
604
|
-
failed,
|
|
605
|
-
skipped,
|
|
606
|
-
total
|
|
607
|
-
});
|
|
608
|
-
} else {
|
|
609
|
-
sourcesToCache.push(source);
|
|
610
|
-
}
|
|
611
|
-
}
|
|
612
|
-
const sourceCacheKeys = sourcesToCache.map((s) => cacheKeys.source(s.url));
|
|
613
|
-
const existsMap = await ctx2.cacheStorage.existsMany(sourceCacheKeys);
|
|
614
|
-
for (let i = 0; i < sourcesToCache.length; i++) {
|
|
615
|
-
if (existsMap.get(sourceCacheKeys[i])) {
|
|
616
|
-
cached++;
|
|
617
|
-
sendEvent("progress", {
|
|
618
|
-
url: sourcesToCache[i].url,
|
|
619
|
-
status: "hit",
|
|
620
|
-
cached,
|
|
621
|
-
failed,
|
|
622
|
-
skipped,
|
|
623
|
-
total
|
|
624
|
-
});
|
|
625
|
-
}
|
|
626
|
-
}
|
|
627
|
-
const uncached = sourcesToCache.filter(
|
|
628
|
-
(_, i) => !existsMap.get(sourceCacheKeys[i])
|
|
629
|
-
);
|
|
630
|
-
if (uncached.length === 0) {
|
|
631
|
-
sendEvent("summary", { cached, failed, skipped, total });
|
|
632
|
-
return;
|
|
633
|
-
}
|
|
634
|
-
const keepalive = setInterval(() => {
|
|
635
|
-
sendEvent("keepalive", { cached, failed, skipped, total });
|
|
636
|
-
}, 25e3);
|
|
637
|
-
const queue = [...uncached];
|
|
638
|
-
const workers = Array.from(
|
|
639
|
-
{ length: Math.min(ctx2.cacheConcurrency, queue.length) },
|
|
640
|
-
async () => {
|
|
641
|
-
while (queue.length > 0) {
|
|
642
|
-
const source = queue.shift();
|
|
643
|
-
const cacheKey = cacheKeys.source(source.url);
|
|
644
|
-
const startTime = Date.now();
|
|
645
|
-
try {
|
|
646
|
-
let fetchPromise = inFlightFetches.get(cacheKey);
|
|
647
|
-
if (!fetchPromise) {
|
|
648
|
-
fetchPromise = fetchAndCache(source.url, cacheKey, sendEvent, ctx2);
|
|
649
|
-
inFlightFetches.set(cacheKey, fetchPromise);
|
|
650
|
-
}
|
|
651
|
-
await fetchPromise;
|
|
652
|
-
inFlightFetches.delete(cacheKey);
|
|
653
|
-
cached++;
|
|
654
|
-
sendEvent("progress", {
|
|
655
|
-
url: source.url,
|
|
656
|
-
status: "cached",
|
|
657
|
-
cached,
|
|
658
|
-
failed,
|
|
659
|
-
skipped,
|
|
660
|
-
total,
|
|
661
|
-
ms: Date.now() - startTime
|
|
662
|
-
});
|
|
663
|
-
} catch (error) {
|
|
664
|
-
inFlightFetches.delete(cacheKey);
|
|
665
|
-
failed++;
|
|
666
|
-
sendEvent("progress", {
|
|
667
|
-
url: source.url,
|
|
668
|
-
status: "error",
|
|
669
|
-
error: String(error),
|
|
670
|
-
cached,
|
|
671
|
-
failed,
|
|
672
|
-
skipped,
|
|
673
|
-
total,
|
|
674
|
-
ms: Date.now() - startTime
|
|
675
|
-
});
|
|
676
|
-
}
|
|
677
|
-
}
|
|
678
|
-
}
|
|
679
|
-
);
|
|
680
|
-
await Promise.all(workers);
|
|
681
|
-
clearInterval(keepalive);
|
|
682
|
-
sendEvent("summary", { cached, failed, skipped, total });
|
|
683
|
-
}
|
|
684
|
-
async function fetchAndCache(url, cacheKey, sendEvent, ctx2) {
|
|
685
|
-
const response = await ffsFetch(url, {
|
|
686
|
-
headersTimeout: 10 * 60 * 1e3,
|
|
687
|
-
// 10 minutes
|
|
688
|
-
bodyTimeout: 20 * 60 * 1e3
|
|
689
|
-
// 20 minutes
|
|
690
|
-
});
|
|
691
|
-
if (!response.ok) {
|
|
692
|
-
throw new Error(`${response.status} ${response.statusText}`);
|
|
693
|
-
}
|
|
694
|
-
sendEvent("downloading", { url, status: "started", bytesReceived: 0 });
|
|
695
|
-
const sourceStream = Readable2.fromWeb(
|
|
696
|
-
response.body
|
|
697
|
-
);
|
|
698
|
-
let totalBytes = 0;
|
|
699
|
-
let lastEventTime = Date.now();
|
|
700
|
-
const PROGRESS_INTERVAL = 1e4;
|
|
701
|
-
const progressStream = new Transform({
|
|
702
|
-
transform(chunk, _encoding, callback) {
|
|
703
|
-
totalBytes += chunk.length;
|
|
704
|
-
const now = Date.now();
|
|
705
|
-
if (now - lastEventTime >= PROGRESS_INTERVAL) {
|
|
706
|
-
sendEvent("downloading", {
|
|
707
|
-
url,
|
|
708
|
-
status: "downloading",
|
|
709
|
-
bytesReceived: totalBytes
|
|
710
|
-
});
|
|
711
|
-
lastEventTime = now;
|
|
712
|
-
}
|
|
713
|
-
callback(null, chunk);
|
|
714
|
-
}
|
|
715
|
-
});
|
|
716
|
-
const trackedStream = sourceStream.pipe(progressStream);
|
|
717
|
-
await ctx2.cacheStorage.put(cacheKey, trackedStream);
|
|
718
|
-
}
|
|
719
537
|
|
|
720
|
-
// src/handlers/
|
|
538
|
+
// src/handlers/orchestrating.ts
|
|
721
539
|
import "express";
|
|
722
540
|
import { randomUUID as randomUUID2 } from "crypto";
|
|
541
|
+
import { extractEffieSourcesWithTypes, effieDataSchema as effieDataSchema3 } from "@effing/effie";
|
|
542
|
+
|
|
543
|
+
// src/handlers/rendering.ts
|
|
544
|
+
import "express";
|
|
545
|
+
import { randomUUID } from "crypto";
|
|
723
546
|
|
|
724
547
|
// src/render.ts
|
|
725
|
-
import { Readable as
|
|
548
|
+
import { Readable as Readable2 } from "stream";
|
|
726
549
|
import { createReadStream as createReadStream2 } from "fs";
|
|
727
550
|
|
|
728
551
|
// src/motion.ts
|
|
@@ -1133,12 +956,12 @@ var EffieRenderer = class {
|
|
|
1133
956
|
effieData;
|
|
1134
957
|
ffmpegRunner;
|
|
1135
958
|
allowLocalFiles;
|
|
1136
|
-
|
|
959
|
+
transientStore;
|
|
1137
960
|
httpProxy;
|
|
1138
961
|
constructor(effieData, options) {
|
|
1139
962
|
this.effieData = effieData;
|
|
1140
963
|
this.allowLocalFiles = options?.allowLocalFiles ?? false;
|
|
1141
|
-
this.
|
|
964
|
+
this.transientStore = options?.transientStore;
|
|
1142
965
|
this.httpProxy = options?.httpProxy;
|
|
1143
966
|
}
|
|
1144
967
|
async fetchSource(src) {
|
|
@@ -1151,7 +974,7 @@ var EffieRenderer = class {
|
|
|
1151
974
|
const isBase64 = meta.endsWith(";base64");
|
|
1152
975
|
const data = src.slice(commaIndex + 1);
|
|
1153
976
|
const buffer = isBase64 ? Buffer.from(data, "base64") : Buffer.from(decodeURIComponent(data));
|
|
1154
|
-
return
|
|
977
|
+
return Readable2.from(buffer);
|
|
1155
978
|
}
|
|
1156
979
|
if (src.startsWith("file:")) {
|
|
1157
980
|
if (!this.allowLocalFiles) {
|
|
@@ -1161,9 +984,9 @@ var EffieRenderer = class {
|
|
|
1161
984
|
}
|
|
1162
985
|
return createReadStream2(fileURLToPath(src));
|
|
1163
986
|
}
|
|
1164
|
-
if (this.
|
|
1165
|
-
const cachedStream = await this.
|
|
1166
|
-
|
|
987
|
+
if (this.transientStore) {
|
|
988
|
+
const cachedStream = await this.transientStore.getStream(
|
|
989
|
+
storeKeys.source(src)
|
|
1167
990
|
);
|
|
1168
991
|
if (cachedStream) {
|
|
1169
992
|
return cachedStream;
|
|
@@ -1183,7 +1006,7 @@ var EffieRenderer = class {
|
|
|
1183
1006
|
if (!response.body) {
|
|
1184
1007
|
throw new Error(`No body for ${src}`);
|
|
1185
1008
|
}
|
|
1186
|
-
return
|
|
1009
|
+
return Readable2.fromWeb(response.body);
|
|
1187
1010
|
}
|
|
1188
1011
|
buildAudioFilter({
|
|
1189
1012
|
duration,
|
|
@@ -1681,14 +1504,18 @@ async function createRenderJob(req, res, ctx2) {
|
|
|
1681
1504
|
}
|
|
1682
1505
|
effie = data;
|
|
1683
1506
|
}
|
|
1684
|
-
const jobId =
|
|
1507
|
+
const jobId = randomUUID();
|
|
1685
1508
|
const job = {
|
|
1686
1509
|
effie,
|
|
1687
1510
|
scale,
|
|
1688
1511
|
upload,
|
|
1689
1512
|
createdAt: Date.now()
|
|
1690
1513
|
};
|
|
1691
|
-
await ctx2.
|
|
1514
|
+
await ctx2.transientStore.putJson(
|
|
1515
|
+
storeKeys.renderJob(jobId),
|
|
1516
|
+
job,
|
|
1517
|
+
ctx2.transientStore.jobMetadataTtlMs
|
|
1518
|
+
);
|
|
1692
1519
|
res.json({
|
|
1693
1520
|
id: jobId,
|
|
1694
1521
|
url: `${ctx2.baseUrl}/render/${jobId}`
|
|
@@ -1702,9 +1529,13 @@ async function streamRenderJob(req, res, ctx2) {
|
|
|
1702
1529
|
try {
|
|
1703
1530
|
setupCORSHeaders(res);
|
|
1704
1531
|
const jobId = req.params.id;
|
|
1705
|
-
|
|
1706
|
-
|
|
1707
|
-
|
|
1532
|
+
if (ctx2.renderBackendBaseUrl) {
|
|
1533
|
+
await proxyRenderFromBackend(res, jobId, ctx2);
|
|
1534
|
+
return;
|
|
1535
|
+
}
|
|
1536
|
+
const jobCacheKey = storeKeys.renderJob(jobId);
|
|
1537
|
+
const job = await ctx2.transientStore.getJson(jobCacheKey);
|
|
1538
|
+
ctx2.transientStore.delete(jobCacheKey);
|
|
1708
1539
|
if (!job) {
|
|
1709
1540
|
res.status(404).json({ error: "Job not found or expired" });
|
|
1710
1541
|
return;
|
|
@@ -1725,7 +1556,7 @@ async function streamRenderJob(req, res, ctx2) {
|
|
|
1725
1556
|
}
|
|
1726
1557
|
async function streamRenderDirect(res, job, ctx2) {
|
|
1727
1558
|
const renderer = new EffieRenderer(job.effie, {
|
|
1728
|
-
|
|
1559
|
+
transientStore: ctx2.transientStore,
|
|
1729
1560
|
httpProxy: ctx2.httpProxy
|
|
1730
1561
|
});
|
|
1731
1562
|
const videoStream = await renderer.render(job.scale);
|
|
@@ -1789,7 +1620,7 @@ async function renderAndUploadInternal(effie, scale, upload, sendEvent, ctx2) {
|
|
|
1789
1620
|
}
|
|
1790
1621
|
const renderStartTime = Date.now();
|
|
1791
1622
|
const renderer = new EffieRenderer(effie, {
|
|
1792
|
-
|
|
1623
|
+
transientStore: ctx2.transientStore,
|
|
1793
1624
|
httpProxy: ctx2.httpProxy
|
|
1794
1625
|
});
|
|
1795
1626
|
const videoStream = await renderer.render(scale);
|
|
@@ -1817,44 +1648,580 @@ async function renderAndUploadInternal(effie, scale, upload, sendEvent, ctx2) {
|
|
|
1817
1648
|
timings.uploadTime = Date.now() - uploadStartTime;
|
|
1818
1649
|
return timings;
|
|
1819
1650
|
}
|
|
1820
|
-
|
|
1821
|
-
|
|
1822
|
-
|
|
1823
|
-
|
|
1824
|
-
|
|
1825
|
-
|
|
1826
|
-
|
|
1827
|
-
|
|
1828
|
-
|
|
1829
|
-
const
|
|
1830
|
-
if (
|
|
1831
|
-
res
|
|
1832
|
-
|
|
1651
|
+
async function proxyRenderFromBackend(res, jobId, ctx2) {
|
|
1652
|
+
const backendUrl = `${ctx2.renderBackendBaseUrl}/render/${jobId}`;
|
|
1653
|
+
const response = await ffsFetch(backendUrl, {
|
|
1654
|
+
headers: ctx2.renderBackendApiKey ? { Authorization: `Bearer ${ctx2.renderBackendApiKey}` } : void 0
|
|
1655
|
+
});
|
|
1656
|
+
if (!response.ok) {
|
|
1657
|
+
res.status(response.status).json({ error: "Backend render failed" });
|
|
1658
|
+
return;
|
|
1659
|
+
}
|
|
1660
|
+
const contentType = response.headers.get("content-type") || "";
|
|
1661
|
+
if (contentType.includes("text/event-stream")) {
|
|
1662
|
+
setupSSEResponse(res);
|
|
1663
|
+
const sendEvent = createSSEEventSender(res);
|
|
1664
|
+
const reader = response.body?.getReader();
|
|
1665
|
+
if (!reader) {
|
|
1666
|
+
sendEvent("error", { message: "No response body from backend" });
|
|
1667
|
+
res.end();
|
|
1668
|
+
return;
|
|
1669
|
+
}
|
|
1670
|
+
const decoder = new TextDecoder();
|
|
1671
|
+
let buffer = "";
|
|
1672
|
+
try {
|
|
1673
|
+
while (true) {
|
|
1674
|
+
const { done, value } = await reader.read();
|
|
1675
|
+
if (done) break;
|
|
1676
|
+
if (res.destroyed) {
|
|
1677
|
+
reader.cancel();
|
|
1678
|
+
break;
|
|
1679
|
+
}
|
|
1680
|
+
buffer += decoder.decode(value, { stream: true });
|
|
1681
|
+
const lines = buffer.split("\n");
|
|
1682
|
+
buffer = lines.pop() || "";
|
|
1683
|
+
let currentEvent = "";
|
|
1684
|
+
let currentData = "";
|
|
1685
|
+
for (const line of lines) {
|
|
1686
|
+
if (line.startsWith("event: ")) {
|
|
1687
|
+
currentEvent = line.slice(7);
|
|
1688
|
+
} else if (line.startsWith("data: ")) {
|
|
1689
|
+
currentData = line.slice(6);
|
|
1690
|
+
} else if (line === "" && currentEvent && currentData) {
|
|
1691
|
+
try {
|
|
1692
|
+
const data = JSON.parse(currentData);
|
|
1693
|
+
sendEvent(currentEvent, data);
|
|
1694
|
+
} catch {
|
|
1695
|
+
}
|
|
1696
|
+
currentEvent = "";
|
|
1697
|
+
currentData = "";
|
|
1698
|
+
}
|
|
1699
|
+
}
|
|
1700
|
+
}
|
|
1701
|
+
} finally {
|
|
1702
|
+
reader.releaseLock();
|
|
1703
|
+
res.end();
|
|
1704
|
+
}
|
|
1705
|
+
} else {
|
|
1706
|
+
await proxyBinaryStream(response, res);
|
|
1833
1707
|
}
|
|
1834
|
-
return true;
|
|
1835
1708
|
}
|
|
1836
|
-
|
|
1837
|
-
|
|
1838
|
-
|
|
1839
|
-
|
|
1840
|
-
|
|
1841
|
-
|
|
1842
|
-
|
|
1843
|
-
|
|
1844
|
-
|
|
1845
|
-
|
|
1846
|
-
|
|
1709
|
+
|
|
1710
|
+
// src/handlers/orchestrating.ts
|
|
1711
|
+
async function createWarmupAndRenderJob(req, res, ctx2) {
|
|
1712
|
+
try {
|
|
1713
|
+
const options = req.body;
|
|
1714
|
+
let rawEffieData;
|
|
1715
|
+
if (typeof options.effie === "string") {
|
|
1716
|
+
const response = await ffsFetch(options.effie);
|
|
1717
|
+
if (!response.ok) {
|
|
1718
|
+
throw new Error(
|
|
1719
|
+
`Failed to fetch Effie data: ${response.status} ${response.statusText}`
|
|
1720
|
+
);
|
|
1721
|
+
}
|
|
1722
|
+
rawEffieData = await response.json();
|
|
1723
|
+
} else {
|
|
1724
|
+
rawEffieData = options.effie;
|
|
1725
|
+
}
|
|
1726
|
+
let effie;
|
|
1727
|
+
if (!ctx2.skipValidation) {
|
|
1728
|
+
const result = effieDataSchema3.safeParse(rawEffieData);
|
|
1729
|
+
if (!result.success) {
|
|
1730
|
+
res.status(400).json({
|
|
1731
|
+
error: "Invalid effie data",
|
|
1732
|
+
issues: result.error.issues.map((issue) => ({
|
|
1733
|
+
path: issue.path.join("."),
|
|
1734
|
+
message: issue.message
|
|
1735
|
+
}))
|
|
1736
|
+
});
|
|
1737
|
+
return;
|
|
1738
|
+
}
|
|
1739
|
+
effie = result.data;
|
|
1740
|
+
} else {
|
|
1741
|
+
const data = rawEffieData;
|
|
1742
|
+
if (!data?.segments) {
|
|
1743
|
+
res.status(400).json({ error: "Invalid effie data: missing segments" });
|
|
1744
|
+
return;
|
|
1745
|
+
}
|
|
1746
|
+
effie = data;
|
|
1747
|
+
}
|
|
1748
|
+
const sources = extractEffieSourcesWithTypes(effie);
|
|
1749
|
+
const scale = options.scale ?? 1;
|
|
1750
|
+
const upload = options.upload;
|
|
1751
|
+
const jobId = randomUUID2();
|
|
1752
|
+
const warmupJobId = randomUUID2();
|
|
1753
|
+
const renderJobId = randomUUID2();
|
|
1754
|
+
const job = {
|
|
1755
|
+
effie,
|
|
1756
|
+
sources,
|
|
1757
|
+
scale,
|
|
1758
|
+
upload,
|
|
1759
|
+
warmupJobId,
|
|
1760
|
+
renderJobId,
|
|
1761
|
+
createdAt: Date.now()
|
|
1762
|
+
};
|
|
1763
|
+
await ctx2.transientStore.putJson(
|
|
1764
|
+
storeKeys.warmupAndRenderJob(jobId),
|
|
1765
|
+
job,
|
|
1766
|
+
ctx2.transientStore.jobMetadataTtlMs
|
|
1767
|
+
);
|
|
1768
|
+
await ctx2.transientStore.putJson(
|
|
1769
|
+
storeKeys.warmupJob(warmupJobId),
|
|
1770
|
+
{ sources },
|
|
1771
|
+
ctx2.transientStore.jobMetadataTtlMs
|
|
1772
|
+
);
|
|
1773
|
+
await ctx2.transientStore.putJson(
|
|
1774
|
+
storeKeys.renderJob(renderJobId),
|
|
1775
|
+
{
|
|
1776
|
+
effie,
|
|
1777
|
+
scale,
|
|
1778
|
+
upload,
|
|
1779
|
+
createdAt: Date.now()
|
|
1780
|
+
},
|
|
1781
|
+
ctx2.transientStore.jobMetadataTtlMs
|
|
1782
|
+
);
|
|
1783
|
+
res.json({
|
|
1784
|
+
id: jobId,
|
|
1785
|
+
url: `${ctx2.baseUrl}/warmup-and-render/${jobId}`
|
|
1786
|
+
});
|
|
1787
|
+
} catch (error) {
|
|
1788
|
+
console.error("Error creating warmup-and-render job:", error);
|
|
1789
|
+
res.status(500).json({ error: "Failed to create warmup-and-render job" });
|
|
1790
|
+
}
|
|
1791
|
+
}
|
|
1792
|
+
async function streamWarmupAndRenderJob(req, res, ctx2) {
|
|
1793
|
+
try {
|
|
1794
|
+
setupCORSHeaders(res);
|
|
1795
|
+
const jobId = req.params.id;
|
|
1796
|
+
const jobCacheKey = storeKeys.warmupAndRenderJob(jobId);
|
|
1797
|
+
const job = await ctx2.transientStore.getJson(jobCacheKey);
|
|
1798
|
+
ctx2.transientStore.delete(jobCacheKey);
|
|
1799
|
+
if (!job) {
|
|
1800
|
+
res.status(404).json({ error: "Job not found" });
|
|
1801
|
+
return;
|
|
1802
|
+
}
|
|
1803
|
+
setupSSEResponse(res);
|
|
1804
|
+
const sendEvent = createSSEEventSender(res);
|
|
1805
|
+
let keepalivePhase = "warmup";
|
|
1806
|
+
const keepalive = setInterval(() => {
|
|
1807
|
+
sendEvent("keepalive", { phase: keepalivePhase });
|
|
1808
|
+
}, 25e3);
|
|
1809
|
+
try {
|
|
1810
|
+
if (ctx2.warmupBackendBaseUrl) {
|
|
1811
|
+
await proxyRemoteSSE(
|
|
1812
|
+
`${ctx2.warmupBackendBaseUrl}/warmup/${job.warmupJobId}`,
|
|
1813
|
+
sendEvent,
|
|
1814
|
+
"warmup:",
|
|
1815
|
+
res,
|
|
1816
|
+
ctx2.warmupBackendApiKey ? { Authorization: `Bearer ${ctx2.warmupBackendApiKey}` } : void 0
|
|
1817
|
+
);
|
|
1818
|
+
} else {
|
|
1819
|
+
const warmupSender = prefixEventSender(sendEvent, "warmup:");
|
|
1820
|
+
await warmupSources(job.sources, warmupSender, ctx2);
|
|
1821
|
+
warmupSender("complete", { status: "ready" });
|
|
1822
|
+
}
|
|
1823
|
+
keepalivePhase = "render";
|
|
1824
|
+
if (ctx2.renderBackendBaseUrl) {
|
|
1825
|
+
await proxyRemoteSSE(
|
|
1826
|
+
`${ctx2.renderBackendBaseUrl}/render/${job.renderJobId}`,
|
|
1827
|
+
sendEvent,
|
|
1828
|
+
"render:",
|
|
1829
|
+
res,
|
|
1830
|
+
ctx2.renderBackendApiKey ? { Authorization: `Bearer ${ctx2.renderBackendApiKey}` } : void 0
|
|
1831
|
+
);
|
|
1832
|
+
} else {
|
|
1833
|
+
const renderSender = prefixEventSender(sendEvent, "render:");
|
|
1834
|
+
if (job.upload) {
|
|
1835
|
+
renderSender("started", { status: "rendering" });
|
|
1836
|
+
const timings = await renderAndUploadInternal(
|
|
1837
|
+
job.effie,
|
|
1838
|
+
job.scale,
|
|
1839
|
+
job.upload,
|
|
1840
|
+
renderSender,
|
|
1841
|
+
ctx2
|
|
1842
|
+
);
|
|
1843
|
+
renderSender("complete", { status: "uploaded", timings });
|
|
1844
|
+
} else {
|
|
1845
|
+
const videoUrl = `${ctx2.baseUrl}/render/${job.renderJobId}`;
|
|
1846
|
+
sendEvent("complete", { status: "ready", videoUrl });
|
|
1847
|
+
}
|
|
1848
|
+
}
|
|
1849
|
+
if (job.upload && !ctx2.renderBackendBaseUrl) {
|
|
1850
|
+
sendEvent("complete", { status: "done" });
|
|
1851
|
+
}
|
|
1852
|
+
} catch (error) {
|
|
1853
|
+
sendEvent("error", {
|
|
1854
|
+
phase: keepalivePhase,
|
|
1855
|
+
message: String(error)
|
|
1856
|
+
});
|
|
1857
|
+
} finally {
|
|
1858
|
+
clearInterval(keepalive);
|
|
1859
|
+
res.end();
|
|
1860
|
+
}
|
|
1861
|
+
} catch (error) {
|
|
1862
|
+
console.error("Error in warmup-and-render streaming:", error);
|
|
1863
|
+
if (!res.headersSent) {
|
|
1864
|
+
res.status(500).json({ error: "Warmup-and-render streaming failed" });
|
|
1865
|
+
} else {
|
|
1866
|
+
res.end();
|
|
1867
|
+
}
|
|
1868
|
+
}
|
|
1869
|
+
}
|
|
1870
|
+
function prefixEventSender(sendEvent, prefix) {
|
|
1871
|
+
return (event, data) => {
|
|
1872
|
+
sendEvent(`${prefix}${event}`, data);
|
|
1873
|
+
};
|
|
1874
|
+
}
|
|
1875
|
+
async function proxyRemoteSSE(url, sendEvent, prefix, res, headers) {
  // Connects to a remote SSE endpoint and re-emits each parsed event through
  // sendEvent with `prefix` prepended to the event name. Stops early when the
  // downstream client response has been destroyed. Events whose data is not
  // valid JSON are dropped (lenient by design — see the empty catch below).
  const response = await ffsFetch(url, {
    headers: {
      Accept: "text/event-stream",
      ...headers
    }
  });
  if (!response.ok) {
    throw new Error(`Remote backend error: ${response.status}`);
  }
  const reader = response.body?.getReader();
  if (!reader) {
    throw new Error("No response body from remote backend");
  }
  const decoder = new TextDecoder();
  let buffer = "";
  // Fix: event state must survive chunk boundaries. These were previously
  // re-declared inside the read loop, so an event whose "event:"/"data:"
  // lines and terminating blank line arrived in different network chunks
  // was silently dropped.
  let currentEvent = "";
  let currentData = "";
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      if (res.destroyed) {
        reader.cancel();
        break;
      }
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      // The last element may be a partial line; keep it for the next chunk.
      buffer = lines.pop() || "";
      for (const rawLine of lines) {
        // Fix: tolerate CRLF framing (allowed by the SSE spec) — a trailing
        // \r would otherwise make the blank-line dispatch check never match.
        const line = rawLine.endsWith("\r") ? rawLine.slice(0, -1) : rawLine;
        if (line.startsWith("event: ")) {
          currentEvent = line.slice(7);
        } else if (line.startsWith("data: ")) {
          currentData = line.slice(6);
        } else if (line === "" && currentEvent && currentData) {
          try {
            const data = JSON.parse(currentData);
            sendEvent(`${prefix}${currentEvent}`, data);
          } catch {
          }
          currentEvent = "";
          currentData = "";
        }
      }
    }
  } finally {
    reader.releaseLock();
  }
}
|
|
1924
|
+
async function proxyBinaryStream(response, res) {
  // Pipes a fetch Response body to an Express response, mirroring the
  // Content-Type / Content-Length headers and aborting the upstream read
  // when the client connection has gone away.
  const contentType = response.headers.get("content-type");
  if (contentType) res.set("Content-Type", contentType);
  const contentLength = response.headers.get("content-length");
  if (contentLength) res.set("Content-Length", contentLength);
  const reader = response.body?.getReader();
  if (!reader) {
    throw new Error("No response body");
  }
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      if (res.destroyed) {
        reader.cancel();
        break;
      }
      // Fix: honor backpressure. The return value of res.write() was ignored,
      // so a fast upstream with a slow client buffered the entire payload in
      // process memory. Wait for 'drain' when the socket buffer is full.
      if (!res.write(value)) {
        await new Promise((resolve) => res.once("drain", resolve));
      }
    }
  } finally {
    reader.releaseLock();
    res.end();
  }
}
|
|
1948
|
+
|
|
1949
|
+
// src/handlers/caching.ts
|
|
1950
|
+
function shouldSkipWarmup(source) {
  // Video and audio sources are passed straight through over HTTP at render
  // time, so pre-caching them during warmup would be wasted work.
  switch (source.type) {
    case "video":
    case "audio":
      return true;
    default:
      return false;
  }
}
|
|
1953
|
+
// Deduplicates concurrent downloads of the same source across warmup workers
// and jobs: maps cacheKey -> in-flight fetchAndCache promise. Entries are
// removed by the workers in warmupSources once the promise settles.
var inFlightFetches = /* @__PURE__ */ new Map();
|
|
1954
|
+
async function createWarmupJob(req, res, ctx2) {
  // POST /warmup — validates the posted Effie document, stores the list of
  // referenced sources under a fresh job id, and returns the URL of the SSE
  // stream that performs the actual warmup.
  try {
    const parsed = parseEffieData(req.body, ctx2.skipValidation);
    if ("error" in parsed) {
      res.status(400).json(parsed);
      return;
    }
    const jobId = randomUUID3();
    const sources = extractEffieSourcesWithTypes2(parsed.effie);
    const jobKey = storeKeys.warmupJob(jobId);
    await ctx2.transientStore.putJson(
      jobKey,
      { sources },
      ctx2.transientStore.jobMetadataTtlMs
    );
    res.json({ id: jobId, url: `${ctx2.baseUrl}/warmup/${jobId}` });
  } catch (error) {
    console.error("Error creating warmup job:", error);
    res.status(500).json({ error: "Failed to create warmup job" });
  }
}
|
|
1977
|
+
async function streamWarmupJob(req, res, ctx2) {
  // GET /warmup/:id — streams warmup progress for a previously created job as
  // Server-Sent Events. Jobs are one-shot: the stored metadata is deleted as
  // soon as it is read, so a second GET for the same id returns 404.
  // When a remote warmup backend is configured, the whole SSE stream is
  // proxied from it instead of running the warmup locally.
  try {
    setupCORSHeaders(res);
    const jobId = req.params.id;
    if (ctx2.warmupBackendBaseUrl) {
      setupSSEResponse(res);
      const sendEvent2 = createSSEEventSender(res);
      try {
        await proxyRemoteSSE(
          `${ctx2.warmupBackendBaseUrl}/warmup/${jobId}`,
          sendEvent2,
          "",
          res,
          ctx2.warmupBackendApiKey ? { Authorization: `Bearer ${ctx2.warmupBackendApiKey}` } : void 0
        );
      } finally {
        res.end();
      }
      return;
    }
    const jobCacheKey = storeKeys.warmupJob(jobId);
    const job = await ctx2.transientStore.getJson(jobCacheKey);
    // Fix: this delete was fire-and-forget with no rejection handler, so a
    // store failure surfaced as an unhandled promise rejection (which can
    // crash the process in modern Node). Keep it best-effort, but catch and
    // log instead of leaving the promise floating.
    ctx2.transientStore.delete(jobCacheKey).catch((err) => {
      console.error("Failed to delete warmup job metadata:", err);
    });
    if (!job) {
      res.status(404).json({ error: "Job not found" });
      return;
    }
    setupSSEResponse(res);
    const sendEvent = createSSEEventSender(res);
    try {
      await warmupSources(job.sources, sendEvent, ctx2);
      sendEvent("complete", { status: "ready" });
    } catch (error) {
      sendEvent("error", { message: String(error) });
    } finally {
      res.end();
    }
  } catch (error) {
    console.error("Error in warmup streaming:", error);
    if (!res.headersSent) {
      res.status(500).json({ error: "Warmup streaming failed" });
    } else {
      res.end();
    }
  }
}
|
|
2023
|
+
async function purgeCache(req, res, ctx2) {
  // POST /purge — removes the cached copy of every source referenced by the
  // posted Effie document. Responds with how many entries were actually
  // present (and removed) out of the total referenced.
  try {
    const parsed = parseEffieData(req.body, ctx2.skipValidation);
    if ("error" in parsed) {
      res.status(400).json(parsed);
      return;
    }
    const urls = extractEffieSources(parsed.effie);
    let removed = 0;
    for (const sourceUrl of urls) {
      const key = storeKeys.source(sourceUrl);
      const present = await ctx2.transientStore.exists(key);
      if (!present) continue;
      await ctx2.transientStore.delete(key);
      removed++;
    }
    res.json({ purged: removed, total: urls.length });
  } catch (error) {
    console.error("Error purging cache:", error);
    res.status(500).json({ error: "Failed to purge cache" });
  }
}
|
|
2045
|
+
// Warms the transient store with every source in `sources`, emitting SSE
// progress events along the way. Phases:
//   1. skip video/audio (served passthrough at render time),
//   2. batch-check which remaining sources are already cached,
//   3. download the rest with a bounded worker pool, deduplicating concurrent
//      downloads of the same URL via the module-level inFlightFetches map.
// The shared counters (cached/failed/skipped) are mutated by the workers;
// reads/writes happen between awaits, so ordering is load-dependent but safe.
async function warmupSources(sources, sendEvent, ctx2) {
  const total = sources.length;
  sendEvent("start", { total });
  let cached = 0;
  let failed = 0;
  let skipped = 0;
  const sourcesToCache = [];
  // Phase 1: partition into skipped (passthrough types) vs. cache candidates.
  for (const source of sources) {
    if (shouldSkipWarmup(source)) {
      skipped++;
      sendEvent("progress", {
        url: source.url,
        status: "skipped",
        reason: "http-video-audio-passthrough",
        cached,
        failed,
        skipped,
        total
      });
    } else {
      sourcesToCache.push(source);
    }
  }
  // Phase 2: one batched existence check; report hits without re-downloading.
  const sourceCacheKeys = sourcesToCache.map((s) => storeKeys.source(s.url));
  const existsMap = await ctx2.transientStore.existsMany(sourceCacheKeys);
  for (let i = 0; i < sourcesToCache.length; i++) {
    if (existsMap.get(sourceCacheKeys[i])) {
      cached++;
      sendEvent("progress", {
        url: sourcesToCache[i].url,
        status: "hit",
        cached,
        failed,
        skipped,
        total
      });
    }
  }
  const uncached = sourcesToCache.filter(
    (_, i) => !existsMap.get(sourceCacheKeys[i])
  );
  if (uncached.length === 0) {
    sendEvent("summary", { cached, failed, skipped, total });
    return;
  }
  // Keepalive every 25s so proxies don't drop the SSE connection while
  // long downloads are in flight.
  const keepalive = setInterval(() => {
    sendEvent("keepalive", { cached, failed, skipped, total });
  }, 25e3);
  // Phase 3: bounded worker pool. queue.shift() happens synchronously before
  // any await inside the loop body, so workers never race on the same item.
  const queue = [...uncached];
  const workers = Array.from(
    { length: Math.min(ctx2.warmupConcurrency, queue.length) },
    async () => {
      while (queue.length > 0) {
        const source = queue.shift();
        const cacheKey = storeKeys.source(source.url);
        const startTime = Date.now();
        try {
          // Reuse an in-flight download of the same key (possibly started by
          // a concurrent warmup job) instead of fetching twice.
          let fetchPromise = inFlightFetches.get(cacheKey);
          if (!fetchPromise) {
            fetchPromise = fetchAndCache(source.url, cacheKey, sendEvent, ctx2);
            inFlightFetches.set(cacheKey, fetchPromise);
          }
          await fetchPromise;
          inFlightFetches.delete(cacheKey);
          cached++;
          sendEvent("progress", {
            url: source.url,
            status: "cached",
            cached,
            failed,
            skipped,
            total,
            ms: Date.now() - startTime
          });
        } catch (error) {
          inFlightFetches.delete(cacheKey);
          failed++;
          sendEvent("progress", {
            url: source.url,
            status: "error",
            error: String(error),
            cached,
            failed,
            skipped,
            total,
            ms: Date.now() - startTime
          });
        }
      }
    }
  );
  await Promise.all(workers);
  clearInterval(keepalive);
  sendEvent("summary", { cached, failed, skipped, total });
}
|
|
2140
|
+
async function fetchAndCache(url, cacheKey, sendEvent, ctx2) {
  // Downloads a single source URL and streams it into the transient store
  // under `cacheKey`, emitting "downloading" SSE progress events at most
  // once every PROGRESS_INTERVAL ms. Generous timeouts accommodate large
  // media files.
  const response = await ffsFetch(url, {
    headersTimeout: 10 * 60 * 1e3,
    // 10 minutes
    bodyTimeout: 20 * 60 * 1e3
    // 20 minutes
  });
  if (!response.ok) {
    throw new Error(`${response.status} ${response.statusText}`);
  }
  sendEvent("downloading", { url, status: "started", bytesReceived: 0 });
  const sourceStream = Readable3.fromWeb(
    response.body
  );
  let totalBytes = 0;
  let lastEventTime = Date.now();
  const PROGRESS_INTERVAL = 1e4;
  // Pass-through transform that counts bytes and throttles progress events.
  const progressStream = new Transform({
    transform(chunk, _encoding, callback) {
      totalBytes += chunk.length;
      const now = Date.now();
      if (now - lastEventTime >= PROGRESS_INTERVAL) {
        sendEvent("downloading", {
          url,
          status: "downloading",
          bytesReceived: totalBytes
        });
        lastEventTime = now;
      }
      callback(null, chunk);
    }
  });
  // Fix: .pipe() does not forward errors from the source to the destination,
  // so a mid-download network failure would leave transientStore.put()
  // waiting on a stream that neither ends nor errors. Propagate explicitly.
  sourceStream.on("error", (err) => progressStream.destroy(err));
  const trackedStream = sourceStream.pipe(progressStream);
  await ctx2.transientStore.put(
    cacheKey,
    trackedStream,
    ctx2.transientStore.sourceTtlMs
  );
}
|
|
2179
|
+
|
|
2180
|
+
// src/server.ts
// Top-level server bootstrap. JSON body limit is 50mb because Effie
// documents can embed large inline payloads.
var app = express5();
app.use(bodyParser.json({ limit: "50mb" }));
// Shared request context (stores, proxy, backend config); created once at
// module load — top-level await, so startup fails fast if it rejects.
var ctx = await createServerContext();
console.log(`FFS HTTP proxy listening on port ${ctx.httpProxy.port}`);
|
|
2185
|
+
function validateAuth(req, res) {
  // Bearer-token auth gate. Returns true when the request may proceed; on
  // failure it sends a 401 response and returns false. When FFS_API_KEY is
  // unset (or empty) the server runs open and every request is allowed.
  const apiKey = process.env.FFS_API_KEY;
  if (!apiKey) return true;
  const authHeader = req.headers.authorization;
  // Fix: the key was compared with `!==`, which leaks the length of the
  // matching prefix through timing. Compare in constant time instead; the
  // length check short-circuits only on mismatched lengths, which is public
  // information anyway.
  const provided = Buffer.from(authHeader ?? "");
  const wanted = Buffer.from(`Bearer ${apiKey}`);
  const ok = provided.length === wanted.length && crypto.timingSafeEqual(provided, wanted);
  if (!ok) {
    res.status(401).json({ error: "Unauthorized" });
    return false;
  }
  return true;
}
|
|
2195
|
+
// Route table. POST endpoints create jobs and require the API key (when
// configured); GET endpoints stream job progress as SSE and are unauthed
// because job ids are unguessable UUIDs handed out by the POST endpoints.
function requireAuth(handler) {
  return (req, res) => {
    if (!validateAuth(req, res)) return;
    handler(req, res, ctx);
  };
}
app.post("/warmup", requireAuth(createWarmupJob));
app.post("/purge", requireAuth(purgeCache));
app.post("/render", requireAuth(createRenderJob));
app.post("/warmup-and-render", requireAuth(createWarmupAndRenderJob));
app.get("/warmup/:id", (req, res) => streamWarmupJob(req, res, ctx));
app.get("/render/:id", (req, res) => streamRenderJob(req, res, ctx));
app.get(
  "/warmup-and-render/:id",
  (req, res) => streamWarmupAndRenderJob(req, res, ctx)
);
var port = process.env.FFS_PORT || process.env.PORT || 2e3;
var server = app.listen(port, () => {
  console.log(`FFS server listening on port ${port}`);
});
|
|
1854
2221
|
function shutdown() {
|
|
1855
2222
|
console.log("Shutting down FFS server...");
|
|
1856
2223
|
ctx.httpProxy.close();
|
|
1857
|
-
ctx.
|
|
2224
|
+
ctx.transientStore.close();
|
|
1858
2225
|
server.close(() => {
|
|
1859
2226
|
console.log("FFS server stopped");
|
|
1860
2227
|
process.exit(0);
|