@effing/ffs 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +136 -16
- package/dist/{chunk-A7BAW24L.js → chunk-J64HSZNQ.js} +65 -46
- package/dist/chunk-J64HSZNQ.js.map +1 -0
- package/dist/{chunk-6YHSYHDY.js → chunk-XSCNUWZJ.js} +550 -214
- package/dist/chunk-XSCNUWZJ.js.map +1 -0
- package/dist/handlers/index.d.ts +36 -4
- package/dist/handlers/index.js +10 -2
- package/dist/index.d.ts +5 -5
- package/dist/index.js +1 -1
- package/dist/{proxy-BI8OMQl0.d.ts → proxy-qTA69nOV.d.ts} +11 -7
- package/dist/server.js +650 -291
- package/dist/server.js.map +1 -1
- package/package.json +2 -2
- package/dist/chunk-6YHSYHDY.js.map +0 -1
- package/dist/chunk-A7BAW24L.js.map +0 -1
package/dist/server.js
CHANGED
|
@@ -1,13 +1,13 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
|
|
3
3
|
// src/server.ts
|
|
4
|
-
import
|
|
4
|
+
import express5 from "express";
|
|
5
5
|
import bodyParser from "body-parser";
|
|
6
6
|
|
|
7
7
|
// src/handlers/shared.ts
|
|
8
8
|
import "express";
|
|
9
9
|
|
|
10
|
-
// src/
|
|
10
|
+
// src/storage.ts
|
|
11
11
|
import {
|
|
12
12
|
S3Client,
|
|
13
13
|
PutObjectCommand,
|
|
@@ -22,11 +22,14 @@ import { pipeline } from "stream/promises";
|
|
|
22
22
|
import path from "path";
|
|
23
23
|
import os from "os";
|
|
24
24
|
import crypto from "crypto";
|
|
25
|
-
var
|
|
25
|
+
var DEFAULT_SOURCE_TTL_MS = 60 * 60 * 1e3;
|
|
26
|
+
var DEFAULT_JOB_METADATA_TTL_MS = 8 * 60 * 60 * 1e3;
|
|
27
|
+
var S3TransientStore = class {
|
|
26
28
|
client;
|
|
27
29
|
bucket;
|
|
28
30
|
prefix;
|
|
29
|
-
|
|
31
|
+
sourceTtlMs;
|
|
32
|
+
jobMetadataTtlMs;
|
|
30
33
|
constructor(options) {
|
|
31
34
|
this.client = new S3Client({
|
|
32
35
|
endpoint: options.endpoint,
|
|
@@ -39,22 +42,23 @@ var S3CacheStorage = class {
|
|
|
39
42
|
});
|
|
40
43
|
this.bucket = options.bucket;
|
|
41
44
|
this.prefix = options.prefix ?? "";
|
|
42
|
-
this.
|
|
45
|
+
this.sourceTtlMs = options.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;
|
|
46
|
+
this.jobMetadataTtlMs = options.jobMetadataTtlMs ?? DEFAULT_JOB_METADATA_TTL_MS;
|
|
43
47
|
}
|
|
44
|
-
getExpires() {
|
|
45
|
-
return new Date(Date.now() +
|
|
48
|
+
getExpires(ttlMs) {
|
|
49
|
+
return new Date(Date.now() + ttlMs);
|
|
46
50
|
}
|
|
47
51
|
getFullKey(key) {
|
|
48
52
|
return `${this.prefix}${key}`;
|
|
49
53
|
}
|
|
50
|
-
async put(key, stream) {
|
|
54
|
+
async put(key, stream, ttlMs) {
|
|
51
55
|
const upload = new Upload({
|
|
52
56
|
client: this.client,
|
|
53
57
|
params: {
|
|
54
58
|
Bucket: this.bucket,
|
|
55
59
|
Key: this.getFullKey(key),
|
|
56
60
|
Body: stream,
|
|
57
|
-
Expires: this.getExpires()
|
|
61
|
+
Expires: this.getExpires(ttlMs ?? this.sourceTtlMs)
|
|
58
62
|
}
|
|
59
63
|
});
|
|
60
64
|
await upload.done();
|
|
@@ -115,14 +119,14 @@ var S3CacheStorage = class {
|
|
|
115
119
|
throw err;
|
|
116
120
|
}
|
|
117
121
|
}
|
|
118
|
-
async putJson(key, data) {
|
|
122
|
+
async putJson(key, data, ttlMs) {
|
|
119
123
|
await this.client.send(
|
|
120
124
|
new PutObjectCommand({
|
|
121
125
|
Bucket: this.bucket,
|
|
122
126
|
Key: this.getFullKey(key),
|
|
123
127
|
Body: JSON.stringify(data),
|
|
124
128
|
ContentType: "application/json",
|
|
125
|
-
Expires: this.getExpires()
|
|
129
|
+
Expires: this.getExpires(ttlMs ?? this.jobMetadataTtlMs)
|
|
126
130
|
})
|
|
127
131
|
);
|
|
128
132
|
}
|
|
@@ -148,20 +152,25 @@ var S3CacheStorage = class {
|
|
|
148
152
|
close() {
|
|
149
153
|
}
|
|
150
154
|
};
|
|
151
|
-
var
|
|
155
|
+
var LocalTransientStore = class {
|
|
152
156
|
baseDir;
|
|
153
157
|
initialized = false;
|
|
154
158
|
cleanupInterval;
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
+
sourceTtlMs;
|
|
160
|
+
jobMetadataTtlMs;
|
|
161
|
+
/** For cleanup, use the longer of the two TTLs */
|
|
162
|
+
maxTtlMs;
|
|
163
|
+
constructor(options) {
|
|
164
|
+
this.baseDir = options?.baseDir ?? path.join(os.tmpdir(), "ffs-transient");
|
|
165
|
+
this.sourceTtlMs = options?.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;
|
|
166
|
+
this.jobMetadataTtlMs = options?.jobMetadataTtlMs ?? DEFAULT_JOB_METADATA_TTL_MS;
|
|
167
|
+
this.maxTtlMs = Math.max(this.sourceTtlMs, this.jobMetadataTtlMs);
|
|
159
168
|
this.cleanupInterval = setInterval(() => {
|
|
160
169
|
this.cleanupExpired().catch(console.error);
|
|
161
170
|
}, 3e5);
|
|
162
171
|
}
|
|
163
172
|
/**
|
|
164
|
-
* Remove files older than TTL
|
|
173
|
+
* Remove files older than max TTL
|
|
165
174
|
*/
|
|
166
175
|
async cleanupExpired() {
|
|
167
176
|
if (!this.initialized) return;
|
|
@@ -186,7 +195,7 @@ var LocalCacheStorage = class {
|
|
|
186
195
|
} else if (entry.isFile()) {
|
|
187
196
|
try {
|
|
188
197
|
const stat = await fs.stat(fullPath);
|
|
189
|
-
if (now - stat.mtimeMs > this.
|
|
198
|
+
if (now - stat.mtimeMs > this.maxTtlMs) {
|
|
190
199
|
await fs.rm(fullPath, { force: true });
|
|
191
200
|
}
|
|
192
201
|
} catch {
|
|
@@ -205,7 +214,7 @@ var LocalCacheStorage = class {
|
|
|
205
214
|
const rand = crypto.randomBytes(8).toString("hex");
|
|
206
215
|
return `${finalPath}.tmp-${process.pid}-${rand}`;
|
|
207
216
|
}
|
|
208
|
-
async put(key, stream) {
|
|
217
|
+
async put(key, stream, _ttlMs) {
|
|
209
218
|
const fp = this.filePath(key);
|
|
210
219
|
await this.ensureDir(fp);
|
|
211
220
|
const tmpPath = this.tmpPathFor(fp);
|
|
@@ -241,7 +250,7 @@ var LocalCacheStorage = class {
|
|
|
241
250
|
async delete(key) {
|
|
242
251
|
await fs.rm(this.filePath(key), { force: true });
|
|
243
252
|
}
|
|
244
|
-
async putJson(key, data) {
|
|
253
|
+
async putJson(key, data, _ttlMs) {
|
|
245
254
|
const fp = this.filePath(key);
|
|
246
255
|
await this.ensureDir(fp);
|
|
247
256
|
const tmpPath = this.tmpPathFor(fp);
|
|
@@ -269,37 +278,47 @@ var LocalCacheStorage = class {
|
|
|
269
278
|
}
|
|
270
279
|
}
|
|
271
280
|
};
|
|
272
|
-
function
|
|
273
|
-
const
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
281
|
+
function createTransientStore() {
|
|
282
|
+
const sourceTtlMs = process.env.FFS_SOURCE_CACHE_TTL_MS ? parseInt(process.env.FFS_SOURCE_CACHE_TTL_MS, 10) : DEFAULT_SOURCE_TTL_MS;
|
|
283
|
+
const jobMetadataTtlMs = process.env.FFS_JOB_METADATA_TTL_MS ? parseInt(process.env.FFS_JOB_METADATA_TTL_MS, 10) : DEFAULT_JOB_METADATA_TTL_MS;
|
|
284
|
+
if (process.env.FFS_TRANSIENT_STORE_BUCKET) {
|
|
285
|
+
return new S3TransientStore({
|
|
286
|
+
endpoint: process.env.FFS_TRANSIENT_STORE_ENDPOINT,
|
|
287
|
+
region: process.env.FFS_TRANSIENT_STORE_REGION ?? "auto",
|
|
288
|
+
bucket: process.env.FFS_TRANSIENT_STORE_BUCKET,
|
|
289
|
+
prefix: process.env.FFS_TRANSIENT_STORE_PREFIX,
|
|
290
|
+
accessKeyId: process.env.FFS_TRANSIENT_STORE_ACCESS_KEY,
|
|
291
|
+
secretAccessKey: process.env.FFS_TRANSIENT_STORE_SECRET_KEY,
|
|
292
|
+
sourceTtlMs,
|
|
293
|
+
jobMetadataTtlMs
|
|
283
294
|
});
|
|
284
295
|
}
|
|
285
|
-
return new
|
|
296
|
+
return new LocalTransientStore({
|
|
297
|
+
baseDir: process.env.FFS_TRANSIENT_STORE_LOCAL_DIR,
|
|
298
|
+
sourceTtlMs,
|
|
299
|
+
jobMetadataTtlMs
|
|
300
|
+
});
|
|
286
301
|
}
|
|
287
302
|
function hashUrl(url) {
|
|
288
303
|
return crypto.createHash("sha256").update(url).digest("hex").slice(0, 16);
|
|
289
304
|
}
|
|
290
|
-
function
|
|
305
|
+
function sourceStoreKey(url) {
|
|
291
306
|
return `sources/${hashUrl(url)}`;
|
|
292
307
|
}
|
|
293
|
-
function
|
|
308
|
+
function warmupJobStoreKey(jobId) {
|
|
294
309
|
return `jobs/warmup/${jobId}.json`;
|
|
295
310
|
}
|
|
296
|
-
function
|
|
311
|
+
function renderJobStoreKey(jobId) {
|
|
297
312
|
return `jobs/render/${jobId}.json`;
|
|
298
313
|
}
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
314
|
+
function warmupAndRenderJobStoreKey(jobId) {
|
|
315
|
+
return `jobs/warmup-and-render/${jobId}.json`;
|
|
316
|
+
}
|
|
317
|
+
var storeKeys = {
|
|
318
|
+
source: sourceStoreKey,
|
|
319
|
+
warmupJob: warmupJobStoreKey,
|
|
320
|
+
renderJob: renderJobStoreKey,
|
|
321
|
+
warmupAndRenderJob: warmupAndRenderJobStoreKey
|
|
303
322
|
};
|
|
304
323
|
|
|
305
324
|
// src/proxy.ts
|
|
@@ -454,11 +473,13 @@ async function createServerContext() {
|
|
|
454
473
|
const httpProxy = new HttpProxy();
|
|
455
474
|
await httpProxy.start();
|
|
456
475
|
return {
|
|
457
|
-
|
|
476
|
+
transientStore: createTransientStore(),
|
|
458
477
|
httpProxy,
|
|
459
478
|
baseUrl: process.env.FFS_BASE_URL || `http://localhost:${port2}`,
|
|
460
479
|
skipValidation: !!process.env.FFS_SKIP_VALIDATION && process.env.FFS_SKIP_VALIDATION !== "false",
|
|
461
|
-
|
|
480
|
+
warmupConcurrency: parseInt(process.env.FFS_WARMUP_CONCURRENCY || "4", 10),
|
|
481
|
+
warmupBackendBaseUrl: process.env.FFS_WARMUP_BACKEND_BASE_URL,
|
|
482
|
+
renderBackendBaseUrl: process.env.FFS_RENDER_BACKEND_BASE_URL
|
|
462
483
|
};
|
|
463
484
|
}
|
|
464
485
|
function parseEffieData(body, skipValidation) {
|
|
@@ -505,224 +526,24 @@ data: ${JSON.stringify(data)}
|
|
|
505
526
|
|
|
506
527
|
// src/handlers/caching.ts
|
|
507
528
|
import "express";
|
|
508
|
-
import { Readable as
|
|
509
|
-
import { randomUUID } from "crypto";
|
|
529
|
+
import { Readable as Readable3, Transform } from "stream";
|
|
530
|
+
import { randomUUID as randomUUID3 } from "crypto";
|
|
510
531
|
import {
|
|
511
532
|
extractEffieSources,
|
|
512
|
-
extractEffieSourcesWithTypes
|
|
533
|
+
extractEffieSourcesWithTypes as extractEffieSourcesWithTypes2
|
|
513
534
|
} from "@effing/effie";
|
|
514
|
-
function shouldSkipWarmup(source) {
|
|
515
|
-
return source.type === "video" || source.type === "audio";
|
|
516
|
-
}
|
|
517
|
-
var inFlightFetches = /* @__PURE__ */ new Map();
|
|
518
|
-
async function createWarmupJob(req, res, ctx2) {
|
|
519
|
-
try {
|
|
520
|
-
const parseResult = parseEffieData(req.body, ctx2.skipValidation);
|
|
521
|
-
if ("error" in parseResult) {
|
|
522
|
-
res.status(400).json(parseResult);
|
|
523
|
-
return;
|
|
524
|
-
}
|
|
525
|
-
const sources = extractEffieSourcesWithTypes(parseResult.effie);
|
|
526
|
-
const jobId = randomUUID();
|
|
527
|
-
await ctx2.cacheStorage.putJson(cacheKeys.warmupJob(jobId), { sources });
|
|
528
|
-
res.json({
|
|
529
|
-
id: jobId,
|
|
530
|
-
url: `${ctx2.baseUrl}/warmup/${jobId}`
|
|
531
|
-
});
|
|
532
|
-
} catch (error) {
|
|
533
|
-
console.error("Error creating warmup job:", error);
|
|
534
|
-
res.status(500).json({ error: "Failed to create warmup job" });
|
|
535
|
-
}
|
|
536
|
-
}
|
|
537
|
-
async function streamWarmupJob(req, res, ctx2) {
|
|
538
|
-
try {
|
|
539
|
-
setupCORSHeaders(res);
|
|
540
|
-
const jobId = req.params.id;
|
|
541
|
-
const jobCacheKey = cacheKeys.warmupJob(jobId);
|
|
542
|
-
const job = await ctx2.cacheStorage.getJson(jobCacheKey);
|
|
543
|
-
ctx2.cacheStorage.delete(jobCacheKey);
|
|
544
|
-
if (!job) {
|
|
545
|
-
res.status(404).json({ error: "Job not found" });
|
|
546
|
-
return;
|
|
547
|
-
}
|
|
548
|
-
setupSSEResponse(res);
|
|
549
|
-
const sendEvent = createSSEEventSender(res);
|
|
550
|
-
try {
|
|
551
|
-
await warmupSources(job.sources, sendEvent, ctx2);
|
|
552
|
-
sendEvent("complete", { status: "ready" });
|
|
553
|
-
} catch (error) {
|
|
554
|
-
sendEvent("error", { message: String(error) });
|
|
555
|
-
} finally {
|
|
556
|
-
res.end();
|
|
557
|
-
}
|
|
558
|
-
} catch (error) {
|
|
559
|
-
console.error("Error in warmup streaming:", error);
|
|
560
|
-
if (!res.headersSent) {
|
|
561
|
-
res.status(500).json({ error: "Warmup streaming failed" });
|
|
562
|
-
} else {
|
|
563
|
-
res.end();
|
|
564
|
-
}
|
|
565
|
-
}
|
|
566
|
-
}
|
|
567
|
-
async function purgeCache(req, res, ctx2) {
|
|
568
|
-
try {
|
|
569
|
-
const parseResult = parseEffieData(req.body, ctx2.skipValidation);
|
|
570
|
-
if ("error" in parseResult) {
|
|
571
|
-
res.status(400).json(parseResult);
|
|
572
|
-
return;
|
|
573
|
-
}
|
|
574
|
-
const sources = extractEffieSources(parseResult.effie);
|
|
575
|
-
let purged = 0;
|
|
576
|
-
for (const url of sources) {
|
|
577
|
-
const ck = cacheKeys.source(url);
|
|
578
|
-
if (await ctx2.cacheStorage.exists(ck)) {
|
|
579
|
-
await ctx2.cacheStorage.delete(ck);
|
|
580
|
-
purged++;
|
|
581
|
-
}
|
|
582
|
-
}
|
|
583
|
-
res.json({ purged, total: sources.length });
|
|
584
|
-
} catch (error) {
|
|
585
|
-
console.error("Error purging cache:", error);
|
|
586
|
-
res.status(500).json({ error: "Failed to purge cache" });
|
|
587
|
-
}
|
|
588
|
-
}
|
|
589
|
-
async function warmupSources(sources, sendEvent, ctx2) {
|
|
590
|
-
const total = sources.length;
|
|
591
|
-
sendEvent("start", { total });
|
|
592
|
-
let cached = 0;
|
|
593
|
-
let failed = 0;
|
|
594
|
-
let skipped = 0;
|
|
595
|
-
const sourcesToCache = [];
|
|
596
|
-
for (const source of sources) {
|
|
597
|
-
if (shouldSkipWarmup(source)) {
|
|
598
|
-
skipped++;
|
|
599
|
-
sendEvent("progress", {
|
|
600
|
-
url: source.url,
|
|
601
|
-
status: "skipped",
|
|
602
|
-
reason: "http-video-audio-passthrough",
|
|
603
|
-
cached,
|
|
604
|
-
failed,
|
|
605
|
-
skipped,
|
|
606
|
-
total
|
|
607
|
-
});
|
|
608
|
-
} else {
|
|
609
|
-
sourcesToCache.push(source);
|
|
610
|
-
}
|
|
611
|
-
}
|
|
612
|
-
const sourceCacheKeys = sourcesToCache.map((s) => cacheKeys.source(s.url));
|
|
613
|
-
const existsMap = await ctx2.cacheStorage.existsMany(sourceCacheKeys);
|
|
614
|
-
for (let i = 0; i < sourcesToCache.length; i++) {
|
|
615
|
-
if (existsMap.get(sourceCacheKeys[i])) {
|
|
616
|
-
cached++;
|
|
617
|
-
sendEvent("progress", {
|
|
618
|
-
url: sourcesToCache[i].url,
|
|
619
|
-
status: "hit",
|
|
620
|
-
cached,
|
|
621
|
-
failed,
|
|
622
|
-
skipped,
|
|
623
|
-
total
|
|
624
|
-
});
|
|
625
|
-
}
|
|
626
|
-
}
|
|
627
|
-
const uncached = sourcesToCache.filter(
|
|
628
|
-
(_, i) => !existsMap.get(sourceCacheKeys[i])
|
|
629
|
-
);
|
|
630
|
-
if (uncached.length === 0) {
|
|
631
|
-
sendEvent("summary", { cached, failed, skipped, total });
|
|
632
|
-
return;
|
|
633
|
-
}
|
|
634
|
-
const keepalive = setInterval(() => {
|
|
635
|
-
sendEvent("keepalive", { cached, failed, skipped, total });
|
|
636
|
-
}, 25e3);
|
|
637
|
-
const queue = [...uncached];
|
|
638
|
-
const workers = Array.from(
|
|
639
|
-
{ length: Math.min(ctx2.cacheConcurrency, queue.length) },
|
|
640
|
-
async () => {
|
|
641
|
-
while (queue.length > 0) {
|
|
642
|
-
const source = queue.shift();
|
|
643
|
-
const cacheKey = cacheKeys.source(source.url);
|
|
644
|
-
const startTime = Date.now();
|
|
645
|
-
try {
|
|
646
|
-
let fetchPromise = inFlightFetches.get(cacheKey);
|
|
647
|
-
if (!fetchPromise) {
|
|
648
|
-
fetchPromise = fetchAndCache(source.url, cacheKey, sendEvent, ctx2);
|
|
649
|
-
inFlightFetches.set(cacheKey, fetchPromise);
|
|
650
|
-
}
|
|
651
|
-
await fetchPromise;
|
|
652
|
-
inFlightFetches.delete(cacheKey);
|
|
653
|
-
cached++;
|
|
654
|
-
sendEvent("progress", {
|
|
655
|
-
url: source.url,
|
|
656
|
-
status: "cached",
|
|
657
|
-
cached,
|
|
658
|
-
failed,
|
|
659
|
-
skipped,
|
|
660
|
-
total,
|
|
661
|
-
ms: Date.now() - startTime
|
|
662
|
-
});
|
|
663
|
-
} catch (error) {
|
|
664
|
-
inFlightFetches.delete(cacheKey);
|
|
665
|
-
failed++;
|
|
666
|
-
sendEvent("progress", {
|
|
667
|
-
url: source.url,
|
|
668
|
-
status: "error",
|
|
669
|
-
error: String(error),
|
|
670
|
-
cached,
|
|
671
|
-
failed,
|
|
672
|
-
skipped,
|
|
673
|
-
total,
|
|
674
|
-
ms: Date.now() - startTime
|
|
675
|
-
});
|
|
676
|
-
}
|
|
677
|
-
}
|
|
678
|
-
}
|
|
679
|
-
);
|
|
680
|
-
await Promise.all(workers);
|
|
681
|
-
clearInterval(keepalive);
|
|
682
|
-
sendEvent("summary", { cached, failed, skipped, total });
|
|
683
|
-
}
|
|
684
|
-
async function fetchAndCache(url, cacheKey, sendEvent, ctx2) {
|
|
685
|
-
const response = await ffsFetch(url, {
|
|
686
|
-
headersTimeout: 10 * 60 * 1e3,
|
|
687
|
-
// 10 minutes
|
|
688
|
-
bodyTimeout: 20 * 60 * 1e3
|
|
689
|
-
// 20 minutes
|
|
690
|
-
});
|
|
691
|
-
if (!response.ok) {
|
|
692
|
-
throw new Error(`${response.status} ${response.statusText}`);
|
|
693
|
-
}
|
|
694
|
-
sendEvent("downloading", { url, status: "started", bytesReceived: 0 });
|
|
695
|
-
const sourceStream = Readable2.fromWeb(
|
|
696
|
-
response.body
|
|
697
|
-
);
|
|
698
|
-
let totalBytes = 0;
|
|
699
|
-
let lastEventTime = Date.now();
|
|
700
|
-
const PROGRESS_INTERVAL = 1e4;
|
|
701
|
-
const progressStream = new Transform({
|
|
702
|
-
transform(chunk, _encoding, callback) {
|
|
703
|
-
totalBytes += chunk.length;
|
|
704
|
-
const now = Date.now();
|
|
705
|
-
if (now - lastEventTime >= PROGRESS_INTERVAL) {
|
|
706
|
-
sendEvent("downloading", {
|
|
707
|
-
url,
|
|
708
|
-
status: "downloading",
|
|
709
|
-
bytesReceived: totalBytes
|
|
710
|
-
});
|
|
711
|
-
lastEventTime = now;
|
|
712
|
-
}
|
|
713
|
-
callback(null, chunk);
|
|
714
|
-
}
|
|
715
|
-
});
|
|
716
|
-
const trackedStream = sourceStream.pipe(progressStream);
|
|
717
|
-
await ctx2.cacheStorage.put(cacheKey, trackedStream);
|
|
718
|
-
}
|
|
719
535
|
|
|
720
|
-
// src/handlers/
|
|
536
|
+
// src/handlers/orchestrating.ts
|
|
721
537
|
import "express";
|
|
722
538
|
import { randomUUID as randomUUID2 } from "crypto";
|
|
539
|
+
import { extractEffieSourcesWithTypes, effieDataSchema as effieDataSchema3 } from "@effing/effie";
|
|
540
|
+
|
|
541
|
+
// src/handlers/rendering.ts
|
|
542
|
+
import "express";
|
|
543
|
+
import { randomUUID } from "crypto";
|
|
723
544
|
|
|
724
545
|
// src/render.ts
|
|
725
|
-
import { Readable as
|
|
546
|
+
import { Readable as Readable2 } from "stream";
|
|
726
547
|
import { createReadStream as createReadStream2 } from "fs";
|
|
727
548
|
|
|
728
549
|
// src/motion.ts
|
|
@@ -1133,12 +954,12 @@ var EffieRenderer = class {
|
|
|
1133
954
|
effieData;
|
|
1134
955
|
ffmpegRunner;
|
|
1135
956
|
allowLocalFiles;
|
|
1136
|
-
|
|
957
|
+
transientStore;
|
|
1137
958
|
httpProxy;
|
|
1138
959
|
constructor(effieData, options) {
|
|
1139
960
|
this.effieData = effieData;
|
|
1140
961
|
this.allowLocalFiles = options?.allowLocalFiles ?? false;
|
|
1141
|
-
this.
|
|
962
|
+
this.transientStore = options?.transientStore;
|
|
1142
963
|
this.httpProxy = options?.httpProxy;
|
|
1143
964
|
}
|
|
1144
965
|
async fetchSource(src) {
|
|
@@ -1151,7 +972,7 @@ var EffieRenderer = class {
|
|
|
1151
972
|
const isBase64 = meta.endsWith(";base64");
|
|
1152
973
|
const data = src.slice(commaIndex + 1);
|
|
1153
974
|
const buffer = isBase64 ? Buffer.from(data, "base64") : Buffer.from(decodeURIComponent(data));
|
|
1154
|
-
return
|
|
975
|
+
return Readable2.from(buffer);
|
|
1155
976
|
}
|
|
1156
977
|
if (src.startsWith("file:")) {
|
|
1157
978
|
if (!this.allowLocalFiles) {
|
|
@@ -1161,9 +982,9 @@ var EffieRenderer = class {
|
|
|
1161
982
|
}
|
|
1162
983
|
return createReadStream2(fileURLToPath(src));
|
|
1163
984
|
}
|
|
1164
|
-
if (this.
|
|
1165
|
-
const cachedStream = await this.
|
|
1166
|
-
|
|
985
|
+
if (this.transientStore) {
|
|
986
|
+
const cachedStream = await this.transientStore.getStream(
|
|
987
|
+
storeKeys.source(src)
|
|
1167
988
|
);
|
|
1168
989
|
if (cachedStream) {
|
|
1169
990
|
return cachedStream;
|
|
@@ -1183,7 +1004,7 @@ var EffieRenderer = class {
|
|
|
1183
1004
|
if (!response.body) {
|
|
1184
1005
|
throw new Error(`No body for ${src}`);
|
|
1185
1006
|
}
|
|
1186
|
-
return
|
|
1007
|
+
return Readable2.fromWeb(response.body);
|
|
1187
1008
|
}
|
|
1188
1009
|
buildAudioFilter({
|
|
1189
1010
|
duration,
|
|
@@ -1681,14 +1502,18 @@ async function createRenderJob(req, res, ctx2) {
|
|
|
1681
1502
|
}
|
|
1682
1503
|
effie = data;
|
|
1683
1504
|
}
|
|
1684
|
-
const jobId =
|
|
1505
|
+
const jobId = randomUUID();
|
|
1685
1506
|
const job = {
|
|
1686
1507
|
effie,
|
|
1687
1508
|
scale,
|
|
1688
1509
|
upload,
|
|
1689
1510
|
createdAt: Date.now()
|
|
1690
1511
|
};
|
|
1691
|
-
await ctx2.
|
|
1512
|
+
await ctx2.transientStore.putJson(
|
|
1513
|
+
storeKeys.renderJob(jobId),
|
|
1514
|
+
job,
|
|
1515
|
+
ctx2.transientStore.jobMetadataTtlMs
|
|
1516
|
+
);
|
|
1692
1517
|
res.json({
|
|
1693
1518
|
id: jobId,
|
|
1694
1519
|
url: `${ctx2.baseUrl}/render/${jobId}`
|
|
@@ -1702,9 +1527,13 @@ async function streamRenderJob(req, res, ctx2) {
|
|
|
1702
1527
|
try {
|
|
1703
1528
|
setupCORSHeaders(res);
|
|
1704
1529
|
const jobId = req.params.id;
|
|
1705
|
-
|
|
1706
|
-
|
|
1707
|
-
|
|
1530
|
+
if (ctx2.renderBackendBaseUrl) {
|
|
1531
|
+
await proxyRenderFromBackend(res, jobId, ctx2);
|
|
1532
|
+
return;
|
|
1533
|
+
}
|
|
1534
|
+
const jobCacheKey = storeKeys.renderJob(jobId);
|
|
1535
|
+
const job = await ctx2.transientStore.getJson(jobCacheKey);
|
|
1536
|
+
ctx2.transientStore.delete(jobCacheKey);
|
|
1708
1537
|
if (!job) {
|
|
1709
1538
|
res.status(404).json({ error: "Job not found or expired" });
|
|
1710
1539
|
return;
|
|
@@ -1725,7 +1554,7 @@ async function streamRenderJob(req, res, ctx2) {
|
|
|
1725
1554
|
}
|
|
1726
1555
|
async function streamRenderDirect(res, job, ctx2) {
|
|
1727
1556
|
const renderer = new EffieRenderer(job.effie, {
|
|
1728
|
-
|
|
1557
|
+
transientStore: ctx2.transientStore,
|
|
1729
1558
|
httpProxy: ctx2.httpProxy
|
|
1730
1559
|
});
|
|
1731
1560
|
const videoStream = await renderer.render(job.scale);
|
|
@@ -1789,7 +1618,7 @@ async function renderAndUploadInternal(effie, scale, upload, sendEvent, ctx2) {
|
|
|
1789
1618
|
}
|
|
1790
1619
|
const renderStartTime = Date.now();
|
|
1791
1620
|
const renderer = new EffieRenderer(effie, {
|
|
1792
|
-
|
|
1621
|
+
transientStore: ctx2.transientStore,
|
|
1793
1622
|
httpProxy: ctx2.httpProxy
|
|
1794
1623
|
});
|
|
1795
1624
|
const videoStream = await renderer.render(scale);
|
|
@@ -1817,36 +1646,566 @@ async function renderAndUploadInternal(effie, scale, upload, sendEvent, ctx2) {
|
|
|
1817
1646
|
timings.uploadTime = Date.now() - uploadStartTime;
|
|
1818
1647
|
return timings;
|
|
1819
1648
|
}
|
|
1820
|
-
|
|
1821
|
-
|
|
1822
|
-
|
|
1823
|
-
|
|
1824
|
-
|
|
1825
|
-
|
|
1826
|
-
|
|
1827
|
-
const
|
|
1828
|
-
if (
|
|
1829
|
-
|
|
1830
|
-
|
|
1831
|
-
|
|
1832
|
-
|
|
1649
|
+
async function proxyRenderFromBackend(res, jobId, ctx2) {
|
|
1650
|
+
const backendUrl = `${ctx2.renderBackendBaseUrl}/render/${jobId}`;
|
|
1651
|
+
const response = await ffsFetch(backendUrl);
|
|
1652
|
+
if (!response.ok) {
|
|
1653
|
+
res.status(response.status).json({ error: "Backend render failed" });
|
|
1654
|
+
return;
|
|
1655
|
+
}
|
|
1656
|
+
const contentType = response.headers.get("content-type") || "";
|
|
1657
|
+
if (contentType.includes("text/event-stream")) {
|
|
1658
|
+
setupSSEResponse(res);
|
|
1659
|
+
const sendEvent = createSSEEventSender(res);
|
|
1660
|
+
const reader = response.body?.getReader();
|
|
1661
|
+
if (!reader) {
|
|
1662
|
+
sendEvent("error", { message: "No response body from backend" });
|
|
1663
|
+
res.end();
|
|
1664
|
+
return;
|
|
1665
|
+
}
|
|
1666
|
+
const decoder = new TextDecoder();
|
|
1667
|
+
let buffer = "";
|
|
1668
|
+
try {
|
|
1669
|
+
while (true) {
|
|
1670
|
+
const { done, value } = await reader.read();
|
|
1671
|
+
if (done) break;
|
|
1672
|
+
if (res.destroyed) {
|
|
1673
|
+
reader.cancel();
|
|
1674
|
+
break;
|
|
1675
|
+
}
|
|
1676
|
+
buffer += decoder.decode(value, { stream: true });
|
|
1677
|
+
const lines = buffer.split("\n");
|
|
1678
|
+
buffer = lines.pop() || "";
|
|
1679
|
+
let currentEvent = "";
|
|
1680
|
+
let currentData = "";
|
|
1681
|
+
for (const line of lines) {
|
|
1682
|
+
if (line.startsWith("event: ")) {
|
|
1683
|
+
currentEvent = line.slice(7);
|
|
1684
|
+
} else if (line.startsWith("data: ")) {
|
|
1685
|
+
currentData = line.slice(6);
|
|
1686
|
+
} else if (line === "" && currentEvent && currentData) {
|
|
1687
|
+
try {
|
|
1688
|
+
const data = JSON.parse(currentData);
|
|
1689
|
+
sendEvent(currentEvent, data);
|
|
1690
|
+
} catch {
|
|
1691
|
+
}
|
|
1692
|
+
currentEvent = "";
|
|
1693
|
+
currentData = "";
|
|
1694
|
+
}
|
|
1695
|
+
}
|
|
1696
|
+
}
|
|
1697
|
+
} finally {
|
|
1698
|
+
reader.releaseLock();
|
|
1699
|
+
res.end();
|
|
1700
|
+
}
|
|
1701
|
+
} else {
|
|
1702
|
+
await proxyBinaryStream(response, res);
|
|
1833
1703
|
}
|
|
1834
|
-
return true;
|
|
1835
1704
|
}
|
|
1836
|
-
|
|
1837
|
-
|
|
1838
|
-
|
|
1839
|
-
|
|
1840
|
-
|
|
1841
|
-
|
|
1842
|
-
|
|
1843
|
-
|
|
1844
|
-
|
|
1845
|
-
|
|
1846
|
-
|
|
1705
|
+
|
|
1706
|
+
// src/handlers/orchestrating.ts
|
|
1707
|
+
async function createWarmupAndRenderJob(req, res, ctx2) {
|
|
1708
|
+
try {
|
|
1709
|
+
const options = req.body;
|
|
1710
|
+
let rawEffieData;
|
|
1711
|
+
if (typeof options.effie === "string") {
|
|
1712
|
+
const response = await ffsFetch(options.effie);
|
|
1713
|
+
if (!response.ok) {
|
|
1714
|
+
throw new Error(
|
|
1715
|
+
`Failed to fetch Effie data: ${response.status} ${response.statusText}`
|
|
1716
|
+
);
|
|
1717
|
+
}
|
|
1718
|
+
rawEffieData = await response.json();
|
|
1719
|
+
} else {
|
|
1720
|
+
rawEffieData = options.effie;
|
|
1721
|
+
}
|
|
1722
|
+
let effie;
|
|
1723
|
+
if (!ctx2.skipValidation) {
|
|
1724
|
+
const result = effieDataSchema3.safeParse(rawEffieData);
|
|
1725
|
+
if (!result.success) {
|
|
1726
|
+
res.status(400).json({
|
|
1727
|
+
error: "Invalid effie data",
|
|
1728
|
+
issues: result.error.issues.map((issue) => ({
|
|
1729
|
+
path: issue.path.join("."),
|
|
1730
|
+
message: issue.message
|
|
1731
|
+
}))
|
|
1732
|
+
});
|
|
1733
|
+
return;
|
|
1734
|
+
}
|
|
1735
|
+
effie = result.data;
|
|
1736
|
+
} else {
|
|
1737
|
+
const data = rawEffieData;
|
|
1738
|
+
if (!data?.segments) {
|
|
1739
|
+
res.status(400).json({ error: "Invalid effie data: missing segments" });
|
|
1740
|
+
return;
|
|
1741
|
+
}
|
|
1742
|
+
effie = data;
|
|
1743
|
+
}
|
|
1744
|
+
const sources = extractEffieSourcesWithTypes(effie);
|
|
1745
|
+
const scale = options.scale ?? 1;
|
|
1746
|
+
const upload = options.upload;
|
|
1747
|
+
const jobId = randomUUID2();
|
|
1748
|
+
const warmupJobId = randomUUID2();
|
|
1749
|
+
const renderJobId = randomUUID2();
|
|
1750
|
+
const job = {
|
|
1751
|
+
effie,
|
|
1752
|
+
sources,
|
|
1753
|
+
scale,
|
|
1754
|
+
upload,
|
|
1755
|
+
warmupJobId,
|
|
1756
|
+
renderJobId,
|
|
1757
|
+
createdAt: Date.now()
|
|
1758
|
+
};
|
|
1759
|
+
await ctx2.transientStore.putJson(
|
|
1760
|
+
storeKeys.warmupAndRenderJob(jobId),
|
|
1761
|
+
job,
|
|
1762
|
+
ctx2.transientStore.jobMetadataTtlMs
|
|
1763
|
+
);
|
|
1764
|
+
await ctx2.transientStore.putJson(
|
|
1765
|
+
storeKeys.warmupJob(warmupJobId),
|
|
1766
|
+
{ sources },
|
|
1767
|
+
ctx2.transientStore.jobMetadataTtlMs
|
|
1768
|
+
);
|
|
1769
|
+
await ctx2.transientStore.putJson(
|
|
1770
|
+
storeKeys.renderJob(renderJobId),
|
|
1771
|
+
{
|
|
1772
|
+
effie,
|
|
1773
|
+
scale,
|
|
1774
|
+
upload,
|
|
1775
|
+
createdAt: Date.now()
|
|
1776
|
+
},
|
|
1777
|
+
ctx2.transientStore.jobMetadataTtlMs
|
|
1778
|
+
);
|
|
1779
|
+
res.json({
|
|
1780
|
+
id: jobId,
|
|
1781
|
+
url: `${ctx2.baseUrl}/warmup-and-render/${jobId}`
|
|
1782
|
+
});
|
|
1783
|
+
} catch (error) {
|
|
1784
|
+
console.error("Error creating warmup-and-render job:", error);
|
|
1785
|
+
res.status(500).json({ error: "Failed to create warmup-and-render job" });
|
|
1786
|
+
}
|
|
1787
|
+
}
|
|
1788
|
+
async function streamWarmupAndRenderJob(req, res, ctx2) {
|
|
1789
|
+
try {
|
|
1790
|
+
setupCORSHeaders(res);
|
|
1791
|
+
const jobId = req.params.id;
|
|
1792
|
+
const jobCacheKey = storeKeys.warmupAndRenderJob(jobId);
|
|
1793
|
+
const job = await ctx2.transientStore.getJson(jobCacheKey);
|
|
1794
|
+
ctx2.transientStore.delete(jobCacheKey);
|
|
1795
|
+
if (!job) {
|
|
1796
|
+
res.status(404).json({ error: "Job not found" });
|
|
1797
|
+
return;
|
|
1798
|
+
}
|
|
1799
|
+
setupSSEResponse(res);
|
|
1800
|
+
const sendEvent = createSSEEventSender(res);
|
|
1801
|
+
let keepalivePhase = "warmup";
|
|
1802
|
+
const keepalive = setInterval(() => {
|
|
1803
|
+
sendEvent("keepalive", { phase: keepalivePhase });
|
|
1804
|
+
}, 25e3);
|
|
1805
|
+
try {
|
|
1806
|
+
if (ctx2.warmupBackendBaseUrl) {
|
|
1807
|
+
await proxyRemoteSSE(
|
|
1808
|
+
`${ctx2.warmupBackendBaseUrl}/warmup/${job.warmupJobId}`,
|
|
1809
|
+
sendEvent,
|
|
1810
|
+
"warmup:",
|
|
1811
|
+
res
|
|
1812
|
+
);
|
|
1813
|
+
} else {
|
|
1814
|
+
const warmupSender = prefixEventSender(sendEvent, "warmup:");
|
|
1815
|
+
await warmupSources(job.sources, warmupSender, ctx2);
|
|
1816
|
+
warmupSender("complete", { status: "ready" });
|
|
1817
|
+
}
|
|
1818
|
+
keepalivePhase = "render";
|
|
1819
|
+
if (ctx2.renderBackendBaseUrl) {
|
|
1820
|
+
await proxyRemoteSSE(
|
|
1821
|
+
`${ctx2.renderBackendBaseUrl}/render/${job.renderJobId}`,
|
|
1822
|
+
sendEvent,
|
|
1823
|
+
"render:",
|
|
1824
|
+
res
|
|
1825
|
+
);
|
|
1826
|
+
} else {
|
|
1827
|
+
const renderSender = prefixEventSender(sendEvent, "render:");
|
|
1828
|
+
if (job.upload) {
|
|
1829
|
+
renderSender("started", { status: "rendering" });
|
|
1830
|
+
const timings = await renderAndUploadInternal(
|
|
1831
|
+
job.effie,
|
|
1832
|
+
job.scale,
|
|
1833
|
+
job.upload,
|
|
1834
|
+
renderSender,
|
|
1835
|
+
ctx2
|
|
1836
|
+
);
|
|
1837
|
+
renderSender("complete", { status: "uploaded", timings });
|
|
1838
|
+
} else {
|
|
1839
|
+
const videoUrl = `${ctx2.baseUrl}/render/${job.renderJobId}`;
|
|
1840
|
+
sendEvent("complete", { status: "ready", videoUrl });
|
|
1841
|
+
}
|
|
1842
|
+
}
|
|
1843
|
+
if (job.upload && !ctx2.renderBackendBaseUrl) {
|
|
1844
|
+
sendEvent("complete", { status: "done" });
|
|
1845
|
+
}
|
|
1846
|
+
} catch (error) {
|
|
1847
|
+
sendEvent("error", {
|
|
1848
|
+
phase: keepalivePhase,
|
|
1849
|
+
message: String(error)
|
|
1850
|
+
});
|
|
1851
|
+
} finally {
|
|
1852
|
+
clearInterval(keepalive);
|
|
1853
|
+
res.end();
|
|
1854
|
+
}
|
|
1855
|
+
} catch (error) {
|
|
1856
|
+
console.error("Error in warmup-and-render streaming:", error);
|
|
1857
|
+
if (!res.headersSent) {
|
|
1858
|
+
res.status(500).json({ error: "Warmup-and-render streaming failed" });
|
|
1859
|
+
} else {
|
|
1860
|
+
res.end();
|
|
1861
|
+
}
|
|
1862
|
+
}
|
|
1863
|
+
}
|
|
1864
|
+
function prefixEventSender(sendEvent, prefix) {
|
|
1865
|
+
return (event, data) => {
|
|
1866
|
+
sendEvent(`${prefix}${event}`, data);
|
|
1867
|
+
};
|
|
1868
|
+
}
|
|
1869
|
+
// Proxies a remote SSE endpoint: fetches `url`, parses the event stream,
// and re-emits each JSON event through `sendEvent` with `prefix` prepended
// to the event name. Stops pulling when the client response is destroyed.
//
// Fix: the event/data accumulators now live OUTSIDE the chunk-read loop.
// Previously they were reset on every network chunk, so an event whose
// "event:"/"data:" lines arrived in one chunk and whose terminating blank
// line arrived in the next chunk was silently dropped. Trailing "\r" is
// also stripped so "\r\n"-framed streams parse correctly.
async function proxyRemoteSSE(url, sendEvent, prefix, res) {
  const response = await ffsFetch(url, {
    headers: {
      Accept: "text/event-stream"
    }
  });
  if (!response.ok) {
    throw new Error(`Remote backend error: ${response.status}`);
  }
  const reader = response.body?.getReader();
  if (!reader) {
    throw new Error("No response body from remote backend");
  }
  const decoder = new TextDecoder();
  let buffer = "";
  // Persist across chunks so multi-chunk events are not lost.
  let currentEvent = "";
  let currentData = "";
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      if (res.destroyed) {
        // Client went away; stop pulling from the remote backend.
        reader.cancel();
        break;
      }
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      // Keep the (possibly partial) last line in the buffer.
      buffer = lines.pop() || "";
      for (const rawLine of lines) {
        const line = rawLine.endsWith("\r") ? rawLine.slice(0, -1) : rawLine;
        if (line.startsWith("event: ")) {
          currentEvent = line.slice(7);
        } else if (line.startsWith("data: ")) {
          currentData = line.slice(6);
        } else if (line === "" && currentEvent && currentData) {
          try {
            const data = JSON.parse(currentData);
            sendEvent(`${prefix}${currentEvent}`, data);
          } catch {
            // Malformed JSON payload: drop this event, keep the stream alive.
          }
          currentEvent = "";
          currentData = "";
        }
      }
    }
  } finally {
    reader.releaseLock();
  }
}
|
|
1917
|
+
// Pipes a fetch-style Response body straight through to an Express
// response, forwarding Content-Type / Content-Length headers when the
// upstream provided them. Aborts the upstream read if the client response
// is destroyed; always releases the reader lock and ends the response.
async function proxyBinaryStream(response, res) {
  const contentType = response.headers.get("content-type");
  const contentLength = response.headers.get("content-length");
  if (contentType) res.set("Content-Type", contentType);
  if (contentLength) res.set("Content-Length", contentLength);
  const reader = response.body?.getReader();
  if (!reader) {
    throw new Error("No response body");
  }
  try {
    for (;;) {
      const { done, value } = await reader.read();
      if (done) break;
      if (res.destroyed) {
        reader.cancel();
        break;
      }
      res.write(value);
    }
  } finally {
    reader.releaseLock();
    res.end();
  }
}
|
|
1941
|
+
|
|
1942
|
+
// src/handlers/caching.ts
|
|
1943
|
+
// Video and audio sources are passed through over HTTP rather than cached,
// so warming them up would be wasted work.
function shouldSkipWarmup(source) {
  const passthroughTypes = ["video", "audio"];
  return passthroughTypes.includes(source.type);
}
|
|
1946
|
+
// Dedupes concurrent downloads of the same source: maps cache key -> the
// in-flight fetch promise, so parallel warmup workers (and overlapping
// jobs) share one download instead of racing.
var inFlightFetches = /* @__PURE__ */ new Map();
|
|
1947
|
+
// Parses an Effie document from the request body, persists the typed list
// of sources it references as a new warmup job in the transient store, and
// replies with the job id plus the SSE URL the client should follow for
// progress. 400 on parse/validation failure, 500 on unexpected errors.
async function createWarmupJob(req, res, ctx2) {
  try {
    const parsed = parseEffieData(req.body, ctx2.skipValidation);
    if ("error" in parsed) {
      res.status(400).json(parsed);
      return;
    }
    const jobSources = extractEffieSourcesWithTypes2(parsed.effie);
    const jobId = randomUUID3();
    await ctx2.transientStore.putJson(
      storeKeys.warmupJob(jobId),
      { sources: jobSources },
      ctx2.transientStore.jobMetadataTtlMs
    );
    res.json({ id: jobId, url: `${ctx2.baseUrl}/warmup/${jobId}` });
  } catch (error) {
    console.error("Error creating warmup job:", error);
    res.status(500).json({ error: "Failed to create warmup job" });
  }
}
|
|
1970
|
+
// Streams warmup progress for a previously created job as server-sent
// events. When a remote warmup backend is configured the stream is proxied
// from it verbatim; otherwise the job's stored source list is warmed up
// locally. 404 if the job id is unknown.
//
// Fix: the job-metadata delete was a floating promise, so a transient-store
// failure there became an unhandled promise rejection; it is now explicitly
// best-effort with rejections swallowed.
async function streamWarmupJob(req, res, ctx2) {
  try {
    setupCORSHeaders(res);
    const jobId = req.params.id;
    if (ctx2.warmupBackendBaseUrl) {
      setupSSEResponse(res);
      const sendEvent2 = createSSEEventSender(res);
      try {
        await proxyRemoteSSE(
          `${ctx2.warmupBackendBaseUrl}/warmup/${jobId}`,
          sendEvent2,
          "",
          res
        );
      } finally {
        res.end();
      }
      return;
    }
    const jobCacheKey = storeKeys.warmupJob(jobId);
    const job = await ctx2.transientStore.getJson(jobCacheKey);
    // Jobs are single-use: delete the metadata now (fire and forget) so a
    // second GET returns 404. Swallow rejections — a failed cleanup must
    // not take down the stream or surface as an unhandled rejection.
    Promise.resolve(ctx2.transientStore.delete(jobCacheKey)).catch(() => {
    });
    if (!job) {
      res.status(404).json({ error: "Job not found" });
      return;
    }
    setupSSEResponse(res);
    const sendEvent = createSSEEventSender(res);
    try {
      await warmupSources(job.sources, sendEvent, ctx2);
      sendEvent("complete", { status: "ready" });
    } catch (error) {
      sendEvent("error", { message: String(error) });
    } finally {
      res.end();
    }
  } catch (error) {
    console.error("Error in warmup streaming:", error);
    if (!res.headersSent) {
      res.status(500).json({ error: "Warmup streaming failed" });
    } else {
      res.end();
    }
  }
}
|
|
2015
|
+
// Removes every cached source referenced by the posted Effie document from
// the transient store. Responds with how many entries were actually purged
// versus how many sources the document referenced in total.
async function purgeCache(req, res, ctx2) {
  try {
    const parsed = parseEffieData(req.body, ctx2.skipValidation);
    if ("error" in parsed) {
      res.status(400).json(parsed);
      return;
    }
    const sourceUrls = extractEffieSources(parsed.effie);
    let purged = 0;
    for (const sourceUrl of sourceUrls) {
      const cacheKey = storeKeys.source(sourceUrl);
      const isCached = await ctx2.transientStore.exists(cacheKey);
      if (isCached) {
        await ctx2.transientStore.delete(cacheKey);
        purged++;
      }
    }
    res.json({ purged, total: sourceUrls.length });
  } catch (error) {
    console.error("Error purging cache:", error);
    res.status(500).json({ error: "Failed to purge cache" });
  }
}
|
|
2037
|
+
// Warms the transient cache for a list of typed sources, emitting progress
// via `sendEvent` throughout: "start", per-source "progress"
// (skipped/hit/cached/error), periodic "keepalive", and a final "summary".
// Downloads run on up to ctx2.warmupConcurrency concurrent workers.
//
// Fix: clearInterval(keepalive) previously ran only after a successful
// Promise.all(workers); if any worker rejected (e.g. sendEvent throwing
// once the client disconnected) the keepalive timer leaked forever. It is
// now guaranteed by a finally block.
async function warmupSources(sources, sendEvent, ctx2) {
  const total = sources.length;
  sendEvent("start", { total });
  let cached = 0;
  let failed = 0;
  let skipped = 0;
  // Partition: video/audio are streamed straight through and never cached.
  const sourcesToCache = [];
  for (const source of sources) {
    if (shouldSkipWarmup(source)) {
      skipped++;
      sendEvent("progress", {
        url: source.url,
        status: "skipped",
        reason: "http-video-audio-passthrough",
        cached,
        failed,
        skipped,
        total
      });
    } else {
      sourcesToCache.push(source);
    }
  }
  // One batched existence check up front avoids a round trip per source.
  const sourceCacheKeys = sourcesToCache.map((s) => storeKeys.source(s.url));
  const existsMap = await ctx2.transientStore.existsMany(sourceCacheKeys);
  for (let i = 0; i < sourcesToCache.length; i++) {
    if (existsMap.get(sourceCacheKeys[i])) {
      cached++;
      sendEvent("progress", {
        url: sourcesToCache[i].url,
        status: "hit",
        cached,
        failed,
        skipped,
        total
      });
    }
  }
  const uncached = sourcesToCache.filter(
    (_, i) => !existsMap.get(sourceCacheKeys[i])
  );
  if (uncached.length === 0) {
    sendEvent("summary", { cached, failed, skipped, total });
    return;
  }
  // Periodic pings keep SSE intermediaries from timing out long downloads.
  const keepalive = setInterval(() => {
    sendEvent("keepalive", { cached, failed, skipped, total });
  }, 25e3);
  try {
    const queue = [...uncached];
    const workers = Array.from(
      { length: Math.min(ctx2.warmupConcurrency, queue.length) },
      async () => {
        while (queue.length > 0) {
          // Synchronous shift after the length check — safe on a single
          // event loop; no await occurs in between.
          const source = queue.shift();
          const cacheKey = storeKeys.source(source.url);
          const startTime = Date.now();
          try {
            // Share one in-flight download when several workers (or
            // overlapping jobs) want the same cache key.
            let fetchPromise = inFlightFetches.get(cacheKey);
            if (!fetchPromise) {
              fetchPromise = fetchAndCache(source.url, cacheKey, sendEvent, ctx2);
              inFlightFetches.set(cacheKey, fetchPromise);
            }
            await fetchPromise;
            inFlightFetches.delete(cacheKey);
            cached++;
            sendEvent("progress", {
              url: source.url,
              status: "cached",
              cached,
              failed,
              skipped,
              total,
              ms: Date.now() - startTime
            });
          } catch (error) {
            inFlightFetches.delete(cacheKey);
            failed++;
            sendEvent("progress", {
              url: source.url,
              status: "error",
              error: String(error),
              cached,
              failed,
              skipped,
              total,
              ms: Date.now() - startTime
            });
          }
        }
      }
    );
    await Promise.all(workers);
  } finally {
    clearInterval(keepalive);
  }
  sendEvent("summary", { cached, failed, skipped, total });
}
|
|
2132
|
+
// Downloads `url` and streams the body into the transient store under
// `cacheKey`, emitting "downloading" progress events at most once every
// 10 seconds. Generous timeouts accommodate very large sources.
//
// Fix: on a non-OK response the body was left unconsumed, which pins the
// underlying connection with undici-style fetch implementations; it is now
// cancelled (best effort) before throwing.
async function fetchAndCache(url, cacheKey, sendEvent, ctx2) {
  const response = await ffsFetch(url, {
    headersTimeout: 10 * 60 * 1e3,
    // 10 minutes
    bodyTimeout: 20 * 60 * 1e3
    // 20 minutes
  });
  if (!response.ok) {
    try {
      // Release the connection held by the unread error body.
      await response.body?.cancel();
    } catch {
    }
    throw new Error(`${response.status} ${response.statusText}`);
  }
  sendEvent("downloading", { url, status: "started", bytesReceived: 0 });
  const sourceStream = Readable3.fromWeb(
    response.body
  );
  let totalBytes = 0;
  let lastEventTime = Date.now();
  const PROGRESS_INTERVAL = 1e4;
  // Counts bytes as they pass through and throttles progress events so a
  // fast download does not flood the SSE channel.
  const progressStream = new Transform({
    transform(chunk, _encoding, callback) {
      totalBytes += chunk.length;
      const now = Date.now();
      if (now - lastEventTime >= PROGRESS_INTERVAL) {
        sendEvent("downloading", {
          url,
          status: "downloading",
          bytesReceived: totalBytes
        });
        lastEventTime = now;
      }
      callback(null, chunk);
    }
  });
  const trackedStream = sourceStream.pipe(progressStream);
  await ctx2.transientStore.put(
    cacheKey,
    trackedStream,
    ctx2.transientStore.sourceTtlMs
  );
}
|
|
2171
|
+
|
|
2172
|
+
// src/server.ts
|
|
2173
|
+
// Express app for the FFS HTTP API. Request bodies carry inline Effie
// documents, hence the generous 50mb JSON body limit.
var app = express5();
app.use(bodyParser.json({ limit: "50mb" }));
// Shared server context (transient store, HTTP proxy, config), created
// once at startup. NOTE(review): top-level await — requires an ESM entry.
var ctx = await createServerContext();
console.log(`FFS HTTP proxy listening on port ${ctx.httpProxy.port}`);
|
|
2177
|
+
// Bearer-token auth guard. When FFS_API_KEY is unset, auth is disabled and
// every request is allowed. Returns true when the request may proceed;
// otherwise sends a 401 JSON response and returns false.
//
// Fix: the token was compared with `!==`, which leaks key material through
// response timing; crypto.timingSafeEqual makes the comparison
// constant-time (the length check is required by that API and only reveals
// the header length).
function validateAuth(req, res) {
  const apiKey = process.env.FFS_API_KEY;
  if (!apiKey) return true;
  const authHeader = req.headers.authorization;
  const expected = `Bearer ${apiKey}`;
  const ok = typeof authHeader === "string" && authHeader.length === expected.length && crypto.timingSafeEqual(Buffer.from(authHeader), Buffer.from(expected));
  if (!ok) {
    res.status(401).json({ error: "Unauthorized" });
    return false;
  }
  return true;
}
|
|
2187
|
+
// POST endpoints create jobs / mutate the cache; each one is gated by the
// bearer-token check in validateAuth before delegating to its handler.
app.post("/warmup", (req, res) => {
  if (!validateAuth(req, res)) return;
  createWarmupJob(req, res, ctx);
});
app.post("/purge", (req, res) => {
  if (!validateAuth(req, res)) return;
  purgeCache(req, res, ctx);
});
app.post("/render", (req, res) => {
  if (!validateAuth(req, res)) return;
  createRenderJob(req, res, ctx);
});
app.post("/warmup-and-render", (req, res) => {
  if (!validateAuth(req, res)) return;
  createWarmupAndRenderJob(req, res, ctx);
});
// GET endpoints stream job progress (SSE) and perform no auth check here —
// the job id handed out by the POST acts as the capability.
// NOTE(review): confirm id-as-capability is the intended auth model.
app.get("/warmup/:id", (req, res) => streamWarmupJob(req, res, ctx));
app.get("/render/:id", (req, res) => streamRenderJob(req, res, ctx));
app.get(
  "/warmup-and-render/:id",
  (req, res) => streamWarmupAndRenderJob(req, res, ctx)
);
|
|
1850
2209
|
var port = process.env.FFS_PORT || 2e3;
|
|
1851
2210
|
var server = app.listen(port, () => {
|
|
1852
2211
|
console.log(`FFS server listening on port ${port}`);
|
|
@@ -1854,7 +2213,7 @@ var server = app.listen(port, () => {
|
|
|
1854
2213
|
function shutdown() {
|
|
1855
2214
|
console.log("Shutting down FFS server...");
|
|
1856
2215
|
ctx.httpProxy.close();
|
|
1857
|
-
ctx.
|
|
2216
|
+
ctx.transientStore.close();
|
|
1858
2217
|
server.close(() => {
|
|
1859
2218
|
console.log("FFS server stopped");
|
|
1860
2219
|
process.exit(0);
|