@effing/ffs 0.1.2 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/server.js CHANGED
@@ -1,13 +1,13 @@
  #!/usr/bin/env node
 
  // src/server.ts
- import express4 from "express";
+ import express5 from "express";
  import bodyParser from "body-parser";
 
  // src/handlers/shared.ts
  import "express";
 
- // src/cache.ts
+ // src/storage.ts
  import {
  S3Client,
  PutObjectCommand,
@@ -22,11 +22,14 @@ import { pipeline } from "stream/promises";
  import path from "path";
  import os from "os";
  import crypto from "crypto";
- var S3CacheStorage = class {
+ var DEFAULT_SOURCE_TTL_MS = 60 * 60 * 1e3;
+ var DEFAULT_JOB_METADATA_TTL_MS = 8 * 60 * 60 * 1e3;
+ var S3TransientStore = class {
  client;
  bucket;
  prefix;
- ttlMs;
+ sourceTtlMs;
+ jobMetadataTtlMs;
  constructor(options) {
  this.client = new S3Client({
  endpoint: options.endpoint,
@@ -39,22 +42,23 @@ var S3CacheStorage = class {
  });
  this.bucket = options.bucket;
  this.prefix = options.prefix ?? "";
- this.ttlMs = options.ttlMs ?? 60 * 60 * 1e3;
+ this.sourceTtlMs = options.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;
+ this.jobMetadataTtlMs = options.jobMetadataTtlMs ?? DEFAULT_JOB_METADATA_TTL_MS;
  }
- getExpires() {
- return new Date(Date.now() + this.ttlMs);
+ getExpires(ttlMs) {
+ return new Date(Date.now() + ttlMs);
  }
  getFullKey(key) {
  return `${this.prefix}${key}`;
  }
- async put(key, stream) {
+ async put(key, stream, ttlMs) {
  const upload = new Upload({
  client: this.client,
  params: {
  Bucket: this.bucket,
  Key: this.getFullKey(key),
  Body: stream,
- Expires: this.getExpires()
+ Expires: this.getExpires(ttlMs ?? this.sourceTtlMs)
  }
  });
  await upload.done();
@@ -115,14 +119,14 @@ var S3CacheStorage = class {
  throw err;
  }
  }
- async putJson(key, data) {
+ async putJson(key, data, ttlMs) {
  await this.client.send(
  new PutObjectCommand({
  Bucket: this.bucket,
  Key: this.getFullKey(key),
  Body: JSON.stringify(data),
  ContentType: "application/json",
- Expires: this.getExpires()
+ Expires: this.getExpires(ttlMs ?? this.jobMetadataTtlMs)
  })
  );
  }
@@ -148,20 +152,25 @@ var S3CacheStorage = class {
  close() {
  }
  };
- var LocalCacheStorage = class {
+ var LocalTransientStore = class {
  baseDir;
  initialized = false;
  cleanupInterval;
- ttlMs;
- constructor(baseDir, ttlMs = 60 * 60 * 1e3) {
- this.baseDir = baseDir ?? path.join(os.tmpdir(), "ffs-cache");
- this.ttlMs = ttlMs;
+ sourceTtlMs;
+ jobMetadataTtlMs;
+ /** For cleanup, use the longer of the two TTLs */
+ maxTtlMs;
+ constructor(options) {
+ this.baseDir = options?.baseDir ?? path.join(os.tmpdir(), "ffs-transient");
+ this.sourceTtlMs = options?.sourceTtlMs ?? DEFAULT_SOURCE_TTL_MS;
+ this.jobMetadataTtlMs = options?.jobMetadataTtlMs ?? DEFAULT_JOB_METADATA_TTL_MS;
+ this.maxTtlMs = Math.max(this.sourceTtlMs, this.jobMetadataTtlMs);
  this.cleanupInterval = setInterval(() => {
  this.cleanupExpired().catch(console.error);
  }, 3e5);
  }
  /**
- * Remove files older than TTL
+ * Remove files older than max TTL
  */
  async cleanupExpired() {
  if (!this.initialized) return;
@@ -186,7 +195,7 @@ var LocalCacheStorage = class {
  } else if (entry.isFile()) {
  try {
  const stat = await fs.stat(fullPath);
- if (now - stat.mtimeMs > this.ttlMs) {
+ if (now - stat.mtimeMs > this.maxTtlMs) {
  await fs.rm(fullPath, { force: true });
  }
  } catch {
@@ -205,7 +214,7 @@ var LocalCacheStorage = class {
  const rand = crypto.randomBytes(8).toString("hex");
  return `${finalPath}.tmp-${process.pid}-${rand}`;
  }
- async put(key, stream) {
+ async put(key, stream, _ttlMs) {
  const fp = this.filePath(key);
  await this.ensureDir(fp);
  const tmpPath = this.tmpPathFor(fp);
@@ -241,7 +250,7 @@ var LocalCacheStorage = class {
  async delete(key) {
  await fs.rm(this.filePath(key), { force: true });
  }
- async putJson(key, data) {
+ async putJson(key, data, _ttlMs) {
  const fp = this.filePath(key);
  await this.ensureDir(fp);
  const tmpPath = this.tmpPathFor(fp);
@@ -269,48 +278,208 @@ var LocalCacheStorage = class {
  }
  }
  };
- function createCacheStorage() {
- const ttlMs = process.env.FFS_CACHE_TTL_MS ? parseInt(process.env.FFS_CACHE_TTL_MS, 10) : 60 * 60 * 1e3;
- if (process.env.FFS_CACHE_BUCKET) {
- return new S3CacheStorage({
- endpoint: process.env.FFS_CACHE_ENDPOINT,
- region: process.env.FFS_CACHE_REGION ?? "auto",
- bucket: process.env.FFS_CACHE_BUCKET,
- prefix: process.env.FFS_CACHE_PREFIX,
- accessKeyId: process.env.FFS_CACHE_ACCESS_KEY,
- secretAccessKey: process.env.FFS_CACHE_SECRET_KEY,
- ttlMs
+ function createTransientStore() {
+ const sourceTtlMs = process.env.FFS_SOURCE_CACHE_TTL_MS ? parseInt(process.env.FFS_SOURCE_CACHE_TTL_MS, 10) : DEFAULT_SOURCE_TTL_MS;
+ const jobMetadataTtlMs = process.env.FFS_JOB_METADATA_TTL_MS ? parseInt(process.env.FFS_JOB_METADATA_TTL_MS, 10) : DEFAULT_JOB_METADATA_TTL_MS;
+ if (process.env.FFS_TRANSIENT_STORE_BUCKET) {
+ return new S3TransientStore({
+ endpoint: process.env.FFS_TRANSIENT_STORE_ENDPOINT,
+ region: process.env.FFS_TRANSIENT_STORE_REGION ?? "auto",
+ bucket: process.env.FFS_TRANSIENT_STORE_BUCKET,
+ prefix: process.env.FFS_TRANSIENT_STORE_PREFIX,
+ accessKeyId: process.env.FFS_TRANSIENT_STORE_ACCESS_KEY,
+ secretAccessKey: process.env.FFS_TRANSIENT_STORE_SECRET_KEY,
+ sourceTtlMs,
+ jobMetadataTtlMs
  });
  }
- return new LocalCacheStorage(process.env.FFS_CACHE_LOCAL_DIR, ttlMs);
+ return new LocalTransientStore({
+ baseDir: process.env.FFS_TRANSIENT_STORE_LOCAL_DIR,
+ sourceTtlMs,
+ jobMetadataTtlMs
+ });
  }
  function hashUrl(url) {
  return crypto.createHash("sha256").update(url).digest("hex").slice(0, 16);
  }
- function sourceCacheKey(url) {
+ function sourceStoreKey(url) {
  return `sources/${hashUrl(url)}`;
  }
- function warmupJobCacheKey(jobId) {
+ function warmupJobStoreKey(jobId) {
  return `jobs/warmup/${jobId}.json`;
  }
- function renderJobCacheKey(jobId) {
+ function renderJobStoreKey(jobId) {
  return `jobs/render/${jobId}.json`;
  }
- var cacheKeys = {
- source: sourceCacheKey,
- warmupJob: warmupJobCacheKey,
- renderJob: renderJobCacheKey
+ function warmupAndRenderJobStoreKey(jobId) {
+ return `jobs/warmup-and-render/${jobId}.json`;
+ }
+ var storeKeys = {
+ source: sourceStoreKey,
+ warmupJob: warmupJobStoreKey,
+ renderJob: renderJobStoreKey,
+ warmupAndRenderJob: warmupAndRenderJobStoreKey
+ };
+
+ // src/proxy.ts
+ import http from "http";
+ import { Readable } from "stream";
+
+ // src/fetch.ts
+ import { fetch, Agent } from "undici";
+ async function ffsFetch(url, options) {
+ const {
+ method,
+ body,
+ headers,
+ headersTimeout = 3e5,
+ // 5 minutes
+ bodyTimeout = 3e5
+ // 5 minutes
+ } = options ?? {};
+ const agent = new Agent({ headersTimeout, bodyTimeout });
+ return fetch(url, {
+ method,
+ body,
+ headers: { "User-Agent": "FFS (+https://effing.dev/ffs)", ...headers },
+ dispatcher: agent
+ });
+ }
+
+ // src/proxy.ts
+ var HttpProxy = class {
+ server = null;
+ _port = null;
+ startPromise = null;
+ get port() {
+ return this._port;
+ }
+ /**
+ * Transform a URL to go through the proxy.
+ * @throws Error if proxy not started
+ */
+ transformUrl(url) {
+ if (this._port === null) throw new Error("Proxy not started");
+ return `http://127.0.0.1:${this._port}/${url}`;
+ }
+ /**
+ * Start the proxy server. Safe to call multiple times.
+ */
+ async start() {
+ if (this._port !== null) return;
+ if (this.startPromise) {
+ await this.startPromise;
+ return;
+ }
+ this.startPromise = this.doStart();
+ await this.startPromise;
+ }
+ async doStart() {
+ this.server = http.createServer(async (req, res) => {
+ try {
+ const originalUrl = this.parseProxyPath(req.url || "");
+ if (!originalUrl) {
+ res.writeHead(400, { "Content-Type": "text/plain" });
+ res.end("Bad Request: invalid proxy path");
+ return;
+ }
+ const response = await ffsFetch(originalUrl, {
+ method: req.method,
+ headers: this.filterHeaders(req.headers),
+ bodyTimeout: 0
+ // No timeout for streaming
+ });
+ const headers = {};
+ response.headers.forEach((value, key) => {
+ headers[key] = value;
+ });
+ res.writeHead(response.status, headers);
+ if (response.body) {
+ const nodeStream = Readable.fromWeb(response.body);
+ nodeStream.pipe(res);
+ nodeStream.on("error", (err) => {
+ console.error("Proxy stream error:", err);
+ res.destroy();
+ });
+ } else {
+ res.end();
+ }
+ } catch (err) {
+ console.error("Proxy request error:", err);
+ if (!res.headersSent) {
+ res.writeHead(502, { "Content-Type": "text/plain" });
+ res.end("Bad Gateway");
+ } else {
+ res.destroy();
+ }
+ }
+ });
+ await new Promise((resolve) => {
+ this.server.listen(0, "127.0.0.1", () => {
+ this._port = this.server.address().port;
+ resolve();
+ });
+ });
+ }
+ /**
+ * Parse the proxy path to extract the original URL.
+ * Path format: /{originalUrl}
+ */
+ parseProxyPath(path3) {
+ if (!path3.startsWith("/http://") && !path3.startsWith("/https://")) {
+ return null;
+ }
+ return path3.slice(1);
+ }
+ /**
+ * Filter headers to forward to the upstream server.
+ * Removes hop-by-hop headers that shouldn't be forwarded.
+ */
+ filterHeaders(headers) {
+ const skip = /* @__PURE__ */ new Set([
+ "host",
+ "connection",
+ "keep-alive",
+ "transfer-encoding",
+ "te",
+ "trailer",
+ "upgrade",
+ "proxy-authorization",
+ "proxy-authenticate"
+ ]);
+ const result = {};
+ for (const [key, value] of Object.entries(headers)) {
+ if (!skip.has(key.toLowerCase()) && typeof value === "string") {
+ result[key] = value;
+ }
+ }
+ return result;
+ }
+ /**
+ * Close the proxy server and reset state.
+ */
+ close() {
+ this.server?.close();
+ this.server = null;
+ this._port = null;
+ this.startPromise = null;
+ }
  };
 
  // src/handlers/shared.ts
  import { effieDataSchema } from "@effing/effie";
- function createServerContext() {
+ async function createServerContext() {
  const port2 = process.env.FFS_PORT || 2e3;
+ const httpProxy = new HttpProxy();
+ await httpProxy.start();
  return {
- cacheStorage: createCacheStorage(),
+ transientStore: createTransientStore(),
+ httpProxy,
  baseUrl: process.env.FFS_BASE_URL || `http://localhost:${port2}`,
  skipValidation: !!process.env.FFS_SKIP_VALIDATION && process.env.FFS_SKIP_VALIDATION !== "false",
- cacheConcurrency: parseInt(process.env.FFS_CACHE_CONCURRENCY || "4", 10)
+ warmupConcurrency: parseInt(process.env.FFS_WARMUP_CONCURRENCY || "4", 10),
+ warmupBackendBaseUrl: process.env.FFS_WARMUP_BACKEND_BASE_URL,
+ renderBackendBaseUrl: process.env.FFS_RENDER_BACKEND_BASE_URL
  };
  }
  function parseEffieData(body, skipValidation) {
@@ -357,215 +526,21 @@ data: ${JSON.stringify(data)}
 
  // src/handlers/caching.ts
  import "express";
- import { Readable, Transform } from "stream";
- import { randomUUID } from "crypto";
-
- // src/fetch.ts
- import { fetch, Agent } from "undici";
- async function ffsFetch(url, options) {
- const {
- method,
- body,
- headers,
- headersTimeout = 3e5,
- // 5 minutes
- bodyTimeout = 3e5
- // 5 minutes
- } = options ?? {};
- const agent = new Agent({ headersTimeout, bodyTimeout });
- return fetch(url, {
- method,
- body,
- headers: { "User-Agent": "FFS (+https://effing.dev/ffs)", ...headers },
- dispatcher: agent
- });
- }
+ import { Readable as Readable3, Transform } from "stream";
+ import { randomUUID as randomUUID3 } from "crypto";
+ import {
+ extractEffieSources,
+ extractEffieSourcesWithTypes as extractEffieSourcesWithTypes2
+ } from "@effing/effie";
 
- // src/handlers/caching.ts
- import { extractEffieSources } from "@effing/effie";
- var inFlightFetches = /* @__PURE__ */ new Map();
- async function createWarmupJob(req, res, ctx2) {
- try {
- const parseResult = parseEffieData(req.body, ctx2.skipValidation);
- if ("error" in parseResult) {
- res.status(400).json(parseResult);
- return;
- }
- const sources = extractEffieSources(parseResult.effie);
- const jobId = randomUUID();
- await ctx2.cacheStorage.putJson(cacheKeys.warmupJob(jobId), { sources });
- res.json({
- id: jobId,
- url: `${ctx2.baseUrl}/warmup/${jobId}`
- });
- } catch (error) {
- console.error("Error creating warmup job:", error);
- res.status(500).json({ error: "Failed to create warmup job" });
- }
- }
- async function streamWarmupJob(req, res, ctx2) {
- try {
- setupCORSHeaders(res);
- const jobId = req.params.id;
- const jobCacheKey = cacheKeys.warmupJob(jobId);
- const job = await ctx2.cacheStorage.getJson(jobCacheKey);
- ctx2.cacheStorage.delete(jobCacheKey);
- if (!job) {
- res.status(404).json({ error: "Job not found" });
- return;
- }
- setupSSEResponse(res);
- const sendEvent = createSSEEventSender(res);
- try {
- await warmupSources(job.sources, sendEvent, ctx2);
- sendEvent("complete", { status: "ready" });
- } catch (error) {
- sendEvent("error", { message: String(error) });
- } finally {
- res.end();
- }
- } catch (error) {
- console.error("Error in warmup streaming:", error);
- if (!res.headersSent) {
- res.status(500).json({ error: "Warmup streaming failed" });
- } else {
- res.end();
- }
- }
- }
- async function purgeCache(req, res, ctx2) {
- try {
- const parseResult = parseEffieData(req.body, ctx2.skipValidation);
- if ("error" in parseResult) {
- res.status(400).json(parseResult);
- return;
- }
- const sources = extractEffieSources(parseResult.effie);
- let purged = 0;
- for (const url of sources) {
- const ck = cacheKeys.source(url);
- if (await ctx2.cacheStorage.exists(ck)) {
- await ctx2.cacheStorage.delete(ck);
- purged++;
- }
- }
- res.json({ purged, total: sources.length });
- } catch (error) {
- console.error("Error purging cache:", error);
- res.status(500).json({ error: "Failed to purge cache" });
- }
- }
- async function warmupSources(sources, sendEvent, ctx2) {
- const total = sources.length;
- const sourceCacheKeys = sources.map(cacheKeys.source);
- sendEvent("start", { total });
- const existsMap = await ctx2.cacheStorage.existsMany(sourceCacheKeys);
- let cached = 0;
- let failed = 0;
- for (let i = 0; i < sources.length; i++) {
- if (existsMap.get(sourceCacheKeys[i])) {
- cached++;
- sendEvent("progress", {
- url: sources[i],
- status: "hit",
- cached,
- failed,
- total
- });
- }
- }
- const uncached = sources.filter((_, i) => !existsMap.get(sourceCacheKeys[i]));
- if (uncached.length === 0) {
- sendEvent("summary", { cached, failed, total });
- return;
- }
- const keepalive = setInterval(() => {
- sendEvent("keepalive", { cached, failed, total });
- }, 25e3);
- const queue = [...uncached];
- const workers = Array.from(
- { length: Math.min(ctx2.cacheConcurrency, queue.length) },
- async () => {
- while (queue.length > 0) {
- const url = queue.shift();
- const cacheKey = cacheKeys.source(url);
- const startTime = Date.now();
- try {
- let fetchPromise = inFlightFetches.get(cacheKey);
- if (!fetchPromise) {
- fetchPromise = fetchAndCache(url, cacheKey, sendEvent, ctx2);
- inFlightFetches.set(cacheKey, fetchPromise);
- }
- await fetchPromise;
- inFlightFetches.delete(cacheKey);
- cached++;
- sendEvent("progress", {
- url,
- status: "cached",
- cached,
- failed,
- total,
- ms: Date.now() - startTime
- });
- } catch (error) {
- inFlightFetches.delete(cacheKey);
- failed++;
- sendEvent("progress", {
- url,
- status: "error",
- error: String(error),
- cached,
- failed,
- total,
- ms: Date.now() - startTime
- });
- }
- }
- }
- );
- await Promise.all(workers);
- clearInterval(keepalive);
- sendEvent("summary", { cached, failed, total });
- }
- async function fetchAndCache(url, cacheKey, sendEvent, ctx2) {
- const response = await ffsFetch(url, {
- headersTimeout: 10 * 60 * 1e3,
- // 10 minutes
- bodyTimeout: 20 * 60 * 1e3
- // 20 minutes
- });
- if (!response.ok) {
- throw new Error(`${response.status} ${response.statusText}`);
- }
- sendEvent("downloading", { url, status: "started", bytesReceived: 0 });
- const sourceStream = Readable.fromWeb(
- response.body
- );
- let totalBytes = 0;
- let lastEventTime = Date.now();
- const PROGRESS_INTERVAL = 1e4;
- const progressStream = new Transform({
- transform(chunk, _encoding, callback) {
- totalBytes += chunk.length;
- const now = Date.now();
- if (now - lastEventTime >= PROGRESS_INTERVAL) {
- sendEvent("downloading", {
- url,
- status: "downloading",
- bytesReceived: totalBytes
- });
- lastEventTime = now;
- }
- callback(null, chunk);
- }
- });
- const trackedStream = sourceStream.pipe(progressStream);
- await ctx2.cacheStorage.put(cacheKey, trackedStream);
- }
+ // src/handlers/orchestrating.ts
+ import "express";
+ import { randomUUID as randomUUID2 } from "crypto";
+ import { extractEffieSourcesWithTypes, effieDataSchema as effieDataSchema3 } from "@effing/effie";
 
  // src/handlers/rendering.ts
  import "express";
- import { randomUUID as randomUUID2 } from "crypto";
+ import { randomUUID } from "crypto";
 
  // src/render.ts
  import { Readable as Readable2 } from "stream";
@@ -815,14 +790,14 @@ var FFmpegRunner = class {
  constructor(command) {
  this.command = command;
  }
- async run(sourceResolver, imageTransformer) {
+ async run(sourceFetcher, imageTransformer, referenceResolver, urlTransformer) {
  const tempDir = await fs2.mkdtemp(path2.join(os2.tmpdir(), "ffs-"));
  const fileMapping = /* @__PURE__ */ new Map();
- const sourceCache = /* @__PURE__ */ new Map();
- const fetchAndSaveSource = async (input, inputName) => {
- const stream = await sourceResolver({
+ const fetchCache = /* @__PURE__ */ new Map();
+ const fetchAndSaveSource = async (input, sourceUrl, inputName) => {
+ const stream = await sourceFetcher({
  type: input.type,
- src: input.source
+ src: sourceUrl
  });
  if (input.type === "animation") {
  const extractionDir = path2.join(tempDir, inputName);
@@ -864,17 +839,27 @@ var FFmpegRunner = class {
  this.command.inputs.map(async (input) => {
  if (input.type === "color") return;
  const inputName = `ffmpeg_input_${input.index.toString().padStart(3, "0")}`;
+ const sourceUrl = referenceResolver ? referenceResolver(input.source) : input.source;
+ if ((input.type === "video" || input.type === "audio") && (sourceUrl.startsWith("http://") || sourceUrl.startsWith("https://"))) {
+ const finalUrl = urlTransformer ? urlTransformer(sourceUrl) : sourceUrl;
+ fileMapping.set(input.index, finalUrl);
+ return;
+ }
  const shouldCache = input.source.startsWith("#");
  if (shouldCache) {
- let fetchPromise = sourceCache.get(input.source);
+ let fetchPromise = fetchCache.get(input.source);
  if (!fetchPromise) {
- fetchPromise = fetchAndSaveSource(input, inputName);
- sourceCache.set(input.source, fetchPromise);
+ fetchPromise = fetchAndSaveSource(input, sourceUrl, inputName);
+ fetchCache.set(input.source, fetchPromise);
  }
  const filePath = await fetchPromise;
  fileMapping.set(input.index, filePath);
  } else {
- const filePath = await fetchAndSaveSource(input, inputName);
+ const filePath = await fetchAndSaveSource(
+ input,
+ sourceUrl,
+ inputName
+ );
  fileMapping.set(input.index, filePath);
  }
  })
@@ -969,20 +954,15 @@ var EffieRenderer = class {
  effieData;
  ffmpegRunner;
  allowLocalFiles;
- cacheStorage;
+ transientStore;
+ httpProxy;
  constructor(effieData, options) {
  this.effieData = effieData;
  this.allowLocalFiles = options?.allowLocalFiles ?? false;
- this.cacheStorage = options?.cacheStorage;
+ this.transientStore = options?.transientStore;
+ this.httpProxy = options?.httpProxy;
  }
  async fetchSource(src) {
- if (src.startsWith("#")) {
- const sourceName = src.slice(1);
- if (!(sourceName in this.effieData.sources)) {
- throw new Error(`Named source "${sourceName}" not found`);
- }
- src = this.effieData.sources[sourceName];
- }
  if (src.startsWith("data:")) {
  const commaIndex = src.indexOf(",");
  if (commaIndex === -1) {
@@ -1002,9 +982,9 @@ var EffieRenderer = class {
  }
  return createReadStream2(fileURLToPath(src));
  }
- if (this.cacheStorage) {
- const cachedStream = await this.cacheStorage.getStream(
- cacheKeys.source(src)
+ if (this.transientStore) {
+ const cachedStream = await this.transientStore.getStream(
+ storeKeys.source(src)
  );
  if (cachedStream) {
  return cachedStream;
@@ -1276,7 +1256,13 @@ var EffieRenderer = class {
  segmentBgInputIndices.push(null);
  }
  }
- for (const segment of this.effieData.segments) {
+ const globalBgSegmentIndices = [];
+ for (let i = 0; i < this.effieData.segments.length; i++) {
+ if (segmentBgInputIndices[i] === null) {
+ globalBgSegmentIndices.push(i);
+ }
+ }
+ for (const segment of this.effieData.segments) {
  for (const layer of segment.layers) {
  inputs.push(this.buildLayerInput(layer, segment.duration, inputIndex));
  inputIndex++;
@@ -1312,6 +1298,26 @@ var EffieRenderer = class {
  const filterParts = [];
  const videoSegmentLabels = [];
  const audioSegmentLabels = [];
+ const globalBgFifoLabels = /* @__PURE__ */ new Map();
+ const bgFilter = `fps=${this.effieData.fps},scale=${frameWidth}x${frameHeight}:force_original_aspect_ratio=increase,crop=${frameWidth}:${frameHeight}`;
+ if (globalBgSegmentIndices.length === 1) {
+ const fifoLabel = `bg_fifo_0`;
+ filterParts.push(`[${globalBgInputIdx}:v]${bgFilter},fifo[${fifoLabel}]`);
+ globalBgFifoLabels.set(globalBgSegmentIndices[0], fifoLabel);
+ } else if (globalBgSegmentIndices.length > 1) {
+ const splitCount = globalBgSegmentIndices.length;
+ const splitOutputLabels = globalBgSegmentIndices.map(
+ (_, i) => `bg_split_${i}`
+ );
+ filterParts.push(
+ `[${globalBgInputIdx}:v]${bgFilter},split=${splitCount}${splitOutputLabels.map((l) => `[${l}]`).join("")}`
+ );
+ for (let i = 0; i < splitCount; i++) {
+ const fifoLabel = `bg_fifo_${i}`;
+ filterParts.push(`[${splitOutputLabels[i]}]fifo[${fifoLabel}]`);
+ globalBgFifoLabels.set(globalBgSegmentIndices[i], fifoLabel);
+ }
+ }
  for (let segIdx = 0; segIdx < this.effieData.segments.length; segIdx++) {
  const segment = this.effieData.segments[segIdx];
  const bgLabel = `bg_seg${segIdx}`;
@@ -1322,9 +1328,12 @@ var EffieRenderer = class {
  `[${segBgInputIdx}:v]fps=${this.effieData.fps},scale=${frameWidth}x${frameHeight},trim=start=${segBgSeek}:duration=${segment.duration},setpts=PTS-STARTPTS[${bgLabel}]`
  );
  } else {
- filterParts.push(
- `[${globalBgInputIdx}:v]fps=${this.effieData.fps},scale=${frameWidth}x${frameHeight},trim=start=${backgroundSeek + currentTime}:duration=${segment.duration},setpts=PTS-STARTPTS[${bgLabel}]`
- );
+ const fifoLabel = globalBgFifoLabels.get(segIdx);
+ if (fifoLabel) {
+ filterParts.push(
+ `[${fifoLabel}]trim=start=${backgroundSeek + currentTime}:duration=${segment.duration},setpts=PTS-STARTPTS[${bgLabel}]`
+ );
+ }
  }
  const vidLabel = `vid_seg${segIdx}`;
  filterParts.push(
@@ -1408,6 +1417,20 @@ var EffieRenderer = class {
  }
  };
  }
+ /**
+ * Resolves a source reference to its actual URL.
+ * If the source is a #reference, returns the resolved URL.
+ * Otherwise, returns the source as-is.
+ */
+ resolveReference(src) {
+ if (src.startsWith("#")) {
+ const sourceName = src.slice(1);
+ if (sourceName in this.effieData.sources) {
+ return this.effieData.sources[sourceName];
+ }
+ }
+ return src;
+ }
  /**
  * Renders the effie data to a video stream.
  * @param scaleFactor - Scale factor for output dimensions
@@ -1415,9 +1438,12 @@ var EffieRenderer = class {
  async render(scaleFactor = 1) {
  const ffmpegCommand = this.buildFFmpegCommand("-", scaleFactor);
  this.ffmpegRunner = new FFmpegRunner(ffmpegCommand);
+ const urlTransformer = this.httpProxy ? (url) => this.httpProxy.transformUrl(url) : void 0;
  return this.ffmpegRunner.run(
  async ({ src }) => this.fetchSource(src),
- this.createImageTransformer(scaleFactor)
+ this.createImageTransformer(scaleFactor),
+ (src) => this.resolveReference(src),
+ urlTransformer
  );
  }
  close() {
@@ -1476,14 +1502,18 @@ async function createRenderJob(req, res, ctx2) {
  }
  effie = data;
  }
- const jobId = randomUUID2();
+ const jobId = randomUUID();
  const job = {
  effie,
  scale,
  upload,
  createdAt: Date.now()
  };
- await ctx2.cacheStorage.putJson(cacheKeys.renderJob(jobId), job);
+ await ctx2.transientStore.putJson(
+ storeKeys.renderJob(jobId),
+ job,
+ ctx2.transientStore.jobMetadataTtlMs
+ );
  res.json({
  id: jobId,
  url: `${ctx2.baseUrl}/render/${jobId}`
@@ -1497,9 +1527,13 @@ async function streamRenderJob(req, res, ctx2) {
  try {
  setupCORSHeaders(res);
  const jobId = req.params.id;
- const jobCacheKey = cacheKeys.renderJob(jobId);
- const job = await ctx2.cacheStorage.getJson(jobCacheKey);
- ctx2.cacheStorage.delete(jobCacheKey);
+ if (ctx2.renderBackendBaseUrl) {
+ await proxyRenderFromBackend(res, jobId, ctx2);
+ return;
+ }
+ const jobCacheKey = storeKeys.renderJob(jobId);
+ const job = await ctx2.transientStore.getJson(jobCacheKey);
+ ctx2.transientStore.delete(jobCacheKey);
  if (!job) {
  res.status(404).json({ error: "Job not found or expired" });
  return;
@@ -1520,7 +1554,8 @@ async function streamRenderJob(req, res, ctx2) {
  }
  async function streamRenderDirect(res, job, ctx2) {
  const renderer = new EffieRenderer(job.effie, {
- cacheStorage: ctx2.cacheStorage
+ transientStore: ctx2.transientStore,
+ httpProxy: ctx2.httpProxy
  });
  const videoStream = await renderer.render(job.scale);
  res.on("close", () => {
@@ -1582,7 +1617,10 @@ async function renderAndUploadInternal(effie, scale, upload, sendEvent, ctx2) {
  timings.uploadCoverTime = Date.now() - uploadCoverStartTime;
  }
  const renderStartTime = Date.now();
- const renderer = new EffieRenderer(effie, { cacheStorage: ctx2.cacheStorage });
+ const renderer = new EffieRenderer(effie, {
+ transientStore: ctx2.transientStore,
+ httpProxy: ctx2.httpProxy
+ });
  const videoStream = await renderer.render(scale);
  const chunks = [];
  for await (const chunk of videoStream) {
@@ -1608,11 +1646,534 @@ async function renderAndUploadInternal(effie, scale, upload, sendEvent, ctx2) {
  timings.uploadTime = Date.now() - uploadStartTime;
  return timings;
  }
+ async function proxyRenderFromBackend(res, jobId, ctx2) {
+ const backendUrl = `${ctx2.renderBackendBaseUrl}/render/${jobId}`;
+ const response = await ffsFetch(backendUrl);
+ if (!response.ok) {
+ res.status(response.status).json({ error: "Backend render failed" });
+ return;
+ }
+ const contentType = response.headers.get("content-type") || "";
+ if (contentType.includes("text/event-stream")) {
+ setupSSEResponse(res);
+ const sendEvent = createSSEEventSender(res);
+ const reader = response.body?.getReader();
+ if (!reader) {
+ sendEvent("error", { message: "No response body from backend" });
+ res.end();
+ return;
+ }
+ const decoder = new TextDecoder();
+ let buffer = "";
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ if (res.destroyed) {
+ reader.cancel();
+ break;
+ }
+ buffer += decoder.decode(value, { stream: true });
+ const lines = buffer.split("\n");
+ buffer = lines.pop() || "";
+ let currentEvent = "";
+ let currentData = "";
+ for (const line of lines) {
+ if (line.startsWith("event: ")) {
+ currentEvent = line.slice(7);
+ } else if (line.startsWith("data: ")) {
+ currentData = line.slice(6);
+ } else if (line === "" && currentEvent && currentData) {
+ try {
+ const data = JSON.parse(currentData);
+ sendEvent(currentEvent, data);
+ } catch {
+ }
+ currentEvent = "";
+ currentData = "";
+ }
+ }
+ }
+ } finally {
+ reader.releaseLock();
+ res.end();
+ }
+ } else {
+ await proxyBinaryStream(response, res);
+ }
+ }
+
+ // src/handlers/orchestrating.ts
+ async function createWarmupAndRenderJob(req, res, ctx2) {
+ try {
+ const options = req.body;
+ let rawEffieData;
+ if (typeof options.effie === "string") {
+ const response = await ffsFetch(options.effie);
+ if (!response.ok) {
+ throw new Error(
+ `Failed to fetch Effie data: ${response.status} ${response.statusText}`
+ );
+ }
+ rawEffieData = await response.json();
+ } else {
+ rawEffieData = options.effie;
+ }
+ let effie;
+ if (!ctx2.skipValidation) {
+ const result = effieDataSchema3.safeParse(rawEffieData);
+ if (!result.success) {
+ res.status(400).json({
+ error: "Invalid effie data",
+ issues: result.error.issues.map((issue) => ({
+ path: issue.path.join("."),
+ message: issue.message
+ }))
+ });
+ return;
+ }
+ effie = result.data;
+ } else {
+ const data = rawEffieData;
+ if (!data?.segments) {
+ res.status(400).json({ error: "Invalid effie data: missing segments" });
+ return;
+ }
+ effie = data;
+ }
+ const sources = extractEffieSourcesWithTypes(effie);
+ const scale = options.scale ?? 1;
+ const upload = options.upload;
+ const jobId = randomUUID2();
+ const warmupJobId = randomUUID2();
+ const renderJobId = randomUUID2();
+ const job = {
+ effie,
+ sources,
+ scale,
+ upload,
+ warmupJobId,
+ renderJobId,
+ createdAt: Date.now()
+ };
+ await ctx2.transientStore.putJson(
+ storeKeys.warmupAndRenderJob(jobId),
+ job,
+ ctx2.transientStore.jobMetadataTtlMs
+ );
+ await ctx2.transientStore.putJson(
+ storeKeys.warmupJob(warmupJobId),
+ { sources },
+ ctx2.transientStore.jobMetadataTtlMs
+ );
+ await ctx2.transientStore.putJson(
+ storeKeys.renderJob(renderJobId),
+ {
+ effie,
+ scale,
+ upload,
+ createdAt: Date.now()
+ },
+ ctx2.transientStore.jobMetadataTtlMs
+ );
+ res.json({
+ id: jobId,
+ url: `${ctx2.baseUrl}/warmup-and-render/${jobId}`
+ });
+ } catch (error) {
+ console.error("Error creating warmup-and-render job:", error);
+ res.status(500).json({ error: "Failed to create warmup-and-render job" });
+ }
+ }
+ async function streamWarmupAndRenderJob(req, res, ctx2) {
+ try {
+ setupCORSHeaders(res);
+ const jobId = req.params.id;
+ const jobCacheKey = storeKeys.warmupAndRenderJob(jobId);
+ const job = await ctx2.transientStore.getJson(jobCacheKey);
+ ctx2.transientStore.delete(jobCacheKey);
+ if (!job) {
+ res.status(404).json({ error: "Job not found" });
+ return;
+ }
+ setupSSEResponse(res);
+ const sendEvent = createSSEEventSender(res);
+ let keepalivePhase = "warmup";
+ const keepalive = setInterval(() => {
+ sendEvent("keepalive", { phase: keepalivePhase });
+ }, 25e3);
+ try {
+ if (ctx2.warmupBackendBaseUrl) {
+ await proxyRemoteSSE(
+ `${ctx2.warmupBackendBaseUrl}/warmup/${job.warmupJobId}`,
+ sendEvent,
+ "warmup:",
+ res
+ );
+ } else {
+ const warmupSender = prefixEventSender(sendEvent, "warmup:");
+ await warmupSources(job.sources, warmupSender, ctx2);
+ warmupSender("complete", { status: "ready" });
+ }
+ keepalivePhase = "render";
+ if (ctx2.renderBackendBaseUrl) {
+ await proxyRemoteSSE(
+ `${ctx2.renderBackendBaseUrl}/render/${job.renderJobId}`,
+ sendEvent,
+ "render:",
+ res
+ );
+ } else {
+ const renderSender = prefixEventSender(sendEvent, "render:");
+ if (job.upload) {
+ renderSender("started", { status: "rendering" });
+ const timings = await renderAndUploadInternal(
+ job.effie,
+ job.scale,
+ job.upload,
+ renderSender,
+ ctx2
+ );
+ renderSender("complete", { status: "uploaded", timings });
+ } else {
+ const videoUrl = `${ctx2.baseUrl}/render/${job.renderJobId}`;
+ sendEvent("complete", { status: "ready", videoUrl });
+ }
+ }
+ if (job.upload && !ctx2.renderBackendBaseUrl) {
+ sendEvent("complete", { status: "done" });
+ }
+ } catch (error) {
+ sendEvent("error", {
+ phase: keepalivePhase,
+ message: String(error)
+ });
+ } finally {
+ clearInterval(keepalive);
+ res.end();
+ }
+ } catch (error) {
+ console.error("Error in warmup-and-render streaming:", error);
+ if (!res.headersSent) {
+ res.status(500).json({ error: "Warmup-and-render streaming failed" });
+ } else {
+ res.end();
+ }
+ }
+ }
+ function prefixEventSender(sendEvent, prefix) {
+ return (event, data) => {
+ sendEvent(`${prefix}${event}`, data);
+ };
+ }
+ async function proxyRemoteSSE(url, sendEvent, prefix, res) {
+ const response = await ffsFetch(url, {
+ headers: {
+ Accept: "text/event-stream"
+ }
+ });
+ if (!response.ok) {
+ throw new Error(`Remote backend error: ${response.status}`);
+ }
+ const reader = response.body?.getReader();
+ if (!reader) {
+ throw new Error("No response body from remote backend");
+ }
+ const decoder = new TextDecoder();
+ let buffer = "";
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ if (res.destroyed) {
+ reader.cancel();
+ break;
+ }
+ buffer += decoder.decode(value, { stream: true });
+ const lines = buffer.split("\n");
+ buffer = lines.pop() || "";
+ let currentEvent = "";
+ let currentData = "";
+ for (const line of lines) {
+ if (line.startsWith("event: ")) {
+ currentEvent = line.slice(7);
+ } else if (line.startsWith("data: ")) {
+ currentData = line.slice(6);
+ } else if (line === "" && currentEvent && currentData) {
+ try {
+ const data = JSON.parse(currentData);
+ sendEvent(`${prefix}${currentEvent}`, data);
+ } catch {
+ }
+ currentEvent = "";
+ currentData = "";
+ }
+ }
+ }
+ } finally {
+ reader.releaseLock();
+ }
+ }
+ async function proxyBinaryStream(response, res) {
+ const contentType = response.headers.get("content-type");
+ if (contentType) res.set("Content-Type", contentType);
+ const contentLength = response.headers.get("content-length");
+ if (contentLength) res.set("Content-Length", contentLength);
+ const reader = response.body?.getReader();
+ if (!reader) {
+ throw new Error("No response body");
+ }
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ if (res.destroyed) {
+ reader.cancel();
+ break;
+ }
+ res.write(value);
+ }
+ } finally {
+ reader.releaseLock();
+ res.end();
+ }
+ }
+
+ // src/handlers/caching.ts
+ function shouldSkipWarmup(source) {
+ return source.type === "video" || source.type === "audio";
+ }
+ var inFlightFetches = /* @__PURE__ */ new Map();
+ async function createWarmupJob(req, res, ctx2) {
+ try {
+ const parseResult = parseEffieData(req.body, ctx2.skipValidation);
+ if ("error" in parseResult) {
+ res.status(400).json(parseResult);
+ return;
+ }
+ const sources = extractEffieSourcesWithTypes2(parseResult.effie);
+ const jobId = randomUUID3();
+ await ctx2.transientStore.putJson(
+ storeKeys.warmupJob(jobId),
+ { sources },
+ ctx2.transientStore.jobMetadataTtlMs
+ );
+ res.json({
+ id: jobId,
+ url: `${ctx2.baseUrl}/warmup/${jobId}`
+ });
+ } catch (error) {
+ console.error("Error creating warmup job:", error);
+ res.status(500).json({ error: "Failed to create warmup job" });
+ }
+ }
+ async function streamWarmupJob(req, res, ctx2) {
+ try {
+ setupCORSHeaders(res);
+ const jobId = req.params.id;
+ if (ctx2.warmupBackendBaseUrl) {
+ setupSSEResponse(res);
+ const sendEvent2 = createSSEEventSender(res);
+ try {
+ await proxyRemoteSSE(
+ `${ctx2.warmupBackendBaseUrl}/warmup/${jobId}`,
+ sendEvent2,
+ "",
+ res
+ );
+ } finally {
+ res.end();
+ }
+ return;
+ }
+ const jobCacheKey = storeKeys.warmupJob(jobId);
+ const job = await ctx2.transientStore.getJson(jobCacheKey);
+ ctx2.transientStore.delete(jobCacheKey);
+ if (!job) {
+ res.status(404).json({ error: "Job not found" });
+ return;
+ }
+ setupSSEResponse(res);
+ const sendEvent = createSSEEventSender(res);
+ try {
+ await warmupSources(job.sources, sendEvent, ctx2);
+ sendEvent("complete", { status: "ready" });
+ } catch (error) {
+ sendEvent("error", { message: String(error) });
+ } finally {
+ res.end();
+ }
+ } catch (error) {
+ console.error("Error in warmup streaming:", error);
+ if (!res.headersSent) {
+ res.status(500).json({ error: "Warmup streaming failed" });
+ } else {
+ res.end();
+ }
+ }
+ }
+ async function purgeCache(req, res, ctx2) {
+ try {
+ const parseResult = parseEffieData(req.body, ctx2.skipValidation);
+ if ("error" in parseResult) {
+ res.status(400).json(parseResult);
+ return;
+ }
+ const sources = extractEffieSources(parseResult.effie);
+ let purged = 0;
+ for (const url of sources) {
+ const ck = storeKeys.source(url);
+ if (await ctx2.transientStore.exists(ck)) {
+ await ctx2.transientStore.delete(ck);
+ purged++;
+ }
+ }
+ res.json({ purged, total: sources.length });
+ } catch (error) {
+ console.error("Error purging cache:", error);
+ res.status(500).json({ error: "Failed to purge cache" });
+ }
+ }
+ async function warmupSources(sources, sendEvent, ctx2) {
+ const total = sources.length;
+ sendEvent("start", { total });
+ let cached = 0;
+ let failed = 0;
+ let skipped = 0;
+ const sourcesToCache = [];
+ for (const source of sources) {
+ if (shouldSkipWarmup(source)) {
+ skipped++;
+ sendEvent("progress", {
+ url: source.url,
+ status: "skipped",
+ reason: "http-video-audio-passthrough",
+ cached,
+ failed,
+ skipped,
+ total
+ });
+ } else {
+ sourcesToCache.push(source);
+ }
+ }
+ const sourceCacheKeys = sourcesToCache.map((s) => storeKeys.source(s.url));
+ const existsMap = await ctx2.transientStore.existsMany(sourceCacheKeys);
+ for (let i = 0; i < sourcesToCache.length; i++) {
+ if (existsMap.get(sourceCacheKeys[i])) {
+ cached++;
+ sendEvent("progress", {
+ url: sourcesToCache[i].url,
+ status: "hit",
+ cached,
+ failed,
+ skipped,
+ total
+ });
+ }
+ }
+ const uncached = sourcesToCache.filter(
+ (_, i) => !existsMap.get(sourceCacheKeys[i])
+ );
+ if (uncached.length === 0) {
+ sendEvent("summary", { cached, failed, skipped, total });
+ return;
+ }
+ const keepalive = setInterval(() => {
+ sendEvent("keepalive", { cached, failed, skipped, total });
+ }, 25e3);
+ const queue = [...uncached];
+ const workers = Array.from(
+ { length: Math.min(ctx2.warmupConcurrency, queue.length) },
+ async () => {
+ while (queue.length > 0) {
+ const source = queue.shift();
+ const cacheKey = storeKeys.source(source.url);
+ const startTime = Date.now();
+ try {
+ let fetchPromise = inFlightFetches.get(cacheKey);
+ if (!fetchPromise) {
+ fetchPromise = fetchAndCache(source.url, cacheKey, sendEvent, ctx2);
+ inFlightFetches.set(cacheKey, fetchPromise);
+ }
+ await fetchPromise;
+ inFlightFetches.delete(cacheKey);
+ cached++;
+ sendEvent("progress", {
+ url: source.url,
+ status: "cached",
+ cached,
+ failed,
+ skipped,
+ total,
+ ms: Date.now() - startTime
+ });
+ } catch (error) {
+ inFlightFetches.delete(cacheKey);
+ failed++;
+ sendEvent("progress", {
+ url: source.url,
+ status: "error",
+ error: String(error),
+ cached,
+ failed,
+ skipped,
+ total,
+ ms: Date.now() - startTime
+ });
+ }
+ }
+ }
+ );
+ await Promise.all(workers);
+ clearInterval(keepalive);
+ sendEvent("summary", { cached, failed, skipped, total });
+ }
+ async function fetchAndCache(url, cacheKey, sendEvent, ctx2) {
+ const response = await ffsFetch(url, {
+ headersTimeout: 10 * 60 * 1e3,
+ // 10 minutes
+ bodyTimeout: 20 * 60 * 1e3
+ // 20 minutes
+ });
+ if (!response.ok) {
+ throw new Error(`${response.status} ${response.statusText}`);
+ }
+ sendEvent("downloading", { url, status: "started", bytesReceived: 0 });
+ const sourceStream = Readable3.fromWeb(
+ response.body
+ );
+ let totalBytes = 0;
+ let lastEventTime = Date.now();
+ const PROGRESS_INTERVAL = 1e4;
+ const progressStream = new Transform({
+ transform(chunk, _encoding, callback) {
+ totalBytes += chunk.length;
+ const now = Date.now();
+ if (now - lastEventTime >= PROGRESS_INTERVAL) {
+ sendEvent("downloading", {
+ url,
+ status: "downloading",
+ bytesReceived: totalBytes
+ });
+ lastEventTime = now;
+ }
+ callback(null, chunk);
+ }
+ });
+ const trackedStream = sourceStream.pipe(progressStream);
+ await ctx2.transientStore.put(
+ cacheKey,
+ trackedStream,
+ ctx2.transientStore.sourceTtlMs
+ );
+ }
 
  // src/server.ts
- var app = express4();
+ var app = express5();
  app.use(bodyParser.json({ limit: "50mb" }));
- var ctx = createServerContext();
+ var ctx = await createServerContext();
+ console.log(`FFS HTTP proxy listening on port ${ctx.httpProxy.port}`);
  function validateAuth(req, res) {
  const apiKey = process.env.FFS_API_KEY;
  if (!apiKey) return true;
@@ -1635,15 +2196,24 @@ app.post("/render", (req, res) => {
  if (!validateAuth(req, res)) return;
  createRenderJob(req, res, ctx);
  });
+ app.post("/warmup-and-render", (req, res) => {
+ if (!validateAuth(req, res)) return;
+ createWarmupAndRenderJob(req, res, ctx);
+ });
  app.get("/warmup/:id", (req, res) => streamWarmupJob(req, res, ctx));
  app.get("/render/:id", (req, res) => streamRenderJob(req, res, ctx));
+ app.get(
+ "/warmup-and-render/:id",
+ (req, res) => streamWarmupAndRenderJob(req, res, ctx)
+ );
  var port = process.env.FFS_PORT || 2e3;
  var server = app.listen(port, () => {
  console.log(`FFS server listening on port ${port}`);
  });
  function shutdown() {
  console.log("Shutting down FFS server...");
- ctx.cacheStorage.close();
+ ctx.httpProxy.close();
+ ctx.transientStore.close();
  server.close(() => {
  console.log("FFS server stopped");
  process.exit(0);