@effing/ffs 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +136 -16
- package/dist/{chunk-A7BAW24L.js → chunk-J64HSZNQ.js} +65 -46
- package/dist/chunk-J64HSZNQ.js.map +1 -0
- package/dist/{chunk-6YHSYHDY.js → chunk-XSCNUWZJ.js} +550 -214
- package/dist/chunk-XSCNUWZJ.js.map +1 -0
- package/dist/handlers/index.d.ts +36 -4
- package/dist/handlers/index.js +10 -2
- package/dist/index.d.ts +5 -5
- package/dist/index.js +1 -1
- package/dist/{proxy-BI8OMQl0.d.ts → proxy-qTA69nOV.d.ts} +11 -7
- package/dist/server.js +650 -291
- package/dist/server.js.map +1 -1
- package/package.json +2 -2
- package/dist/chunk-6YHSYHDY.js.map +0 -1
- package/dist/chunk-A7BAW24L.js.map +0 -1
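
The hunks below are from the renamed server chunk (chunk-6YHSYHDY.js → chunk-XSCNUWZJ.js; the trailing sourceMappingURL line identifies it). The release swaps the old cacheStorage/cacheKeys pair for a transientStore with storeKeys, adds FFS_WARMUP_CONCURRENCY, FFS_WARMUP_BACKEND_BASE_URL, and FFS_RENDER_BACKEND_BASE_URL to the server context, and introduces render, warmup, and combined warmup-and-render jobs whose progress is streamed over SSE, optionally proxied to remote backends. As a rough, non-authoritative sketch of how a client might drive the new combined flow: the POST path used to create the job is an assumption, since route registration lives outside this chunk, while the { effie, scale, upload } body, the { id, url } response, and the warmup:/render:-prefixed events are taken from the handlers shown below.

```ts
// Hedged sketch of a client for the new warmup-and-render flow.
// ASSUMPTION: the creation route is POST {baseUrl}/warmup-and-render; only the
// request body shape, the { id, url } response, and the SSE event names are
// visible in the diffed handlers below.
interface JobResponse {
  id: string;
  url: string; // `${baseUrl}/warmup-and-render/${id}` per createWarmupAndRenderJob
}

async function warmupAndRender(baseUrl: string, effie: unknown): Promise<void> {
  const created = await fetch(`${baseUrl}/warmup-and-render`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    // `upload` omitted: without it the job reports a videoUrl to stream from.
    body: JSON.stringify({ effie, scale: 1 }),
  });
  if (!created.ok) throw new Error(`Job creation failed: ${created.status}`);
  const job = (await created.json()) as JobResponse;

  // The stream emits warmup:*-prefixed events, then render:*-prefixed events,
  // plus unprefixed keepalive / complete / error (see streamWarmupAndRenderJob).
  // EventSource is the browser API; in Node, use an SSE client or parse the
  // fetch body manually. Note the built-in connection-error event shares the
  // "error" name with the server-sent error event in this sketch.
  await new Promise<void>((resolve, reject) => {
    const events = new EventSource(job.url);
    const finish = (ev: MessageEvent) => {
      console.log("job finished:", ev.data);
      events.close();
      resolve();
    };
    events.addEventListener("complete", finish as EventListener);
    events.addEventListener("render:complete", finish as EventListener);
    events.addEventListener("error", (ev) => {
      events.close();
      reject(new Error(`job failed: ${(ev as MessageEvent).data ?? "stream error"}`));
    });
  });
}
```

For a split deployment, the same stream is produced by proxying the per-phase SSE from the hosts named in FFS_WARMUP_BACKEND_BASE_URL and FFS_RENDER_BACKEND_BASE_URL (see proxyRemoteSSE below), so the client-side handling is unchanged.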
@@ -1,9 +1,9 @@
 import {
   EffieRenderer,
-
-
-
-} from "./chunk-
+  createTransientStore,
+  ffsFetch,
+  storeKeys
+} from "./chunk-J64HSZNQ.js";
 
 // src/handlers/shared.ts
 import "express";
@@ -137,11 +137,13 @@ async function createServerContext() {
   const httpProxy = new HttpProxy();
   await httpProxy.start();
   return {
-
+    transientStore: createTransientStore(),
     httpProxy,
     baseUrl: process.env.FFS_BASE_URL || `http://localhost:${port}`,
     skipValidation: !!process.env.FFS_SKIP_VALIDATION && process.env.FFS_SKIP_VALIDATION !== "false",
+    warmupConcurrency: parseInt(process.env.FFS_WARMUP_CONCURRENCY || "4", 10),
+    warmupBackendBaseUrl: process.env.FFS_WARMUP_BACKEND_BASE_URL,
+    renderBackendBaseUrl: process.env.FFS_RENDER_BACKEND_BASE_URL
   };
 }
 function parseEffieData(body, skipValidation) {
@@ -189,11 +191,506 @@ data: ${JSON.stringify(data)}
 // src/handlers/caching.ts
 import "express";
 import { Readable as Readable2, Transform } from "stream";
-import { randomUUID } from "crypto";
+import { randomUUID as randomUUID3 } from "crypto";
 import {
   extractEffieSources,
-  extractEffieSourcesWithTypes
+  extractEffieSourcesWithTypes as extractEffieSourcesWithTypes2
 } from "@effing/effie";
+
+// src/handlers/orchestrating.ts
+import "express";
+import { randomUUID as randomUUID2 } from "crypto";
+import { extractEffieSourcesWithTypes, effieDataSchema as effieDataSchema3 } from "@effing/effie";
+
+// src/handlers/rendering.ts
+import "express";
+import { randomUUID } from "crypto";
+import { effieDataSchema as effieDataSchema2 } from "@effing/effie";
+async function createRenderJob(req, res, ctx) {
+  try {
+    const isWrapped = "effie" in req.body;
+    let rawEffieData;
+    let scale;
+    let upload;
+    if (isWrapped) {
+      const options = req.body;
+      if (typeof options.effie === "string") {
+        const response = await ffsFetch(options.effie);
+        if (!response.ok) {
+          throw new Error(
+            `Failed to fetch Effie data: ${response.status} ${response.statusText}`
+          );
+        }
+        rawEffieData = await response.json();
+      } else {
+        rawEffieData = options.effie;
+      }
+      scale = options.scale ?? 1;
+      upload = options.upload;
+    } else {
+      rawEffieData = req.body;
+      scale = parseFloat(req.query.scale?.toString() || "1");
+    }
+    let effie;
+    if (!ctx.skipValidation) {
+      const result = effieDataSchema2.safeParse(rawEffieData);
+      if (!result.success) {
+        res.status(400).json({
+          error: "Invalid effie data",
+          issues: result.error.issues.map((issue) => ({
+            path: issue.path.join("."),
+            message: issue.message
+          }))
+        });
+        return;
+      }
+      effie = result.data;
+    } else {
+      const data = rawEffieData;
+      if (!data?.segments) {
+        res.status(400).json({ error: "Invalid effie data: missing segments" });
+        return;
+      }
+      effie = data;
+    }
+    const jobId = randomUUID();
+    const job = {
+      effie,
+      scale,
+      upload,
+      createdAt: Date.now()
+    };
+    await ctx.transientStore.putJson(
+      storeKeys.renderJob(jobId),
+      job,
+      ctx.transientStore.jobMetadataTtlMs
+    );
+    res.json({
+      id: jobId,
+      url: `${ctx.baseUrl}/render/${jobId}`
+    });
+  } catch (error) {
+    console.error("Error creating render job:", error);
+    res.status(500).json({ error: "Failed to create render job" });
+  }
+}
+async function streamRenderJob(req, res, ctx) {
+  try {
+    setupCORSHeaders(res);
+    const jobId = req.params.id;
+    if (ctx.renderBackendBaseUrl) {
+      await proxyRenderFromBackend(res, jobId, ctx);
+      return;
+    }
+    const jobCacheKey = storeKeys.renderJob(jobId);
+    const job = await ctx.transientStore.getJson(jobCacheKey);
+    ctx.transientStore.delete(jobCacheKey);
+    if (!job) {
+      res.status(404).json({ error: "Job not found or expired" });
+      return;
+    }
+    if (job.upload) {
+      await streamRenderWithUpload(res, job, ctx);
+    } else {
+      await streamRenderDirect(res, job, ctx);
+    }
+  } catch (error) {
+    console.error("Error in render:", error);
+    if (!res.headersSent) {
+      res.status(500).json({ error: "Rendering failed" });
+    } else {
+      res.end();
+    }
+  }
+}
+async function streamRenderDirect(res, job, ctx) {
+  const renderer = new EffieRenderer(job.effie, {
+    transientStore: ctx.transientStore,
+    httpProxy: ctx.httpProxy
+  });
+  const videoStream = await renderer.render(job.scale);
+  res.on("close", () => {
+    videoStream.destroy();
+    renderer.close();
+  });
+  res.set("Content-Type", "video/mp4");
+  videoStream.pipe(res);
+}
+async function streamRenderWithUpload(res, job, ctx) {
+  setupSSEResponse(res);
+  const sendEvent = createSSEEventSender(res);
+  const keepalive = setInterval(() => {
+    sendEvent("keepalive", { status: "rendering" });
+  }, 25e3);
+  try {
+    sendEvent("started", { status: "rendering" });
+    const timings = await renderAndUploadInternal(
+      job.effie,
+      job.scale,
+      job.upload,
+      sendEvent,
+      ctx
+    );
+    sendEvent("complete", { status: "uploaded", timings });
+  } catch (error) {
+    sendEvent("error", { message: String(error) });
+  } finally {
+    clearInterval(keepalive);
+    res.end();
+  }
+}
+async function renderAndUploadInternal(effie, scale, upload, sendEvent, ctx) {
+  const timings = {};
+  if (upload.coverUrl) {
+    const fetchCoverStartTime = Date.now();
+    const coverFetchResponse = await ffsFetch(effie.cover);
+    if (!coverFetchResponse.ok) {
+      throw new Error(
+        `Failed to fetch cover image: ${coverFetchResponse.status} ${coverFetchResponse.statusText}`
+      );
+    }
+    const coverBuffer = Buffer.from(await coverFetchResponse.arrayBuffer());
+    timings.fetchCoverTime = Date.now() - fetchCoverStartTime;
+    const uploadCoverStartTime = Date.now();
+    const uploadCoverResponse = await ffsFetch(upload.coverUrl, {
+      method: "PUT",
+      body: coverBuffer,
+      headers: {
+        "Content-Type": "image/png",
+        "Content-Length": coverBuffer.length.toString()
+      }
+    });
+    if (!uploadCoverResponse.ok) {
+      throw new Error(
+        `Failed to upload cover: ${uploadCoverResponse.status} ${uploadCoverResponse.statusText}`
+      );
+    }
+    timings.uploadCoverTime = Date.now() - uploadCoverStartTime;
+  }
+  const renderStartTime = Date.now();
+  const renderer = new EffieRenderer(effie, {
+    transientStore: ctx.transientStore,
+    httpProxy: ctx.httpProxy
+  });
+  const videoStream = await renderer.render(scale);
+  const chunks = [];
+  for await (const chunk of videoStream) {
+    chunks.push(Buffer.from(chunk));
+  }
+  const videoBuffer = Buffer.concat(chunks);
+  timings.renderTime = Date.now() - renderStartTime;
+  sendEvent("keepalive", { status: "uploading" });
+  const uploadStartTime = Date.now();
+  const uploadResponse = await ffsFetch(upload.videoUrl, {
+    method: "PUT",
+    body: videoBuffer,
+    headers: {
+      "Content-Type": "video/mp4",
+      "Content-Length": videoBuffer.length.toString()
+    }
+  });
+  if (!uploadResponse.ok) {
+    throw new Error(
+      `Failed to upload rendered video: ${uploadResponse.status} ${uploadResponse.statusText}`
+    );
+  }
+  timings.uploadTime = Date.now() - uploadStartTime;
+  return timings;
+}
+async function proxyRenderFromBackend(res, jobId, ctx) {
+  const backendUrl = `${ctx.renderBackendBaseUrl}/render/${jobId}`;
+  const response = await ffsFetch(backendUrl);
+  if (!response.ok) {
+    res.status(response.status).json({ error: "Backend render failed" });
+    return;
+  }
+  const contentType = response.headers.get("content-type") || "";
+  if (contentType.includes("text/event-stream")) {
+    setupSSEResponse(res);
+    const sendEvent = createSSEEventSender(res);
+    const reader = response.body?.getReader();
+    if (!reader) {
+      sendEvent("error", { message: "No response body from backend" });
+      res.end();
+      return;
+    }
+    const decoder = new TextDecoder();
+    let buffer = "";
+    try {
+      while (true) {
+        const { done, value } = await reader.read();
+        if (done) break;
+        if (res.destroyed) {
+          reader.cancel();
+          break;
+        }
+        buffer += decoder.decode(value, { stream: true });
+        const lines = buffer.split("\n");
+        buffer = lines.pop() || "";
+        let currentEvent = "";
+        let currentData = "";
+        for (const line of lines) {
+          if (line.startsWith("event: ")) {
+            currentEvent = line.slice(7);
+          } else if (line.startsWith("data: ")) {
+            currentData = line.slice(6);
+          } else if (line === "" && currentEvent && currentData) {
+            try {
+              const data = JSON.parse(currentData);
+              sendEvent(currentEvent, data);
+            } catch {
+            }
+            currentEvent = "";
+            currentData = "";
+          }
+        }
+      }
+    } finally {
+      reader.releaseLock();
+      res.end();
+    }
+  } else {
+    await proxyBinaryStream(response, res);
+  }
+}
+
+// src/handlers/orchestrating.ts
+async function createWarmupAndRenderJob(req, res, ctx) {
+  try {
+    const options = req.body;
+    let rawEffieData;
+    if (typeof options.effie === "string") {
+      const response = await ffsFetch(options.effie);
+      if (!response.ok) {
+        throw new Error(
+          `Failed to fetch Effie data: ${response.status} ${response.statusText}`
+        );
+      }
+      rawEffieData = await response.json();
+    } else {
+      rawEffieData = options.effie;
+    }
+    let effie;
+    if (!ctx.skipValidation) {
+      const result = effieDataSchema3.safeParse(rawEffieData);
+      if (!result.success) {
+        res.status(400).json({
+          error: "Invalid effie data",
+          issues: result.error.issues.map((issue) => ({
+            path: issue.path.join("."),
+            message: issue.message
+          }))
+        });
+        return;
+      }
+      effie = result.data;
+    } else {
+      const data = rawEffieData;
+      if (!data?.segments) {
+        res.status(400).json({ error: "Invalid effie data: missing segments" });
+        return;
+      }
+      effie = data;
+    }
+    const sources = extractEffieSourcesWithTypes(effie);
+    const scale = options.scale ?? 1;
+    const upload = options.upload;
+    const jobId = randomUUID2();
+    const warmupJobId = randomUUID2();
+    const renderJobId = randomUUID2();
+    const job = {
+      effie,
+      sources,
+      scale,
+      upload,
+      warmupJobId,
+      renderJobId,
+      createdAt: Date.now()
+    };
+    await ctx.transientStore.putJson(
+      storeKeys.warmupAndRenderJob(jobId),
+      job,
+      ctx.transientStore.jobMetadataTtlMs
+    );
+    await ctx.transientStore.putJson(
+      storeKeys.warmupJob(warmupJobId),
+      { sources },
+      ctx.transientStore.jobMetadataTtlMs
+    );
+    await ctx.transientStore.putJson(
+      storeKeys.renderJob(renderJobId),
+      {
+        effie,
+        scale,
+        upload,
+        createdAt: Date.now()
+      },
+      ctx.transientStore.jobMetadataTtlMs
+    );
+    res.json({
+      id: jobId,
+      url: `${ctx.baseUrl}/warmup-and-render/${jobId}`
+    });
+  } catch (error) {
+    console.error("Error creating warmup-and-render job:", error);
+    res.status(500).json({ error: "Failed to create warmup-and-render job" });
+  }
+}
+async function streamWarmupAndRenderJob(req, res, ctx) {
+  try {
+    setupCORSHeaders(res);
+    const jobId = req.params.id;
+    const jobCacheKey = storeKeys.warmupAndRenderJob(jobId);
+    const job = await ctx.transientStore.getJson(jobCacheKey);
+    ctx.transientStore.delete(jobCacheKey);
+    if (!job) {
+      res.status(404).json({ error: "Job not found" });
+      return;
+    }
+    setupSSEResponse(res);
+    const sendEvent = createSSEEventSender(res);
+    let keepalivePhase = "warmup";
+    const keepalive = setInterval(() => {
+      sendEvent("keepalive", { phase: keepalivePhase });
+    }, 25e3);
+    try {
+      if (ctx.warmupBackendBaseUrl) {
+        await proxyRemoteSSE(
+          `${ctx.warmupBackendBaseUrl}/warmup/${job.warmupJobId}`,
+          sendEvent,
+          "warmup:",
+          res
+        );
+      } else {
+        const warmupSender = prefixEventSender(sendEvent, "warmup:");
+        await warmupSources(job.sources, warmupSender, ctx);
+        warmupSender("complete", { status: "ready" });
+      }
+      keepalivePhase = "render";
+      if (ctx.renderBackendBaseUrl) {
+        await proxyRemoteSSE(
+          `${ctx.renderBackendBaseUrl}/render/${job.renderJobId}`,
+          sendEvent,
+          "render:",
+          res
+        );
+      } else {
+        const renderSender = prefixEventSender(sendEvent, "render:");
+        if (job.upload) {
+          renderSender("started", { status: "rendering" });
+          const timings = await renderAndUploadInternal(
+            job.effie,
+            job.scale,
+            job.upload,
+            renderSender,
+            ctx
+          );
+          renderSender("complete", { status: "uploaded", timings });
+        } else {
+          const videoUrl = `${ctx.baseUrl}/render/${job.renderJobId}`;
+          sendEvent("complete", { status: "ready", videoUrl });
+        }
+      }
+      if (job.upload && !ctx.renderBackendBaseUrl) {
+        sendEvent("complete", { status: "done" });
+      }
+    } catch (error) {
+      sendEvent("error", {
+        phase: keepalivePhase,
+        message: String(error)
+      });
+    } finally {
+      clearInterval(keepalive);
+      res.end();
+    }
+  } catch (error) {
+    console.error("Error in warmup-and-render streaming:", error);
+    if (!res.headersSent) {
+      res.status(500).json({ error: "Warmup-and-render streaming failed" });
+    } else {
+      res.end();
+    }
+  }
+}
+function prefixEventSender(sendEvent, prefix) {
+  return (event, data) => {
+    sendEvent(`${prefix}${event}`, data);
+  };
+}
+async function proxyRemoteSSE(url, sendEvent, prefix, res) {
+  const response = await ffsFetch(url, {
+    headers: {
+      Accept: "text/event-stream"
+    }
+  });
+  if (!response.ok) {
+    throw new Error(`Remote backend error: ${response.status}`);
+  }
+  const reader = response.body?.getReader();
+  if (!reader) {
+    throw new Error("No response body from remote backend");
+  }
+  const decoder = new TextDecoder();
+  let buffer = "";
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      if (res.destroyed) {
+        reader.cancel();
+        break;
+      }
+      buffer += decoder.decode(value, { stream: true });
+      const lines = buffer.split("\n");
+      buffer = lines.pop() || "";
+      let currentEvent = "";
+      let currentData = "";
+      for (const line of lines) {
+        if (line.startsWith("event: ")) {
+          currentEvent = line.slice(7);
+        } else if (line.startsWith("data: ")) {
+          currentData = line.slice(6);
+        } else if (line === "" && currentEvent && currentData) {
+          try {
+            const data = JSON.parse(currentData);
+            sendEvent(`${prefix}${currentEvent}`, data);
+          } catch {
+          }
+          currentEvent = "";
+          currentData = "";
+        }
+      }
+    }
+  } finally {
+    reader.releaseLock();
+  }
+}
+async function proxyBinaryStream(response, res) {
+  const contentType = response.headers.get("content-type");
+  if (contentType) res.set("Content-Type", contentType);
+  const contentLength = response.headers.get("content-length");
+  if (contentLength) res.set("Content-Length", contentLength);
+  const reader = response.body?.getReader();
+  if (!reader) {
+    throw new Error("No response body");
+  }
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      if (res.destroyed) {
+        reader.cancel();
+        break;
+      }
+      res.write(value);
+    }
+  } finally {
+    reader.releaseLock();
+    res.end();
+  }
+}
+
+// src/handlers/caching.ts
 function shouldSkipWarmup(source) {
   return source.type === "video" || source.type === "audio";
 }
@@ -205,9 +702,13 @@ async function createWarmupJob(req, res, ctx) {
       res.status(400).json(parseResult);
       return;
     }
-    const sources =
-    const jobId =
-    await ctx.
+    const sources = extractEffieSourcesWithTypes2(parseResult.effie);
+    const jobId = randomUUID3();
+    await ctx.transientStore.putJson(
+      storeKeys.warmupJob(jobId),
+      { sources },
+      ctx.transientStore.jobMetadataTtlMs
+    );
     res.json({
       id: jobId,
       url: `${ctx.baseUrl}/warmup/${jobId}`
@@ -221,9 +722,24 @@ async function streamWarmupJob(req, res, ctx) {
   try {
     setupCORSHeaders(res);
     const jobId = req.params.id;
-
-
-
+    if (ctx.warmupBackendBaseUrl) {
+      setupSSEResponse(res);
+      const sendEvent2 = createSSEEventSender(res);
+      try {
+        await proxyRemoteSSE(
+          `${ctx.warmupBackendBaseUrl}/warmup/${jobId}`,
+          sendEvent2,
+          "",
+          res
+        );
+      } finally {
+        res.end();
+      }
+      return;
+    }
+    const jobCacheKey = storeKeys.warmupJob(jobId);
+    const job = await ctx.transientStore.getJson(jobCacheKey);
+    ctx.transientStore.delete(jobCacheKey);
     if (!job) {
       res.status(404).json({ error: "Job not found" });
       return;
@@ -257,9 +773,9 @@ async function purgeCache(req, res, ctx) {
     const sources = extractEffieSources(parseResult.effie);
     let purged = 0;
     for (const url of sources) {
-      const ck =
-      if (await ctx.
-      await ctx.
+      const ck = storeKeys.source(url);
+      if (await ctx.transientStore.exists(ck)) {
+        await ctx.transientStore.delete(ck);
         purged++;
       }
     }
@@ -292,8 +808,8 @@ async function warmupSources(sources, sendEvent, ctx) {
       sourcesToCache.push(source);
     }
   }
-  const sourceCacheKeys = sourcesToCache.map((s) =>
-  const existsMap = await ctx.
+  const sourceCacheKeys = sourcesToCache.map((s) => storeKeys.source(s.url));
+  const existsMap = await ctx.transientStore.existsMany(sourceCacheKeys);
   for (let i = 0; i < sourcesToCache.length; i++) {
     if (existsMap.get(sourceCacheKeys[i])) {
       cached++;
@@ -319,11 +835,11 @@ async function warmupSources(sources, sendEvent, ctx) {
   }, 25e3);
   const queue = [...uncached];
   const workers = Array.from(
-    { length: Math.min(ctx.
+    { length: Math.min(ctx.warmupConcurrency, queue.length) },
     async () => {
       while (queue.length > 0) {
         const source = queue.shift();
-        const cacheKey =
+        const cacheKey = storeKeys.source(source.url);
         const startTime = Date.now();
         try {
           let fetchPromise = inFlightFetches.get(cacheKey);
@@ -397,203 +913,23 @@ async function fetchAndCache(url, cacheKey, sendEvent, ctx) {
     }
   });
   const trackedStream = sourceStream.pipe(progressStream);
-  await ctx.
-
-
-
-
-import { randomUUID as randomUUID2 } from "crypto";
-import { effieDataSchema as effieDataSchema2 } from "@effing/effie";
-async function createRenderJob(req, res, ctx) {
-  try {
-    const isWrapped = "effie" in req.body;
-    let rawEffieData;
-    let scale;
-    let upload;
-    if (isWrapped) {
-      const options = req.body;
-      if (typeof options.effie === "string") {
-        const response = await ffsFetch(options.effie);
-        if (!response.ok) {
-          throw new Error(
-            `Failed to fetch Effie data: ${response.status} ${response.statusText}`
-          );
-        }
-        rawEffieData = await response.json();
-      } else {
-        rawEffieData = options.effie;
-      }
-      scale = options.scale ?? 1;
-      upload = options.upload;
-    } else {
-      rawEffieData = req.body;
-      scale = parseFloat(req.query.scale?.toString() || "1");
-    }
-    let effie;
-    if (!ctx.skipValidation) {
-      const result = effieDataSchema2.safeParse(rawEffieData);
-      if (!result.success) {
-        res.status(400).json({
-          error: "Invalid effie data",
-          issues: result.error.issues.map((issue) => ({
-            path: issue.path.join("."),
-            message: issue.message
-          }))
-        });
-        return;
-      }
-      effie = result.data;
-    } else {
-      const data = rawEffieData;
-      if (!data?.segments) {
-        res.status(400).json({ error: "Invalid effie data: missing segments" });
-        return;
-      }
-      effie = data;
-    }
-    const jobId = randomUUID2();
-    const job = {
-      effie,
-      scale,
-      upload,
-      createdAt: Date.now()
-    };
-    await ctx.cacheStorage.putJson(cacheKeys.renderJob(jobId), job);
-    res.json({
-      id: jobId,
-      url: `${ctx.baseUrl}/render/${jobId}`
-    });
-  } catch (error) {
-    console.error("Error creating render job:", error);
-    res.status(500).json({ error: "Failed to create render job" });
-  }
-}
-async function streamRenderJob(req, res, ctx) {
-  try {
-    setupCORSHeaders(res);
-    const jobId = req.params.id;
-    const jobCacheKey = cacheKeys.renderJob(jobId);
-    const job = await ctx.cacheStorage.getJson(jobCacheKey);
-    ctx.cacheStorage.delete(jobCacheKey);
-    if (!job) {
-      res.status(404).json({ error: "Job not found or expired" });
-      return;
-    }
-    if (job.upload) {
-      await streamRenderWithUpload(res, job, ctx);
-    } else {
-      await streamRenderDirect(res, job, ctx);
-    }
-  } catch (error) {
-    console.error("Error in render:", error);
-    if (!res.headersSent) {
-      res.status(500).json({ error: "Rendering failed" });
-    } else {
-      res.end();
-    }
-  }
-}
-async function streamRenderDirect(res, job, ctx) {
-  const renderer = new EffieRenderer(job.effie, {
-    cacheStorage: ctx.cacheStorage,
-    httpProxy: ctx.httpProxy
-  });
-  const videoStream = await renderer.render(job.scale);
-  res.on("close", () => {
-    videoStream.destroy();
-    renderer.close();
-  });
-  res.set("Content-Type", "video/mp4");
-  videoStream.pipe(res);
-}
-async function streamRenderWithUpload(res, job, ctx) {
-  setupSSEResponse(res);
-  const sendEvent = createSSEEventSender(res);
-  const keepalive = setInterval(() => {
-    sendEvent("keepalive", { status: "rendering" });
-  }, 25e3);
-  try {
-    sendEvent("started", { status: "rendering" });
-    const timings = await renderAndUploadInternal(
-      job.effie,
-      job.scale,
-      job.upload,
-      sendEvent,
-      ctx
-    );
-    sendEvent("complete", { status: "uploaded", timings });
-  } catch (error) {
-    sendEvent("error", { message: String(error) });
-  } finally {
-    clearInterval(keepalive);
-    res.end();
-  }
-}
-async function renderAndUploadInternal(effie, scale, upload, sendEvent, ctx) {
-  const timings = {};
-  if (upload.coverUrl) {
-    const fetchCoverStartTime = Date.now();
-    const coverFetchResponse = await ffsFetch(effie.cover);
-    if (!coverFetchResponse.ok) {
-      throw new Error(
-        `Failed to fetch cover image: ${coverFetchResponse.status} ${coverFetchResponse.statusText}`
-      );
-    }
-    const coverBuffer = Buffer.from(await coverFetchResponse.arrayBuffer());
-    timings.fetchCoverTime = Date.now() - fetchCoverStartTime;
-    const uploadCoverStartTime = Date.now();
-    const uploadCoverResponse = await ffsFetch(upload.coverUrl, {
-      method: "PUT",
-      body: coverBuffer,
-      headers: {
-        "Content-Type": "image/png",
-        "Content-Length": coverBuffer.length.toString()
-      }
-    });
-    if (!uploadCoverResponse.ok) {
-      throw new Error(
-        `Failed to upload cover: ${uploadCoverResponse.status} ${uploadCoverResponse.statusText}`
-      );
-    }
-    timings.uploadCoverTime = Date.now() - uploadCoverStartTime;
-  }
-  const renderStartTime = Date.now();
-  const renderer = new EffieRenderer(effie, {
-    cacheStorage: ctx.cacheStorage,
-    httpProxy: ctx.httpProxy
-  });
-  const videoStream = await renderer.render(scale);
-  const chunks = [];
-  for await (const chunk of videoStream) {
-    chunks.push(Buffer.from(chunk));
-  }
-  const videoBuffer = Buffer.concat(chunks);
-  timings.renderTime = Date.now() - renderStartTime;
-  sendEvent("keepalive", { status: "uploading" });
-  const uploadStartTime = Date.now();
-  const uploadResponse = await ffsFetch(upload.videoUrl, {
-    method: "PUT",
-    body: videoBuffer,
-    headers: {
-      "Content-Type": "video/mp4",
-      "Content-Length": videoBuffer.length.toString()
-    }
-  });
-  if (!uploadResponse.ok) {
-    throw new Error(
-      `Failed to upload rendered video: ${uploadResponse.status} ${uploadResponse.statusText}`
-    );
-  }
-  timings.uploadTime = Date.now() - uploadStartTime;
-  return timings;
+  await ctx.transientStore.put(
+    cacheKey,
+    trackedStream,
+    ctx.transientStore.sourceTtlMs
+  );
 }
 
 export {
   createServerContext,
+  createRenderJob,
+  streamRenderJob,
+  createWarmupAndRenderJob,
+  streamWarmupAndRenderJob,
+  proxyRemoteSSE,
+  proxyBinaryStream,
   createWarmupJob,
   streamWarmupJob,
-  purgeCache
-  createRenderJob,
-  streamRenderJob
+  purgeCache
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-XSCNUWZJ.js.map