@vibeo/renderer 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser.d.ts +15 -0
- package/dist/browser.d.ts.map +1 -0
- package/dist/browser.js +40 -0
- package/dist/browser.js.map +1 -0
- package/dist/bundler.d.ts +10 -0
- package/dist/bundler.d.ts.map +1 -0
- package/dist/bundler.js +135 -0
- package/dist/bundler.js.map +1 -0
- package/dist/capture-frame.d.ts +12 -0
- package/dist/capture-frame.d.ts.map +1 -0
- package/dist/capture-frame.js +21 -0
- package/dist/capture-frame.js.map +1 -0
- package/dist/frame-range.d.ts +15 -0
- package/dist/frame-range.d.ts.map +1 -0
- package/dist/frame-range.js +55 -0
- package/dist/frame-range.js.map +1 -0
- package/dist/index.d.ts +11 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +18 -0
- package/dist/index.js.map +1 -0
- package/dist/parallel-render.d.ts +22 -0
- package/dist/parallel-render.d.ts.map +1 -0
- package/dist/parallel-render.js +69 -0
- package/dist/parallel-render.js.map +1 -0
- package/dist/render-composition.d.ts +19 -0
- package/dist/render-composition.d.ts.map +1 -0
- package/dist/render-composition.js +104 -0
- package/dist/render-composition.js.map +1 -0
- package/dist/seek-to-frame.d.ts +15 -0
- package/dist/seek-to-frame.d.ts.map +1 -0
- package/dist/seek-to-frame.js +58 -0
- package/dist/seek-to-frame.js.map +1 -0
- package/dist/stitch-audio.d.ts +7 -0
- package/dist/stitch-audio.d.ts.map +1 -0
- package/dist/stitch-audio.js +53 -0
- package/dist/stitch-audio.js.map +1 -0
- package/dist/stitch-frames.d.ts +11 -0
- package/dist/stitch-frames.d.ts.map +1 -0
- package/dist/stitch-frames.js +77 -0
- package/dist/stitch-frames.js.map +1 -0
- package/dist/types.d.ts +74 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/package.json +33 -0
- package/src/browser.ts +48 -0
- package/src/bundler.ts +146 -0
- package/src/capture-frame.ts +39 -0
- package/src/frame-range.ts +81 -0
- package/src/index.ts +36 -0
- package/src/parallel-render.ts +134 -0
- package/src/render-composition.ts +144 -0
- package/src/seek-to-frame.ts +89 -0
- package/src/stitch-audio.ts +79 -0
- package/src/stitch-frames.ts +95 -0
- package/src/types.ts +80 -0
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import type { FrameRange } from "./types.js";
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Parse a frame range string like "10-50" or "25" into a [start, end] tuple.
|
|
5
|
+
* Returns null if the input is null or empty.
|
|
6
|
+
*/
|
|
7
|
+
export function parseFrameRange(
|
|
8
|
+
input: string | null,
|
|
9
|
+
durationInFrames: number,
|
|
10
|
+
): FrameRange | null {
|
|
11
|
+
if (input === null || input.trim() === "") {
|
|
12
|
+
return null;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
const trimmed = input.trim();
|
|
16
|
+
|
|
17
|
+
if (trimmed.includes("-")) {
|
|
18
|
+
const [startStr, endStr] = trimmed.split("-");
|
|
19
|
+
const start = parseInt(startStr!, 10);
|
|
20
|
+
const end = parseInt(endStr!, 10);
|
|
21
|
+
|
|
22
|
+
if (isNaN(start) || isNaN(end)) {
|
|
23
|
+
throw new Error(`Invalid frame range: "${input}". Expected format: "start-end"`);
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
validateFrameRange([start, end], durationInFrames);
|
|
27
|
+
return [start, end];
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
const singleFrame = parseInt(trimmed, 10);
|
|
31
|
+
if (isNaN(singleFrame)) {
|
|
32
|
+
throw new Error(`Invalid frame range: "${input}". Expected a number or "start-end"`);
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
validateFrameRange([singleFrame, singleFrame], durationInFrames);
|
|
36
|
+
return [singleFrame, singleFrame];
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* Get the real frame range to render, accounting for optional user-specified range.
|
|
41
|
+
*/
|
|
42
|
+
export function getRealFrameRange(
|
|
43
|
+
durationInFrames: number,
|
|
44
|
+
frameRange: FrameRange | null,
|
|
45
|
+
): FrameRange {
|
|
46
|
+
if (frameRange === null) {
|
|
47
|
+
return [0, durationInFrames - 1];
|
|
48
|
+
}
|
|
49
|
+
return frameRange;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
/**
|
|
53
|
+
* Validate that a frame range is within bounds and well-formed.
|
|
54
|
+
*/
|
|
55
|
+
export function validateFrameRange(
|
|
56
|
+
range: FrameRange,
|
|
57
|
+
durationInFrames: number,
|
|
58
|
+
): void {
|
|
59
|
+
const [start, end] = range;
|
|
60
|
+
|
|
61
|
+
if (!Number.isInteger(start) || !Number.isInteger(end)) {
|
|
62
|
+
throw new Error(`Frame range values must be integers. Got [${start}, ${end}]`);
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
if (start < 0) {
|
|
66
|
+
throw new Error(`Frame range start must be >= 0. Got ${start}`);
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
if (end < start) {
|
|
70
|
+
throw new Error(
|
|
71
|
+
`Frame range end (${end}) must be >= start (${start})`,
|
|
72
|
+
);
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
if (end >= durationInFrames) {
|
|
76
|
+
throw new Error(
|
|
77
|
+
`Frame range end (${end}) exceeds duration (${durationInFrames} frames). ` +
|
|
78
|
+
`Max allowed end frame is ${durationInFrames - 1}`,
|
|
79
|
+
);
|
|
80
|
+
}
|
|
81
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
// Public API surface of @vibeo/renderer. Implementation lives in the
// sibling modules; this file only re-exports.

// Types
export type {
  RenderConfig,
  RenderProgress,
  Codec,
  ImageFormat,
  FrameRange,
  StitchOptions,
  AudioMuxOptions,
  BundleResult,
} from "./types.js";

// Browser lifecycle (launch/close the headless browser, open pages)
export { launchBrowser, closeBrowser, createPage } from "./browser.js";

// Bundler (builds the composition entry point for loading in the browser)
export { bundle } from "./bundler.js";

// Frame navigation (load the bundle and drive it to a specific frame)
export { seekToFrame, loadBundle } from "./seek-to-frame.js";

// Frame capture (write the currently rendered frame as an image)
export { captureFrame } from "./capture-frame.js";

// Frame range utilities (parse/validate "start-end" style ranges)
export { parseFrameRange, getRealFrameRange, validateFrameRange } from "./frame-range.js";

// FFmpeg stitching (frame images -> video, audio muxing)
export { stitchFrames, getContainerExt } from "./stitch-frames.js";
export { stitchAudio } from "./stitch-audio.js";

// Parallel rendering (fan frames out across browser tabs)
export { parallelRender } from "./parallel-render.js";

// Full render orchestration (bundle -> render -> stitch -> cleanup)
export { renderComposition } from "./render-composition.js";
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import type { Browser } from "playwright";
|
|
2
|
+
import type { FrameRange, ImageFormat, RenderProgress } from "./types.js";
|
|
3
|
+
import { createPage } from "./browser.js";
|
|
4
|
+
import { seekToFrame, loadBundle } from "./seek-to-frame.js";
|
|
5
|
+
import { captureFrame } from "./capture-frame.js";
|
|
6
|
+
|
|
7
|
+
// Inputs for parallelRender. One browser is shared; each chunk of the
// frame range gets its own page (tab) within it.
interface ParallelRenderOptions {
  // Shared Playwright browser instance; pages are created per chunk.
  browser: Browser;
  // URL of the bundled composition to load in every page.
  bundleUrl: string;
  // Composition identifier passed through to the in-page frame bridge.
  compositionId: string;
  // Inclusive [start, end] range of frames to render.
  frameRange: FrameRange;
  // Directory the captured frame images are written into.
  outputDir: string;
  // Viewport width in pixels.
  width: number;
  // Viewport height in pixels.
  height: number;
  // Number of pages to render with concurrently.
  concurrency: number;
  // Image format for captured frames.
  imageFormat: ImageFormat;
  // Optional image quality, forwarded to captureFrame.
  quality?: number;
  // Invoked after every rendered frame with cumulative progress/ETA.
  onProgress?: (progress: RenderProgress) => void;
}
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* Split a frame range into N roughly-equal chunks.
|
|
23
|
+
*/
|
|
24
|
+
function splitFrameRange(range: FrameRange, chunks: number): FrameRange[] {
|
|
25
|
+
const [start, end] = range;
|
|
26
|
+
const totalFrames = end - start + 1;
|
|
27
|
+
const framesPerChunk = Math.ceil(totalFrames / chunks);
|
|
28
|
+
const ranges: FrameRange[] = [];
|
|
29
|
+
|
|
30
|
+
for (let i = 0; i < chunks; i++) {
|
|
31
|
+
const chunkStart = start + i * framesPerChunk;
|
|
32
|
+
const chunkEnd = Math.min(chunkStart + framesPerChunk - 1, end);
|
|
33
|
+
if (chunkStart > end) break;
|
|
34
|
+
ranges.push([chunkStart, chunkEnd]);
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
return ranges;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
/**
|
|
41
|
+
* Render a chunk of frames using a single browser page.
|
|
42
|
+
*/
|
|
43
|
+
async function renderChunk(
|
|
44
|
+
browser: Browser,
|
|
45
|
+
bundleUrl: string,
|
|
46
|
+
compositionId: string,
|
|
47
|
+
frameRange: FrameRange,
|
|
48
|
+
outputDir: string,
|
|
49
|
+
width: number,
|
|
50
|
+
height: number,
|
|
51
|
+
imageFormat: ImageFormat,
|
|
52
|
+
quality: number | undefined,
|
|
53
|
+
onFrameRendered: () => void,
|
|
54
|
+
): Promise<void> {
|
|
55
|
+
const page = await createPage(browser, width, height);
|
|
56
|
+
|
|
57
|
+
try {
|
|
58
|
+
await loadBundle(page, bundleUrl);
|
|
59
|
+
|
|
60
|
+
const [start, end] = frameRange;
|
|
61
|
+
for (let frame = start; frame <= end; frame++) {
|
|
62
|
+
await seekToFrame(page, frame, compositionId);
|
|
63
|
+
await captureFrame(page, outputDir, frame, { imageFormat, quality });
|
|
64
|
+
onFrameRendered();
|
|
65
|
+
}
|
|
66
|
+
} finally {
|
|
67
|
+
await page.close();
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
/**
|
|
72
|
+
* Distribute frame rendering across multiple browser tabs in parallel.
|
|
73
|
+
* Opens N pages, each rendering a chunk of the frame range.
|
|
74
|
+
*/
|
|
75
|
+
export async function parallelRender(
|
|
76
|
+
options: ParallelRenderOptions,
|
|
77
|
+
): Promise<void> {
|
|
78
|
+
const {
|
|
79
|
+
browser,
|
|
80
|
+
bundleUrl,
|
|
81
|
+
compositionId,
|
|
82
|
+
frameRange,
|
|
83
|
+
outputDir,
|
|
84
|
+
width,
|
|
85
|
+
height,
|
|
86
|
+
concurrency,
|
|
87
|
+
imageFormat,
|
|
88
|
+
quality,
|
|
89
|
+
onProgress,
|
|
90
|
+
} = options;
|
|
91
|
+
|
|
92
|
+
const [start, end] = frameRange;
|
|
93
|
+
const totalFrames = end - start + 1;
|
|
94
|
+
const effectiveConcurrency = Math.min(concurrency, totalFrames);
|
|
95
|
+
const chunks = splitFrameRange(frameRange, effectiveConcurrency);
|
|
96
|
+
|
|
97
|
+
let framesRendered = 0;
|
|
98
|
+
const startTime = Date.now();
|
|
99
|
+
|
|
100
|
+
const onFrameRendered = () => {
|
|
101
|
+
framesRendered++;
|
|
102
|
+
if (onProgress) {
|
|
103
|
+
const elapsed = Date.now() - startTime;
|
|
104
|
+
const msPerFrame = elapsed / framesRendered;
|
|
105
|
+
const remaining = totalFrames - framesRendered;
|
|
106
|
+
const etaMs = remaining > 0 ? msPerFrame * remaining : 0;
|
|
107
|
+
|
|
108
|
+
onProgress({
|
|
109
|
+
framesRendered,
|
|
110
|
+
totalFrames,
|
|
111
|
+
percent: framesRendered / totalFrames,
|
|
112
|
+
etaMs: framesRendered > 0 ? etaMs : null,
|
|
113
|
+
});
|
|
114
|
+
}
|
|
115
|
+
};
|
|
116
|
+
|
|
117
|
+
// Render all chunks in parallel
|
|
118
|
+
await Promise.all(
|
|
119
|
+
chunks.map((chunk) =>
|
|
120
|
+
renderChunk(
|
|
121
|
+
browser,
|
|
122
|
+
bundleUrl,
|
|
123
|
+
compositionId,
|
|
124
|
+
chunk,
|
|
125
|
+
outputDir,
|
|
126
|
+
width,
|
|
127
|
+
height,
|
|
128
|
+
imageFormat,
|
|
129
|
+
quality,
|
|
130
|
+
onFrameRendered,
|
|
131
|
+
),
|
|
132
|
+
),
|
|
133
|
+
);
|
|
134
|
+
}
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import { copyFile, mkdir, mkdtemp, rm } from "node:fs/promises";
import { dirname, join } from "node:path";
import { availableParallelism, tmpdir } from "node:os";
import type { RenderConfig, RenderProgress } from "./types.js";
import { bundle } from "./bundler.js";
import { launchBrowser, closeBrowser } from "./browser.js";
import { getRealFrameRange } from "./frame-range.js";
import { parallelRender } from "./parallel-render.js";
import { stitchFrames } from "./stitch-frames.js";
import { stitchAudio } from "./stitch-audio.js";
|
|
12
|
+
|
|
13
|
+
// Metadata about the composition being rendered, resolved by the
// caller (presumably by querying the bundled composition — confirm
// against the call site).
interface CompositionInfo {
  // Output width in pixels.
  width: number;
  // Output height in pixels.
  height: number;
  // Frames per second; RenderConfig.fps takes precedence when set.
  fps: number;
  // Total length of the composition in frames.
  durationInFrames: number;
}
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* Orchestrate a full composition render:
|
|
22
|
+
* 1. Bundle the entry point
|
|
23
|
+
* 2. Launch headless browser
|
|
24
|
+
* 3. Render frames in parallel across browser tabs
|
|
25
|
+
* 4. Stitch frames into video with FFmpeg
|
|
26
|
+
* 5. Mux audio if present
|
|
27
|
+
* 6. Cleanup temp files
|
|
28
|
+
*/
|
|
29
|
+
export async function renderComposition(
|
|
30
|
+
config: RenderConfig,
|
|
31
|
+
compositionInfo: CompositionInfo,
|
|
32
|
+
): Promise<string> {
|
|
33
|
+
const {
|
|
34
|
+
entry,
|
|
35
|
+
compositionId,
|
|
36
|
+
outputPath,
|
|
37
|
+
codec,
|
|
38
|
+
imageFormat,
|
|
39
|
+
quality,
|
|
40
|
+
frameRange: userFrameRange,
|
|
41
|
+
concurrency,
|
|
42
|
+
pixelFormat,
|
|
43
|
+
onProgress,
|
|
44
|
+
} = config;
|
|
45
|
+
|
|
46
|
+
const fps = config.fps ?? compositionInfo.fps;
|
|
47
|
+
const { width, height, durationInFrames } = compositionInfo;
|
|
48
|
+
|
|
49
|
+
const frameRange = getRealFrameRange(durationInFrames, userFrameRange);
|
|
50
|
+
const totalFrames = frameRange[1] - frameRange[0] + 1;
|
|
51
|
+
|
|
52
|
+
// Create temp directories
|
|
53
|
+
const framesDir = await mkdtemp(join(tmpdir(), "vibeo-frames-"));
|
|
54
|
+
|
|
55
|
+
// Ensure output directory exists
|
|
56
|
+
await mkdir(dirname(outputPath), { recursive: true });
|
|
57
|
+
|
|
58
|
+
let bundleResult: Awaited<ReturnType<typeof bundle>> | null = null;
|
|
59
|
+
|
|
60
|
+
try {
|
|
61
|
+
// Step 1: Bundle
|
|
62
|
+
const reportStage = (stage: string) => {
|
|
63
|
+
if (onProgress) {
|
|
64
|
+
onProgress({
|
|
65
|
+
framesRendered: 0,
|
|
66
|
+
totalFrames,
|
|
67
|
+
percent: 0,
|
|
68
|
+
etaMs: null,
|
|
69
|
+
});
|
|
70
|
+
}
|
|
71
|
+
};
|
|
72
|
+
|
|
73
|
+
reportStage("bundling");
|
|
74
|
+
bundleResult = await bundle(entry);
|
|
75
|
+
|
|
76
|
+
// Step 2: Launch browser
|
|
77
|
+
reportStage("launching browser");
|
|
78
|
+
const browser = await launchBrowser();
|
|
79
|
+
|
|
80
|
+
// Step 3: Render frames in parallel
|
|
81
|
+
const effectiveConcurrency =
|
|
82
|
+
concurrency > 0 ? concurrency : Math.max(1, Math.floor(availableParallelism() / 2));
|
|
83
|
+
|
|
84
|
+
await parallelRender({
|
|
85
|
+
browser,
|
|
86
|
+
bundleUrl: bundleResult.url,
|
|
87
|
+
compositionId,
|
|
88
|
+
frameRange,
|
|
89
|
+
outputDir: framesDir,
|
|
90
|
+
width,
|
|
91
|
+
height,
|
|
92
|
+
concurrency: effectiveConcurrency,
|
|
93
|
+
imageFormat,
|
|
94
|
+
quality,
|
|
95
|
+
onProgress,
|
|
96
|
+
});
|
|
97
|
+
|
|
98
|
+
// Step 4: Stitch frames into video
|
|
99
|
+
const videoPath = join(
|
|
100
|
+
framesDir,
|
|
101
|
+
`stitched.${codec === "vp9" ? "webm" : codec === "prores" ? "mov" : "mp4"}`,
|
|
102
|
+
);
|
|
103
|
+
|
|
104
|
+
await stitchFrames({
|
|
105
|
+
framesDir,
|
|
106
|
+
outputPath: videoPath,
|
|
107
|
+
fps,
|
|
108
|
+
codec,
|
|
109
|
+
imageFormat,
|
|
110
|
+
pixelFormat,
|
|
111
|
+
quality,
|
|
112
|
+
width,
|
|
113
|
+
height,
|
|
114
|
+
});
|
|
115
|
+
|
|
116
|
+
// Step 5: Mux audio (if audio assets exist)
|
|
117
|
+
// For now, just copy the video to the output path if no audio
|
|
118
|
+
// Audio assets would be collected from the composition at render time
|
|
119
|
+
const finalPath = await stitchAudio({
|
|
120
|
+
videoPath,
|
|
121
|
+
audioPaths: [], // Audio collection will be implemented per-composition
|
|
122
|
+
outputPath,
|
|
123
|
+
});
|
|
124
|
+
|
|
125
|
+
// Report completion
|
|
126
|
+
if (onProgress) {
|
|
127
|
+
onProgress({
|
|
128
|
+
framesRendered: totalFrames,
|
|
129
|
+
totalFrames,
|
|
130
|
+
percent: 1,
|
|
131
|
+
etaMs: 0,
|
|
132
|
+
});
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
return finalPath;
|
|
136
|
+
} finally {
|
|
137
|
+
// Step 6: Cleanup
|
|
138
|
+
await closeBrowser();
|
|
139
|
+
if (bundleResult) {
|
|
140
|
+
await bundleResult.cleanup();
|
|
141
|
+
}
|
|
142
|
+
await rm(framesDir, { recursive: true, force: true }).catch(() => {});
|
|
143
|
+
}
|
|
144
|
+
}
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import type { Page } from "playwright";
|
|
2
|
+
|
|
3
|
+
/**
 * Navigate the headless browser page to render a specific frame.
 *
 * 1. Call window.vibeo_setFrame(frame, compositionId) to update the React tree.
 * 2. Poll inside the page until window.vibeo_ready is true AND there are no
 *    pending delayRender handles (window.vibeo_pendingDelays is 0/undefined).
 * 3. Wait for all fonts to load so text renders with final metrics.
 *
 * Rejects if the bridge function is missing or the render does not
 * settle within 30 seconds.
 */
export async function seekToFrame(
  page: Page,
  frame: number,
  compositionId: string,
): Promise<void> {
  // Set the frame via the global bridge function the bundle registers on
  // window. Throwing inside evaluate rejects this call with a clear
  // message when the bridge is absent.
  await page.evaluate(
    ({ frame, compositionId }) => {
      const win = window as typeof window & {
        vibeo_setFrame?: (frame: number, compositionId: string) => void;
      };
      if (typeof win.vibeo_setFrame === "function") {
        win.vibeo_setFrame(frame, compositionId);
      } else {
        throw new Error(
          "window.vibeo_setFrame is not defined. " +
            "Make sure the bundle registers this function.",
        );
      }
    },
    { frame, compositionId },
  );

  // Wait for React to finish rendering and all delays to resolve.
  // Polls once per animation frame inside the page; 30s hard timeout.
  await page.evaluate(() => {
    return new Promise<void>((resolve, reject) => {
      const timeout = 30_000;
      const start = Date.now();

      function poll() {
        const win = window as typeof window & {
          vibeo_ready?: boolean;
          vibeo_pendingDelays?: number;
        };

        if (Date.now() - start > timeout) {
          reject(new Error("Timed out waiting for frame render"));
          return;
        }

        const ready = win.vibeo_ready === true;
        // An undefined counter means the bundle does no delay tracking,
        // so there is nothing to wait for.
        const noPendingDelays =
          win.vibeo_pendingDelays === undefined || win.vibeo_pendingDelays === 0;

        if (ready && noPendingDelays) {
          resolve();
        } else {
          requestAnimationFrame(poll);
        }
      }

      poll();
    });
  });

  // Wait for all fonts to be ready before the frame is captured.
  await page.evaluate(() => document.fonts.ready);
}
|
|
69
|
+
|
|
70
|
+
/**
|
|
71
|
+
* Load the bundled app in the browser page.
|
|
72
|
+
*/
|
|
73
|
+
export async function loadBundle(
|
|
74
|
+
page: Page,
|
|
75
|
+
url: string,
|
|
76
|
+
): Promise<void> {
|
|
77
|
+
await page.goto(url, { waitUntil: "networkidle" });
|
|
78
|
+
|
|
79
|
+
// Wait for the vibeo_setFrame bridge to be available
|
|
80
|
+
await page.waitForFunction(
|
|
81
|
+
() => {
|
|
82
|
+
const win = window as typeof window & {
|
|
83
|
+
vibeo_setFrame?: unknown;
|
|
84
|
+
};
|
|
85
|
+
return typeof win.vibeo_setFrame === "function";
|
|
86
|
+
},
|
|
87
|
+
{ timeout: 30_000 },
|
|
88
|
+
);
|
|
89
|
+
}
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
import { spawn } from "node:child_process";
|
|
2
|
+
import type { AudioMuxOptions } from "./types.js";
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Mux audio tracks into a video file using FFmpeg.
|
|
6
|
+
* If multiple audio files are provided, they are mixed together.
|
|
7
|
+
*/
|
|
8
|
+
export async function stitchAudio(options: AudioMuxOptions): Promise<string> {
|
|
9
|
+
const { videoPath, audioPaths, outputPath } = options;
|
|
10
|
+
|
|
11
|
+
if (audioPaths.length === 0) {
|
|
12
|
+
// No audio to mux — just copy the video
|
|
13
|
+
return videoPath;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
const args: string[] = ["-y"];
|
|
17
|
+
|
|
18
|
+
// Input: video
|
|
19
|
+
args.push("-i", videoPath);
|
|
20
|
+
|
|
21
|
+
// Input: each audio track
|
|
22
|
+
for (const audioPath of audioPaths) {
|
|
23
|
+
args.push("-i", audioPath);
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
if (audioPaths.length === 1) {
|
|
27
|
+
// Simple case: one audio track, just mux
|
|
28
|
+
args.push(
|
|
29
|
+
"-c:v", "copy",
|
|
30
|
+
"-c:a", "aac",
|
|
31
|
+
"-b:a", "192k",
|
|
32
|
+
"-shortest",
|
|
33
|
+
outputPath,
|
|
34
|
+
);
|
|
35
|
+
} else {
|
|
36
|
+
// Multiple audio tracks: use amix filter to mix them
|
|
37
|
+
const filterInputs = audioPaths.map((_, i) => `[${i + 1}:a]`).join("");
|
|
38
|
+
const filter = `${filterInputs}amix=inputs=${audioPaths.length}:duration=longest:dropout_transition=0[aout]`;
|
|
39
|
+
|
|
40
|
+
args.push(
|
|
41
|
+
"-filter_complex", filter,
|
|
42
|
+
"-map", "0:v",
|
|
43
|
+
"-map", "[aout]",
|
|
44
|
+
"-c:v", "copy",
|
|
45
|
+
"-c:a", "aac",
|
|
46
|
+
"-b:a", "192k",
|
|
47
|
+
"-shortest",
|
|
48
|
+
outputPath,
|
|
49
|
+
);
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
return runFfmpeg(args);
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
function runFfmpeg(args: string[]): Promise<string> {
|
|
56
|
+
return new Promise((resolve, reject) => {
|
|
57
|
+
const proc = spawn("ffmpeg", args, {
|
|
58
|
+
stdio: ["ignore", "pipe", "pipe"],
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
let stderr = "";
|
|
62
|
+
|
|
63
|
+
proc.stderr.on("data", (data: Buffer) => {
|
|
64
|
+
stderr += data.toString();
|
|
65
|
+
});
|
|
66
|
+
|
|
67
|
+
proc.on("close", (code) => {
|
|
68
|
+
if (code === 0) {
|
|
69
|
+
resolve(args[args.length - 1]!);
|
|
70
|
+
} else {
|
|
71
|
+
reject(new Error(`FFmpeg exited with code ${code}:\n${stderr}`));
|
|
72
|
+
}
|
|
73
|
+
});
|
|
74
|
+
|
|
75
|
+
proc.on("error", (err) => {
|
|
76
|
+
reject(new Error(`Failed to spawn FFmpeg: ${err.message}`));
|
|
77
|
+
});
|
|
78
|
+
});
|
|
79
|
+
}
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import { spawn } from "node:child_process";
|
|
2
|
+
import type { StitchOptions } from "./types.js";
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Get the FFmpeg codec arguments for the given codec.
|
|
6
|
+
*/
|
|
7
|
+
function getCodecArgs(codec: string, quality: number): string[] {
|
|
8
|
+
switch (codec) {
|
|
9
|
+
case "h264":
|
|
10
|
+
return ["-c:v", "libx264", "-crf", String(quality), "-preset", "fast", "-pix_fmt", "yuv420p"];
|
|
11
|
+
case "h265":
|
|
12
|
+
return ["-c:v", "libx265", "-crf", String(quality), "-preset", "fast", "-pix_fmt", "yuv420p"];
|
|
13
|
+
case "vp9":
|
|
14
|
+
return ["-c:v", "libvpx-vp9", "-crf", String(quality), "-b:v", "0", "-pix_fmt", "yuv420p"];
|
|
15
|
+
case "prores":
|
|
16
|
+
return ["-c:v", "prores_ks", "-profile:v", String(Math.min(quality, 5)), "-pix_fmt", "yuva444p10le"];
|
|
17
|
+
default:
|
|
18
|
+
throw new Error(`Unsupported codec: ${codec}`);
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
/**
|
|
23
|
+
* Get the output container format for the given codec.
|
|
24
|
+
*/
|
|
25
|
+
function getContainerExt(codec: string): string {
|
|
26
|
+
switch (codec) {
|
|
27
|
+
case "vp9":
|
|
28
|
+
return "webm";
|
|
29
|
+
case "prores":
|
|
30
|
+
return "mov";
|
|
31
|
+
default:
|
|
32
|
+
return "mp4";
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* Stitch a sequence of frame images into a video using FFmpeg.
|
|
38
|
+
*/
|
|
39
|
+
export async function stitchFrames(options: StitchOptions): Promise<string> {
|
|
40
|
+
const {
|
|
41
|
+
framesDir,
|
|
42
|
+
outputPath,
|
|
43
|
+
fps,
|
|
44
|
+
codec,
|
|
45
|
+
imageFormat,
|
|
46
|
+
quality,
|
|
47
|
+
} = options;
|
|
48
|
+
|
|
49
|
+
const ext = imageFormat === "jpeg" ? "jpg" : "png";
|
|
50
|
+
const inputPattern = `${framesDir}/frame-%06d.${ext}`;
|
|
51
|
+
const codecArgs = getCodecArgs(codec, quality);
|
|
52
|
+
|
|
53
|
+
const args = [
|
|
54
|
+
"-y",
|
|
55
|
+
"-framerate", String(fps),
|
|
56
|
+
"-i", inputPattern,
|
|
57
|
+
...codecArgs,
|
|
58
|
+
"-r", String(fps),
|
|
59
|
+
outputPath,
|
|
60
|
+
];
|
|
61
|
+
|
|
62
|
+
return runFfmpeg(args);
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
/**
|
|
66
|
+
* Run an FFmpeg command and return the output path.
|
|
67
|
+
*/
|
|
68
|
+
function runFfmpeg(args: string[]): Promise<string> {
|
|
69
|
+
return new Promise((resolve, reject) => {
|
|
70
|
+
const proc = spawn("ffmpeg", args, {
|
|
71
|
+
stdio: ["ignore", "pipe", "pipe"],
|
|
72
|
+
});
|
|
73
|
+
|
|
74
|
+
let stderr = "";
|
|
75
|
+
|
|
76
|
+
proc.stderr.on("data", (data: Buffer) => {
|
|
77
|
+
stderr += data.toString();
|
|
78
|
+
});
|
|
79
|
+
|
|
80
|
+
proc.on("close", (code) => {
|
|
81
|
+
if (code === 0) {
|
|
82
|
+
// The output path is the last argument
|
|
83
|
+
resolve(args[args.length - 1]!);
|
|
84
|
+
} else {
|
|
85
|
+
reject(new Error(`FFmpeg exited with code ${code}:\n${stderr}`));
|
|
86
|
+
}
|
|
87
|
+
});
|
|
88
|
+
|
|
89
|
+
proc.on("error", (err) => {
|
|
90
|
+
reject(new Error(`Failed to spawn FFmpeg: ${err.message}`));
|
|
91
|
+
});
|
|
92
|
+
});
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
export { getContainerExt };
|