@t3lnet/sceneforge 1.0.17 → 1.0.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +65 -0
- package/cli/commands/add-audio-to-steps.js +37 -9
- package/cli/commands/concat-final-videos.js +37 -4
- package/cli/commands/pipeline.js +44 -3
- package/cli/commands/record-demo.js +20 -30
- package/cli/commands/split-video.js +17 -5
- package/cli/utils/dimensions.js +249 -0
- package/context/templates/base/cli-reference.md +132 -0
- package/dist/templates/base/cli-reference.md +132 -0
- package/package.json +1 -1
package/README.md
CHANGED
@@ -117,6 +117,71 @@ npx sceneforge split --demo my-demo --crf 15
 - Higher quality (lower CRF) prevents generation loss
 - `high` preset (CRF 10) produces near-lossless quality for professional demos
 
+## Viewport Settings (Recording)
+
+Control browser viewport size and zoom level during recording:
+
+```bash
+--viewport <WxH|preset>      # Viewport size (default: 1440x900)
+                             # Presets: 720p, 1080p, 1440p, 4k
+--width <px>                 # Viewport width
+--height <px>                # Viewport height
+--zoom <percent>             # Browser zoom: 100, 150, 200 (default: 100)
+--device-scale-factor <n>    # Device pixel ratio: 1, 1.5, 2
+```
+
+**Examples:**
+
+```bash
+# Record at 1080p
+npx sceneforge record --definition demo.yaml --base-url http://localhost:5173 --viewport 1080p
+
+# Record with zoom for extra detail
+npx sceneforge record --definition demo.yaml --base-url http://localhost:5173 \
+  --viewport 1920x1080 --zoom 150
+```
+
+## Output Dimensions (Video Processing)
+
+Control final video resolution with support for different platforms and aspect ratios:
+
+**Presets:**
+
+| Preset | Resolution | Use Case |
+|--------|------------|----------|
+| `720p` | 1280x720 | HD landscape |
+| `1080p` | 1920x1080 | Full HD landscape |
+| `4k` | 3840x2160 | 4K UHD |
+| `tiktok` | 1080x1920 | TikTok/Reels (portrait) |
+| `shorts` | 1080x1920 | YouTube Shorts (portrait) |
+| `square` | 1080x1080 | Instagram posts |
+
+```bash
+--output-size <WxH|preset>   # Output video dimensions
+--output-width <px>          # Output width (-1 for auto)
+--output-height <px>         # Output height (-1 for auto)
+```
+
+**Examples:**
+
+```bash
+# Standard 1080p output
+npx sceneforge concat --demo my-demo --output-size 1080p
+
+# TikTok/YouTube Shorts (portrait)
+npx sceneforge pipeline --demo my-demo --base-url http://localhost:5173 \
+  --output-size tiktok --quality high
+
+# Square for Instagram
+npx sceneforge concat --demo my-demo --output-size square
+
+# Full pipeline with all video options
+npx sceneforge pipeline --demo my-demo --base-url http://localhost:5173 \
+  --viewport 1080p --zoom 150 \
+  --output-size 1080p \
+  --quality high
+```
+
 ## Notes
 
 - Voiceover generation uses ElevenLabs and requires `ELEVENLABS_API_KEY` + `ELEVENLABS_VOICE_ID`.
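For reference, a minimal sketch of what the "-1 for auto" behaviour works out to, assuming the default 1440x900 recording size; the numbers here are illustrative arithmetic, not output from the tool:

```js
// Illustrative only: what the "-1 for auto" flags resolve to for a 1440x900 recording.
// Passing --output-width 1280 with height left auto keeps the 16:10 aspect ratio.
const src = { width: 1440, height: 900 };
const requested = { width: 1280, height: -1 }; // --output-width 1280

// ffmpeg's scale=1280:-1 derives the missing dimension from the input aspect ratio.
const autoHeight = Math.round(requested.width * (src.height / src.width)); // 800
console.log(`${requested.width}x${autoHeight}`); // "1280x800"
```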
package/cli/commands/add-audio-to-steps.js
CHANGED
@@ -10,6 +10,12 @@ import {
   getQualityHelpText,
   logQualitySettings,
 } from "../utils/quality.js";
+import {
+  parseOutputDimensions,
+  getOutputDimensionsHelpText,
+  getScaleFilterArgs,
+  logOutputDimensions,
+} from "../utils/dimensions.js";
 
 function printHelp() {
   console.log(`
@@ -26,6 +32,7 @@ Options:
   --output-dir <path>    Output directory (defaults to e2e/output or output)
   --help, -h             Show this help message
 ${getQualityHelpText()}
+${getOutputDimensionsHelpText()}
 
 Output:
   Creates step_XX_<stepId>_with_audio.mp4 files in the videos/<demo>/ folder
@@ -33,18 +40,32 @@ Output:
 Examples:
   sceneforge add-audio --demo create-quote
   sceneforge add-audio --demo create-quote --quality high
+  sceneforge add-audio --demo create-quote --output-size 1080p
   sceneforge add-audio --all --codec libx265
 `);
 }
 
-async function addAudioToStep(videoPath, audioPath, outputPath, padding, nextVideoPath, qualityOptions = {}) {
+// Build scale filter string for chaining in filter_complex
+function buildScaleFilter(outputDimensions) {
+  if (!outputDimensions) return "";
+  const { width, height } = outputDimensions;
+  if (width === -1 || height === -1) {
+    return `scale=${width}:${height}`;
+  }
+  return `scale=${width}:${height}:force_original_aspect_ratio=decrease,pad=${width}:${height}:(ow-iw)/2:(oh-ih)/2:black`;
+}
+
+async function addAudioToStep(videoPath, audioPath, outputPath, padding, nextVideoPath, qualityOptions = {}, outputDimensions = null) {
   const videoDuration = await getMediaDuration(videoPath);
   const audioDuration = await getMediaDuration(audioPath);
   const targetDuration = audioDuration + padding;
   const encodingArgs = getVideoEncodingArgs(qualityOptions);
+  const scaleFilter = buildScaleFilter(outputDimensions);
 
   if (targetDuration <= videoDuration) {
     const padDuration = Math.max(0, videoDuration - audioDuration);
+    // For this case, use -vf for scaling since filter_complex only handles audio
+    const videoFilterArgs = scaleFilter ? ["-vf", scaleFilter] : [];
     await runFFmpeg([
       "-y",
       "-i",
@@ -53,6 +74,7 @@ async function addAudioToStep(videoPath, audioPath, outputPath, padding, nextVid
       audioPath,
       "-filter_complex",
       `[1:a]apad=pad_dur=${padDuration}[a]`,
+      ...videoFilterArgs,
       "-map",
       "0:v",
       "-map",
@@ -69,11 +91,13 @@ async function addAudioToStep(videoPath, audioPath, outputPath, padding, nextVid
 
   if (nextVideoPath) {
     const stillDuration = 0.04;
+    // Chain scale filter into the filter_complex output
+    const scaleChain = scaleFilter ? `,${scaleFilter}` : "";
     const filterGraph =
       `[1:v]trim=start=0:end=${stillDuration},setpts=PTS-STARTPTS,` +
       `tpad=stop_mode=clone:stop_duration=${extensionNeeded},` +
       `trim=duration=${extensionNeeded}[next_still];` +
-      `[0:v][next_still]concat=n=2:v=1:a=0[outv]`;
+      `[0:v][next_still]concat=n=2:v=1:a=0${scaleChain}[outv]`;
 
     await runFFmpeg([
       "-y",
@@ -97,6 +121,8 @@ async function addAudioToStep(videoPath, audioPath, outputPath, padding, nextVid
     return;
   }
 
+  // Chain scale filter into the filter_complex output
+  const scaleChain = scaleFilter ? `,${scaleFilter}` : "";
   await runFFmpeg([
     "-y",
     "-i",
@@ -104,7 +130,7 @@ async function addAudioToStep(videoPath, audioPath, outputPath, padding, nextVid
     "-i",
     audioPath,
     "-filter_complex",
-    `[0:v]tpad=stop_mode=clone:stop_duration=${extensionNeeded}[v]`,
+    `[0:v]tpad=stop_mode=clone:stop_duration=${extensionNeeded}${scaleChain}[v]`,
     "-map",
     "[v]",
     "-map",
@@ -116,9 +142,10 @@ async function addAudioToStep(videoPath, audioPath, outputPath, padding, nextVid
   ]);
 }
 
-async function processDemo(demoName, paths, padding, qualityOptions = {}) {
+async function processDemo(demoName, paths, padding, qualityOptions = {}, outputDimensions = null) {
   console.log(`\n[audio] Processing: ${demoName}\n`);
   logQualitySettings(qualityOptions, "[audio]");
+  logOutputDimensions(outputDimensions, "[audio]");
 
   const stepsManifestPath = path.join(paths.videosDir, demoName, "steps-manifest.json");
   let stepsManifest;
@@ -204,7 +231,7 @@ async function processDemo(demoName, paths, padding, qualityOptions = {}) {
   );
 
   try {
-    await addAudioToStep(step.videoFile, audioSegment.audioFile, outputPath, padding, nextVideoPath, qualityOptions);
+    await addAudioToStep(step.videoFile, audioSegment.audioFile, outputPath, padding, nextVideoPath, qualityOptions, outputDimensions);
     outputFiles.push(outputPath);
   } catch (error) {
     console.error(`[audio] ${paddedIndex}. ${step.stepId}: ✗ Failed to process`);
@@ -237,7 +264,7 @@ async function processDemo(demoName, paths, padding, qualityOptions = {}) {
   console.log(`[audio] Output: ${path.join(paths.videosDir, demoName)}`);
 }
 
-async function processAll(paths, padding, qualityOptions = {}) {
+async function processAll(paths, padding, qualityOptions = {}, outputDimensions = null) {
   console.log("\n[audio] Processing all demos...\n");
 
   try {
@@ -268,7 +295,7 @@ async function processAll(paths, padding, qualityOptions = {}) {
   console.log(`[audio] Found ${demosToProcess.length} demo(s) to process\n`);
 
   for (const demo of demosToProcess) {
-    await processDemo(demo, paths, padding, qualityOptions);
+    await processDemo(demo, paths, padding, qualityOptions, outputDimensions);
   }
 
   await fs.rm(paths.tempDir, { recursive: true, force: true });
@@ -302,15 +329,16 @@ export async function runAddAudioCommand(argv) {
   const rootDir = resolveRoot(root);
   const paths = await getOutputPaths(rootDir, outputDir);
   const qualityOptions = parseQualityArgs(args, getFlagValue, hasFlag);
+  const outputDimensions = parseOutputDimensions(args, getFlagValue);
 
   if (demo) {
-    await processDemo(demo, paths, padding, qualityOptions);
+    await processDemo(demo, paths, padding, qualityOptions, outputDimensions);
     await fs.rm(paths.tempDir, { recursive: true, force: true });
     return;
  }
 
   if (all) {
-    await processAll(paths, padding, qualityOptions);
+    await processAll(paths, padding, qualityOptions, outputDimensions);
     return;
   }
 
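For reference, a minimal sketch of the strings the new `buildScaleFilter` helper evaluates to. The helper body is copied from the diff above; the call sites and printed results are illustrative, using the `tiktok` preset dimensions from `dimensions.js`:

```js
// Illustrative only: mirrors the buildScaleFilter logic added above.
function buildScaleFilter(outputDimensions) {
  if (!outputDimensions) return "";
  const { width, height } = outputDimensions;
  if (width === -1 || height === -1) return `scale=${width}:${height}`;
  return `scale=${width}:${height}:force_original_aspect_ratio=decrease,pad=${width}:${height}:(ow-iw)/2:(oh-ih)/2:black`;
}

// 1080x1920 (tiktok preset): fit inside the frame, then centre with black padding.
console.log(buildScaleFilter({ width: 1080, height: 1920 }));
// -> scale=1080:1920:force_original_aspect_ratio=decrease,pad=1080:1920:(ow-iw)/2:(oh-ih)/2:black

// Auto height: plain scale, no padding.
console.log(buildScaleFilter({ width: 1280, height: -1 }));
// -> scale=1280:-1
```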
package/cli/commands/concat-final-videos.js
CHANGED
@@ -11,6 +11,11 @@ import {
   DEFAULT_AUDIO_CODEC,
   DEFAULT_AUDIO_BITRATE,
 } from "../utils/quality.js";
+import {
+  parseOutputDimensions,
+  getOutputDimensionsHelpText,
+  logOutputDimensions,
+} from "../utils/dimensions.js";
 
 function printHelp() {
   console.log(`
@@ -33,10 +38,13 @@ Options:
   --output-dir <path>    Output directory (defaults to e2e/output or output)
   --help, -h             Show this help message
 ${getQualityHelpText()}
+${getOutputDimensionsHelpText()}
 
 Examples:
   sceneforge concat --demo create-quote
   sceneforge concat --demo create-quote --quality high
+  sceneforge concat --demo create-quote --output-size 1080p
+  sceneforge concat --demo create-quote --output-size tiktok   # 1080x1920 vertical
   sceneforge concat --demo create-quote --intro intro.mp4 --outro outro.mp4
   sceneforge concat --demo create-quote --music background.mp3 --music-volume 0.2
   sceneforge concat --all --codec libx265
@@ -65,7 +73,17 @@ async function loadMediaConfig(demoName, paths, rootDir) {
   }
 }
 
-async function buildConcatWithIntroOutro(stepFiles, demoDir, introPath, outroPath, outputPath, qualityOptions = {}) {
+// Build scale filter string for use in filter_complex
+function buildScaleFilter(outputDimensions) {
+  if (!outputDimensions) return "";
+  const { width, height } = outputDimensions;
+  if (width === -1 || height === -1) {
+    return `scale=${width}:${height}`;
+  }
+  return `scale=${width}:${height}:force_original_aspect_ratio=decrease,pad=${width}:${height}:(ow-iw)/2:(oh-ih)/2:black`;
+}
+
+async function buildConcatWithIntroOutro(stepFiles, demoDir, introPath, outroPath, outputPath, qualityOptions = {}, outputDimensions = null) {
   const allInputs = [];
   const inputPaths = [];
   let inputIndex = 0;
@@ -92,7 +110,11 @@ async function buildConcatWithIntroOutro(stepFiles, demoDir, introPath, outroPat
   }
 
   const concatInputs = inputPaths.map(({ index }) => `[${index}:v:0][${index}:a:0]`).join("");
-  const filterGraph = `${concatInputs}concat=n=${inputPaths.length}:v=1:a=1[outv][outa]`;
+  const scaleFilter = buildScaleFilter(outputDimensions);
+  // If scaling, we need to output concat to temp labels, then apply scale to video only
+  const filterGraph = scaleFilter
+    ? `${concatInputs}concat=n=${inputPaths.length}:v=1:a=1[tmpv][outa];[tmpv]${scaleFilter}[outv]`
+    : `${concatInputs}concat=n=${inputPaths.length}:v=1:a=1[outv][outa]`;
   const encodingArgs = getVideoEncodingArgs(qualityOptions);
 
   await runFFmpeg([
@@ -210,6 +232,9 @@ async function concatDemo(demoName, paths, options = {}) {
   if (options.qualityOptions) {
     logQualitySettings(options.qualityOptions, "[concat]");
   }
+  if (options.outputDimensions) {
+    logOutputDimensions(options.outputDimensions, "[concat]");
+  }
 
   const { rootDir, introOverride, outroOverride, musicOverride, musicOptions = {} } = options;
   const demoDir = path.join(paths.videosDir, demoName);
@@ -297,6 +322,7 @@ async function concatDemo(demoName, paths, options = {}) {
   console.log("[concat] Concatenating clips...");
 
   const qualityOptions = options.qualityOptions || {};
+  const outputDimensions = options.outputDimensions || null;
 
   if (hasIntroOutro) {
     await buildConcatWithIntroOutro(
@@ -305,13 +331,18 @@ async function concatDemo(demoName, paths, options = {}) {
       introPath,
       outroPath,
       hasMusic ? tempConcatPath : outputPath,
-      qualityOptions
+      qualityOptions,
+      outputDimensions
     );
   } else {
     // Original concatenation logic for steps only
     const inputArgs = stepFiles.flatMap((file) => ["-i", path.join(demoDir, file)]);
     const concatInputs = stepFiles.map((_, index) => `[${index}:v:0][${index}:a:0]`).join("");
-    const filterGraph = `${concatInputs}concat=n=${stepFiles.length}:v=1:a=1[outv][outa]`;
+    const scaleFilter = buildScaleFilter(outputDimensions);
+    // If scaling, we need to output concat to temp labels, then apply scale to video only
+    const filterGraph = scaleFilter
+      ? `${concatInputs}concat=n=${stepFiles.length}:v=1:a=1[tmpv][outa];[tmpv]${scaleFilter}[outv]`
+      : `${concatInputs}concat=n=${stepFiles.length}:v=1:a=1[outv][outa]`;
     const encodingArgs = getVideoEncodingArgs(qualityOptions);
 
     await runFFmpeg([
@@ -457,6 +488,7 @@ export async function runConcatCommand(argv) {
   const rootDir = resolveRoot(root);
   const paths = await getOutputPaths(rootDir, outputDir);
   const qualityOptions = parseQualityArgs(args, getFlagValue, hasFlag);
+  const outputDimensions = parseOutputDimensions(args, getFlagValue);
 
   const options = {
     rootDir,
@@ -470,6 +502,7 @@ export async function runConcatCommand(argv) {
       fadeOut: musicFadeOut,
     },
     qualityOptions,
+    outputDimensions,
   };
 
   if (demo) {
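A minimal sketch of the two `filter_complex` graph shapes the updated concat path builds; the three-clip input labels and the 1080p scale string are illustrative values, not taken from a real run:

```js
// Illustrative only: the filter graph shapes produced by the concat changes above,
// for three step clips (input indices 0-2) and the 1080p output preset.
const concatInputs = "[0:v:0][0:a:0][1:v:0][1:a:0][2:v:0][2:a:0]";
const scaleFilter =
  "scale=1920:1080:force_original_aspect_ratio=decrease,pad=1920:1080:(ow-iw)/2:(oh-ih)/2:black";

// Without --output-size: concat writes [outv][outa] directly.
const plain = `${concatInputs}concat=n=3:v=1:a=1[outv][outa]`;

// With --output-size: concat writes a temporary [tmpv] label, the scale/pad chain
// turns [tmpv] into [outv], and the audio passes through untouched as [outa].
const scaled = `${concatInputs}concat=n=3:v=1:a=1[tmpv][outa];[tmpv]${scaleFilter}[outv]`;

console.log(plain);
console.log(scaled);
```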
package/cli/commands/pipeline.js
CHANGED
@@ -4,6 +4,7 @@ import { loadDemoDefinition } from "@t3lnet/sceneforge";
 import { getFlagValue, hasFlag } from "../utils/args.js";
 import { ensureDir, getOutputPaths, resolveRoot, toAbsolute } from "../utils/paths.js";
 import { getQualityHelpText } from "../utils/quality.js";
+import { getViewportHelpText, getOutputDimensionsHelpText } from "../utils/dimensions.js";
 import { runRecordDemoCommand } from "./record-demo.js";
 import { runSplitVideoCommand } from "./split-video.js";
 import { runGenerateVoiceoverCommand } from "./generate-voiceover.js";
@@ -42,6 +43,8 @@ Media options (for final video):
   --music-fade-in <s>    Fade in duration for music (default: 1)
   --music-fade-out <s>   Fade out duration for music (default: 2)
 ${getQualityHelpText()}
+${getViewportHelpText()}
+${getOutputDimensionsHelpText()}
 
   --help, -h             Show this help message
 
@@ -200,6 +203,18 @@ export async function runPipelineCommand(argv) {
   const crf = getFlagValue(args, "--crf");
   const codec = getFlagValue(args, "--codec");
 
+  // Viewport options (for recording)
+  const viewport = getFlagValue(args, "--viewport");
+  const viewportWidth = getFlagValue(args, "--width");
+  const viewportHeight = getFlagValue(args, "--height");
+  const zoom = getFlagValue(args, "--zoom");
+  const deviceScaleFactor = getFlagValue(args, "--device-scale-factor");
+
+  // Output dimension options (for video processing)
+  const outputSize = getFlagValue(args, "--output-size");
+  const outputWidth = getFlagValue(args, "--output-width");
+  const outputHeight = getFlagValue(args, "--output-height");
+
   const rootDir = resolveRoot(root);
   const outputPaths = await getOutputPaths(rootDir, outputDir);
 
@@ -246,6 +261,20 @@ export async function runPipelineCommand(argv) {
     if (crf) console.log(`  - CRF: ${crf}`);
     if (codec) console.log(`  - Codec: ${codec}`);
   }
+  if (viewport || viewportWidth || viewportHeight || zoom || deviceScaleFactor) {
+    console.log("\nViewport options:");
+    if (viewport) console.log(`  - Viewport: ${viewport}`);
+    if (viewportWidth) console.log(`  - Width: ${viewportWidth}`);
+    if (viewportHeight) console.log(`  - Height: ${viewportHeight}`);
+    if (zoom) console.log(`  - Zoom: ${zoom}%`);
+    if (deviceScaleFactor) console.log(`  - Device scale factor: ${deviceScaleFactor}`);
+  }
+  if (outputSize || outputWidth || outputHeight) {
+    console.log("\nOutput dimension options:");
+    if (outputSize) console.log(`  - Output size: ${outputSize}`);
+    if (outputWidth) console.log(`  - Output width: ${outputWidth}`);
+    if (outputHeight) console.log(`  - Output height: ${outputHeight}`);
+  }
   return;
 }
 
@@ -293,8 +322,20 @@ export async function runPipelineCommand(argv) {
     qualityArgs.push("--codec", codec);
   }
 
+  // Build output dimension args to pass through to video processing commands
+  const outputDimensionArgs = [];
+  if (outputSize) {
+    outputDimensionArgs.push("--output-size", outputSize);
+  }
+  if (outputWidth) {
+    outputDimensionArgs.push("--output-width", outputWidth);
+  }
+  if (outputHeight) {
+    outputDimensionArgs.push("--output-height", outputHeight);
+  }
+
   await runStep("split", plan.split, () =>
-    runSplitVideoCommand(["--demo", demoName, ...sharedArgs, ...qualityArgs])
+    runSplitVideoCommand(["--demo", demoName, ...sharedArgs, ...qualityArgs, ...outputDimensionArgs])
   );
 
   const voiceArgs = ["--demo", demoName, ...sharedArgs];
@@ -306,14 +347,14 @@ export async function runPipelineCommand(argv) {
   }
   await runStep("voiceover", plan.voiceover, () => runGenerateVoiceoverCommand(voiceArgs));
 
-  const audioArgs = ["--demo", demoName, ...sharedArgs, ...qualityArgs];
+  const audioArgs = ["--demo", demoName, ...sharedArgs, ...qualityArgs, ...outputDimensionArgs];
   if (padding) {
     audioArgs.push("--padding", padding);
   }
   await runStep("add-audio", plan.addAudio, () => runAddAudioCommand(audioArgs));
 
   // Build concat args with media options and quality settings
-  const concatArgs = ["--demo", demoName, ...sharedArgs, ...qualityArgs];
+  const concatArgs = ["--demo", demoName, ...sharedArgs, ...qualityArgs, ...outputDimensionArgs];
   if (intro) {
     concatArgs.push("--intro", intro);
   }
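A minimal sketch of the argument arrays the pipeline now forwards, assuming `--output-size tiktok --quality high` on the pipeline invocation; the demo name and shared flags here are hypothetical placeholders:

```js
// Illustrative only: how outputDimensionArgs are spread into the sub-command argv.
const demoName = "create-quote";           // hypothetical demo name
const sharedArgs = ["--root", "."];        // hypothetical shared flags
const qualityArgs = ["--quality", "high"];
const outputDimensionArgs = ["--output-size", "tiktok"];

const splitArgs = ["--demo", demoName, ...sharedArgs, ...qualityArgs, ...outputDimensionArgs];
console.log(splitArgs);
// -> ["--demo", "create-quote", "--root", ".", "--quality", "high", "--output-size", "tiktok"]
// The same spread is appended to the add-audio and concat argument arrays, so every
// video-processing step scales to the same output dimensions.
```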
package/cli/commands/record-demo.js
CHANGED
@@ -15,6 +15,12 @@ import {
   toAbsolute,
 } from "../utils/paths.js";
 import { getMediaDuration } from "../utils/media.js";
+import {
+  parseViewportArgs,
+  parseDeviceScaleFactor,
+  getViewportHelpText,
+  logViewportSettings,
+} from "../utils/dimensions.js";
 
 function printHelp() {
   console.log(`
@@ -35,45 +41,19 @@ Options:
   --root <path>          Project root (defaults to cwd)
   --output-dir <path>    Output directory (defaults to output or e2e/output)
   --storage-state <path> Playwright storage state JSON
-  --viewport <WxH>       Viewport size, e.g. 1440x900 (default)
-  --width <px>           Viewport width (overrides --viewport)
-  --height <px>          Viewport height (overrides --viewport)
   --headed               Run browser headed
   --slowmo <ms>          Slow down Playwright actions
   --no-video             Skip video recording
   --help, -h             Show this help message
+${getViewportHelpText()}
 
 Examples:
   sceneforge record --definition demo-definitions/create-quote.yaml --base-url http://localhost:5173
   sceneforge record --demo create-quote --definitions-dir examples --base-url http://localhost:5173
+  sceneforge record --demo create-quote --base-url http://localhost:5173 --viewport 1920x1080 --zoom 150
 `);
 }
 
-function parseViewport(args) {
-  const viewportValue = getFlagValue(args, "--viewport");
-  const widthValue = getFlagValue(args, "--width");
-  const heightValue = getFlagValue(args, "--height");
-
-  const defaultViewport = { width: 1440, height: 900 };
-
-  if (widthValue || heightValue) {
-    const width = widthValue ? Number(widthValue) : defaultViewport.width;
-    const height = heightValue ? Number(heightValue) : defaultViewport.height;
-    return { width, height };
-  }
-
-  if (!viewportValue) {
-    return defaultViewport;
-  }
-
-  const match = viewportValue.match(/^(\d+)x(\d+)$/i);
-  if (!match) {
-    return defaultViewport;
-  }
-
-  return { width: Number(match[1]), height: Number(match[2]) };
-}
-
 function resolveStartUrl(startPath, baseUrl) {
   if (!startPath) return null;
   const interpolated = startPath
@@ -233,7 +213,16 @@ export async function runRecordDemoCommand(argv) {
   await ensureDir(outputPaths.outputDir);
   await ensureDir(outputPaths.videosDir);
 
-  const viewport = parseViewport(args);
+  const viewport = parseViewportArgs(args, getFlagValue);
+  const deviceScaleFactor = parseDeviceScaleFactor(args, getFlagValue);
+
+  logViewportSettings(viewport, deviceScaleFactor, "[record]");
+
+  // Calculate video recording size (viewport * scale factor for high-DPI capture)
+  const recordVideoSize = {
+    width: Math.round(viewport.width * deviceScaleFactor),
+    height: Math.round(viewport.height * deviceScaleFactor),
+  };
 
   const recordDir = path.join(outputPaths.videosDir, ".recordings", definition.name);
   if (!noVideo) {
@@ -247,7 +236,8 @@ export async function runRecordDemoCommand(argv) {
 
   const context = await browser.newContext({
     viewport,
-
+    deviceScaleFactor,
+    recordVideo: noVideo ? undefined : { dir: recordDir, size: recordVideoSize },
     storageState: storageState ? toAbsolute(rootDir, storageState) : undefined,
     locale: locale || undefined,
     extraHTTPHeaders: locale
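A minimal sketch of how the new `recordVideoSize` works out for a 1920x1080 viewport recorded with `--zoom 150`; the numbers are illustrative arithmetic matching the formula in the diff above:

```js
// Illustrative only: effective capture size for --viewport 1920x1080 --zoom 150.
const viewport = { width: 1920, height: 1080 };
const deviceScaleFactor = 150 / 100; // --zoom 150 -> 1.5

const recordVideoSize = {
  width: Math.round(viewport.width * deviceScaleFactor),   // 2880
  height: Math.round(viewport.height * deviceScaleFactor), // 1620
};

console.log(recordVideoSize); // { width: 2880, height: 1620 }
// The page is laid out at 1920x1080 CSS pixels, while the recording is captured
// at 2880x1620 device pixels, which is what makes the footage look sharp.
```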
package/cli/commands/split-video.js
CHANGED
@@ -10,6 +10,12 @@ import {
   getQualityHelpText,
   logQualitySettings,
 } from "../utils/quality.js";
+import {
+  parseOutputDimensions,
+  getOutputDimensionsHelpText,
+  getScaleFilterArgs,
+  logOutputDimensions,
+} from "../utils/dimensions.js";
 
 function printHelp() {
   console.log(`
@@ -25,11 +31,13 @@ Options:
   --output-dir <path>    Output directory (defaults to e2e/output or output)
   --help, -h             Show this help message
 ${getQualityHelpText()}
+${getOutputDimensionsHelpText()}
 
 Examples:
   sceneforge split --demo create-quote
   sceneforge split --demo create-quote --quality high
   sceneforge split --demo create-quote --crf 15 --codec libx265
+  sceneforge split --demo create-quote --output-size 1080p
   sceneforge split --all
 `);
 }
@@ -63,9 +71,10 @@ async function findVideoFile(demoName, videosDir, testResultsDir) {
   return null;
 }
 
-async function splitDemo(demoName, paths, qualityOptions = {}) {
+async function splitDemo(demoName, paths, qualityOptions = {}, outputDimensions = null) {
   console.log(`\n[split] Processing: ${demoName}\n`);
   logQualitySettings(qualityOptions, "[split]");
+  logOutputDimensions(outputDimensions, "[split]");
 
   const scriptPath = path.join(paths.scriptsDir, `${demoName}.json`);
   let script;
@@ -127,6 +136,7 @@ async function splitDemo(demoName, paths, qualityOptions = {}) {
 
   try {
     const encodingArgs = getVideoEncodingArgs({ ...qualityOptions, includeAudio: false });
+    const scaleArgs = getScaleFilterArgs(outputDimensions);
     await runFFmpeg([
       "-y",
       "-i",
@@ -135,6 +145,7 @@ async function splitDemo(demoName, paths, qualityOptions = {}) {
       String(startSec),
       "-t",
       String(duration),
+      ...scaleArgs,
       ...encodingArgs,
       "-an",
       outputPath,
@@ -181,7 +192,7 @@ async function splitDemo(demoName, paths, qualityOptions = {}) {
   console.log(`[split] Manifest: ${manifestPath}`);
 }
 
-async function splitAll(paths, qualityOptions = {}) {
+async function splitAll(paths, qualityOptions = {}, outputDimensions = null) {
   console.log("\n[split] Processing all demos...\n");
 
   try {
@@ -199,7 +210,7 @@ async function splitAll(paths, qualityOptions = {}) {
 
   for (const file of scriptFiles) {
     const demoName = path.basename(file, ".json");
-    await splitDemo(demoName, paths, qualityOptions);
+    await splitDemo(demoName, paths, qualityOptions, outputDimensions);
   }
 
   console.log("\n[split] All demos processed!");
@@ -230,14 +241,15 @@ export async function runSplitVideoCommand(argv) {
   const rootDir = resolveRoot(root);
   const paths = await getOutputPaths(rootDir, outputDir);
   const qualityOptions = parseQualityArgs(args, getFlagValue, hasFlag);
+  const outputDimensions = parseOutputDimensions(args, getFlagValue);
 
   if (demo) {
-    await splitDemo(demo, paths, qualityOptions);
+    await splitDemo(demo, paths, qualityOptions, outputDimensions);
     return;
   }
 
   if (all) {
-    await splitAll(paths, qualityOptions);
+    await splitAll(paths, qualityOptions, outputDimensions);
     return;
   }
 
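A minimal sketch of the shape of the ffmpeg argument list the split step assembles once `--output-size 1080p` is passed; the input path, timings, output name, and encoding values here are hypothetical stand-ins, only the ordering mirrors the diff above:

```js
// Illustrative only: scaleArgs are spliced in between the trim flags and the encoder flags.
const scaleArgs = [
  "-vf",
  "scale=1920:1080:force_original_aspect_ratio=decrease,pad=1920:1080:(ow-iw)/2:(oh-ih)/2:black",
];
const encodingArgs = ["-c:v", "libx264", "-crf", "18"]; // hypothetical quality args

const ffmpegArgs = [
  "-y",
  "-i", "recording.webm",   // hypothetical input
  "-ss", "0",
  "-t", "12.5",             // hypothetical step duration
  ...scaleArgs,
  ...encodingArgs,
  "-an",
  "step_01_intro.mp4",      // hypothetical output
];
console.log(ffmpegArgs.join(" "));
```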
package/cli/utils/dimensions.js
ADDED
@@ -0,0 +1,249 @@
+/**
+ * Viewport and output dimensions configuration
+ *
+ * Browser Viewport:
+ * - Controls the browser window size during recording
+ * - Supports device scale factor (zoom) for high-DPI capture
+ *
+ * Output Dimensions:
+ * - Controls the final video resolution after processing
+ * - Supports common presets (720p, 1080p, 4k) or custom WxH
+ */
+
+export const VIEWPORT_PRESETS = {
+  "720p": { width: 1280, height: 720 },
+  "1080p": { width: 1920, height: 1080 },
+  "1440p": { width: 2560, height: 1440 },
+  "4k": { width: 3840, height: 2160 },
+};
+
+export const OUTPUT_PRESETS = {
+  // Landscape formats
+  "720p": { width: 1280, height: 720, description: "HD 720p (landscape)" },
+  "1080p": { width: 1920, height: 1080, description: "Full HD 1080p (landscape)" },
+  "1440p": { width: 2560, height: 1440, description: "QHD 1440p (landscape)" },
+  "4k": { width: 3840, height: 2160, description: "4K UHD (landscape)" },
+  // Portrait/vertical formats (for mobile, TikTok, YouTube Shorts, Reels)
+  "720p-portrait": { width: 720, height: 1280, description: "HD 720p (portrait)" },
+  "1080p-portrait": { width: 1080, height: 1920, description: "Full HD 1080p (portrait)" },
+  "tiktok": { width: 1080, height: 1920, description: "TikTok/Reels (1080x1920)" },
+  "shorts": { width: 1080, height: 1920, description: "YouTube Shorts (1080x1920)" },
+  "reels": { width: 1080, height: 1920, description: "Instagram Reels (1080x1920)" },
+  // Square format (Instagram posts)
+  "square": { width: 1080, height: 1080, description: "Square (1080x1080)" },
+  "square-720": { width: 720, height: 720, description: "Square (720x720)" },
+};
+
+export const DEFAULT_VIEWPORT = { width: 1440, height: 900 };
+export const DEFAULT_DEVICE_SCALE_FACTOR = 1;
+
+/**
+ * Parse a dimension string like "1920x1080" or a preset name like "1080p"
+ * @param {string} value - Dimension string or preset name
+ * @param {Object} presets - Presets object to check against
+ * @param {Object} defaultValue - Default dimensions if parsing fails
+ * @returns {Object} { width, height }
+ */
+export function parseDimensions(value, presets, defaultValue) {
+  if (!value) {
+    return defaultValue;
+  }
+
+  // Check if it's a preset name
+  const preset = presets[value.toLowerCase()];
+  if (preset) {
+    return { width: preset.width, height: preset.height };
+  }
+
+  // Try to parse WxH format
+  const match = value.match(/^(\d+)x(\d+)$/i);
+  if (match) {
+    return { width: Number(match[1]), height: Number(match[2]) };
+  }
+
+  return defaultValue;
+}
+
+/**
+ * Parse viewport options from CLI arguments
+ * @param {string[]} args - CLI arguments
+ * @param {Function} getFlagValue - Function to get flag values
+ * @returns {Object} { width, height }
+ */
+export function parseViewportArgs(args, getFlagValue) {
+  const viewportValue = getFlagValue(args, "--viewport");
+  const widthValue = getFlagValue(args, "--width");
+  const heightValue = getFlagValue(args, "--height");
+
+  // Individual width/height override viewport string
+  if (widthValue || heightValue) {
+    const width = widthValue ? Number(widthValue) : DEFAULT_VIEWPORT.width;
+    const height = heightValue ? Number(heightValue) : DEFAULT_VIEWPORT.height;
+    return { width, height };
+  }
+
+  return parseDimensions(viewportValue, VIEWPORT_PRESETS, DEFAULT_VIEWPORT);
+}
+
+/**
+ * Parse device scale factor (zoom) from CLI arguments
+ * @param {string[]} args - CLI arguments
+ * @param {Function} getFlagValue - Function to get flag values
+ * @returns {number} Device scale factor (1 = 100%, 1.5 = 150%, 2 = 200%)
+ */
+export function parseDeviceScaleFactor(args, getFlagValue) {
+  const zoomValue = getFlagValue(args, "--zoom");
+  const scaleValue = getFlagValue(args, "--device-scale-factor");
+
+  // --zoom takes a percentage (100, 150, 200)
+  if (zoomValue !== null && zoomValue !== undefined) {
+    const zoom = Number(zoomValue);
+    if (Number.isFinite(zoom) && zoom > 0) {
+      return zoom / 100; // Convert percentage to scale factor
+    }
+  }
+
+  // --device-scale-factor takes the actual scale (1, 1.5, 2)
+  if (scaleValue !== null && scaleValue !== undefined) {
+    const scale = Number(scaleValue);
+    if (Number.isFinite(scale) && scale > 0) {
+      return scale;
+    }
+  }
+
+  return DEFAULT_DEVICE_SCALE_FACTOR;
+}
+
+/**
+ * Parse output dimensions from CLI arguments
+ * @param {string[]} args - CLI arguments
+ * @param {Function} getFlagValue - Function to get flag values
+ * @returns {Object|null} { width, height } or null if not specified
+ */
+export function parseOutputDimensions(args, getFlagValue) {
+  const outputSize = getFlagValue(args, "--output-size");
+  const outputWidth = getFlagValue(args, "--output-width");
+  const outputHeight = getFlagValue(args, "--output-height");
+
+  // Individual width/height override --output-size
+  if (outputWidth || outputHeight) {
+    // Need both for explicit dimensions, or use -1 for auto-scale
+    const width = outputWidth ? Number(outputWidth) : -1;
+    const height = outputHeight ? Number(outputHeight) : -1;
+    if ((width > 0 || width === -1) && (height > 0 || height === -1)) {
+      return { width, height };
+    }
+  }
+
+  if (!outputSize) {
+    return null; // No scaling, keep original dimensions
+  }
+
+  // Check if it's a preset
+  const preset = OUTPUT_PRESETS[outputSize.toLowerCase()];
+  if (preset) {
+    return { width: preset.width, height: preset.height };
+  }
+
+  // Try to parse WxH format
+  const match = outputSize.match(/^(\d+)x(\d+)$/i);
+  if (match) {
+    return { width: Number(match[1]), height: Number(match[2]) };
+  }
+
+  console.warn(`[warning] Invalid output size "${outputSize}", keeping original dimensions`);
+  return null;
+}
+
+/**
+ * Get FFmpeg scale filter arguments
+ * @param {Object|null} dimensions - { width, height } or null for no scaling
+ *   Use -1 for auto-scale maintaining aspect ratio
+ * @returns {string[]} FFmpeg arguments for scaling, or empty array
+ */
+export function getScaleFilterArgs(dimensions) {
+  if (!dimensions) {
+    return [];
+  }
+
+  const { width, height } = dimensions;
+
+  // If either dimension is -1, scale while maintaining aspect ratio (no padding)
+  if (width === -1 || height === -1) {
+    return ["-vf", `scale=${width}:${height}`];
+  }
+
+  // Use scale filter with padding to maintain aspect ratio and fit exact dimensions
+  // scale=W:H:force_original_aspect_ratio=decrease,pad=W:H:(ow-iw)/2:(oh-ih)/2
+  return [
+    "-vf",
+    `scale=${width}:${height}:force_original_aspect_ratio=decrease,pad=${width}:${height}:(ow-iw)/2:(oh-ih)/2:black`,
+  ];
+}
+
+/**
+ * Get help text for viewport options (recording)
+ * @returns {string}
+ */
+export function getViewportHelpText() {
+  return `
+Viewport Options (Recording):
+  --viewport <WxH|preset>      Browser viewport size (default: 1440x900)
+                               Presets: 720p, 1080p, 1440p, 4k
+                               Example: --viewport 1920x1080 or --viewport 1080p
+  --width <px>                 Viewport width (overrides --viewport)
+  --height <px>                Viewport height (overrides --viewport)
+  --zoom <percent>             Browser zoom level: 100, 150, 200 (default: 100)
+  --device-scale-factor <n>    Device pixel ratio: 1, 1.5, 2 (alternative to --zoom)`;
+}
+
+/**
+ * Get help text for output dimension options (video processing)
+ * @returns {string}
+ */
+export function getOutputDimensionsHelpText() {
+  return `
+Output Dimensions:
+  --output-size <WxH|preset>   Scale output video to dimensions
+                               Landscape: 720p, 1080p, 1440p, 4k
+                               Portrait: 720p-portrait, 1080p-portrait
+                               Mobile: tiktok, shorts, reels (1080x1920)
+                               Square: square (1080x1080), square-720
+                               Custom: --output-size 1920x1080
+  --output-width <px>          Output width (use with --output-height or -1 for auto)
+  --output-height <px>         Output height (use with --output-width or -1 for auto)
+                               If not specified, keeps original recording dimensions`;
+}
+
+/**
+ * Log viewport settings being used
+ * @param {Object} viewport - { width, height }
+ * @param {number} deviceScaleFactor - Scale factor
+ * @param {string} prefix - Log prefix
+ */
+export function logViewportSettings(viewport, deviceScaleFactor, prefix = "") {
+  const zoomPercent = Math.round(deviceScaleFactor * 100);
+  console.log(
+    `${prefix} Viewport: ${viewport.width}x${viewport.height} @ ${zoomPercent}% zoom`
+  );
+  if (deviceScaleFactor !== 1) {
+    const effectiveRes = {
+      width: Math.round(viewport.width * deviceScaleFactor),
+      height: Math.round(viewport.height * deviceScaleFactor),
+    };
+    console.log(
+      `${prefix} Effective capture resolution: ${effectiveRes.width}x${effectiveRes.height}`
+    );
+  }
+}
+
+/**
+ * Log output dimension settings
+ * @param {Object|null} dimensions - { width, height } or null
+ * @param {string} prefix - Log prefix
+ */
+export function logOutputDimensions(dimensions, prefix = "") {
+  if (dimensions) {
+    console.log(`${prefix} Output size: ${dimensions.width}x${dimensions.height}`);
+  }
+}
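A minimal sketch of how the new helpers compose. The `getFlagValue` shim below is an assumption for self-containment (the real implementation lives in `cli/utils/args.js`); the flag values and printed results are illustrative:

```js
// Illustrative only: end-to-end use of the dimensions.js helpers.
import {
  parseViewportArgs,
  parseDeviceScaleFactor,
  parseOutputDimensions,
  getScaleFilterArgs,
} from "./dimensions.js";

// Simplified stand-in for cli/utils/args.js getFlagValue: reads "--flag value" pairs.
const getFlagValue = (args, flag) => {
  const i = args.indexOf(flag);
  return i !== -1 ? args[i + 1] : undefined;
};

const args = ["--viewport", "1080p", "--zoom", "150", "--output-size", "tiktok"];

console.log(parseViewportArgs(args, getFlagValue));      // { width: 1920, height: 1080 }
console.log(parseDeviceScaleFactor(args, getFlagValue)); // 1.5
console.log(parseOutputDimensions(args, getFlagValue));  // { width: 1080, height: 1920 }
console.log(getScaleFilterArgs({ width: 1080, height: 1920 }));
// [ "-vf", "scale=1080:1920:force_original_aspect_ratio=decrease,pad=1080:1920:(ow-iw)/2:(oh-ih)/2:black" ]
```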
package/context/templates/base/cli-reference.md
CHANGED
@@ -25,6 +25,13 @@ sceneforge record --definition <path> [options]
 | `--storage` | Path to storage state JSON |
 | `--slowmo` | Slow down actions by ms |
 | `--env-file` | Path to .env file |
+| `--viewport` | Browser viewport size (preset or WxH, default: 1440x900) |
+| `--width` | Viewport width (overrides --viewport) |
+| `--height` | Viewport height (overrides --viewport) |
+| `--zoom` | Browser zoom level: 100, 150, 200 (default: 100) |
+| `--device-scale-factor` | Device pixel ratio: 1, 1.5, 2 |
+
+**Viewport Presets:** 720p (1280x720), 1080p (1920x1080), 1440p (2560x1440), 4k (3840x2160)
 
 **Examples:**
 ```bash
@@ -36,6 +43,12 @@ sceneforge record -d demo.yaml -b http://localhost:3000 --headed
 
 # With auth state
 sceneforge record -d demo.yaml -b http://localhost:3000 --storage ./auth.json
+
+# Record at 1080p viewport
+sceneforge record -d demo.yaml -b http://localhost:3000 --viewport 1080p
+
+# Record with zoom for extra detail
+sceneforge record -d demo.yaml -b http://localhost:3000 --viewport 1920x1080 --zoom 150
 ```
 
 ### setup
@@ -78,6 +91,14 @@ sceneforge pipeline --definition <path> [options]
 | `--skip-voiceover` | Skip voiceover generation |
 | `--skip-concat` | Skip final concatenation |
 | `--resume` | Resume from last successful step |
+| `--quality` | Quality preset: low, medium, high |
+| `--crf` | Override CRF value |
+| `--codec` | Video codec: libx264, libx265 |
+| `--viewport` | Browser viewport size (preset or WxH) |
+| `--zoom` | Browser zoom level: 100, 150, 200 |
+| `--output-size` | Output video dimensions (preset or WxH) |
+| `--output-width` | Output width (-1 for auto) |
+| `--output-height` | Output height (-1 for auto) |
 
 **Examples:**
 ```bash
@@ -89,6 +110,18 @@ sceneforge pipeline -d demo.yaml -b http://localhost:3000 --resume
 
 # Skip recording (use existing videos)
 sceneforge pipeline -d demo.yaml --skip-record
+
+# High quality 1080p output
+sceneforge pipeline -d demo.yaml -b http://localhost:3000 --quality high --output-size 1080p
+
+# TikTok/YouTube Shorts format
+sceneforge pipeline -d demo.yaml -b http://localhost:3000 --output-size tiktok --quality high
+
+# Full options: viewport + zoom + output scaling
+sceneforge pipeline -d demo.yaml -b http://localhost:3000 \
+  --viewport 1080p --zoom 150 \
+  --output-size 1080p \
+  --quality high
 ```
 
 ### split
@@ -106,9 +139,14 @@ sceneforge split --demo <name> [options]
 | `--quality` | Quality preset: low, medium, high (default: medium) |
 | `--crf` | Override CRF value (0-51, lower = better) |
 | `--codec` | Video codec: libx264, libx265 (default: libx264) |
+| `--output-size` | Output video dimensions (preset or WxH) |
+| `--output-width` | Output width (-1 for auto) |
+| `--output-height` | Output height (-1 for auto) |
 
 **Video Quality:** Configurable via `--quality` preset or `--crf`/`--codec` flags. Default: medium preset (CRF 18, libx264).
 
+**Output Presets:** 720p, 1080p, 1440p, 4k (landscape), tiktok/shorts/reels (1080x1920 portrait), square (1080x1080)
+
 ### voiceover
 Generate voiceover audio from scripts.
 
@@ -143,9 +181,14 @@ sceneforge add-audio --demo <name> [options]
 | `--quality` | Quality preset: low, medium, high (default: medium) |
 | `--crf` | Override CRF value (0-51, lower = better) |
 | `--codec` | Video codec: libx264, libx265 (default: libx264) |
+| `--output-size` | Output video dimensions (preset or WxH) |
+| `--output-width` | Output width (-1 for auto) |
+| `--output-height` | Output height (-1 for auto) |
 
 **Video Quality:** Configurable via `--quality` preset. Default: medium (CRF 18, libx264). Audio: AAC 192kbps.
 
+**Output Presets:** 720p, 1080p, 1440p, 4k (landscape), tiktok/shorts/reels (1080x1920 portrait), square (1080x1080)
+
 ### concat
 Concatenate step clips into final video.
 
@@ -161,6 +204,9 @@ sceneforge concat --demo <name> [options]
 | `--quality` | Quality preset: low, medium, high (default: medium) |
 | `--crf` | Override CRF value (0-51, lower = better) |
 | `--codec` | Video codec: libx264, libx265 (default: libx264) |
+| `--output-size` | Output video dimensions (preset or WxH) |
+| `--output-width` | Output width (-1 for auto) |
+| `--output-height` | Output height (-1 for auto) |
 | `--intro` | Intro video to prepend |
 | `--outro` | Outro video to append |
 | `--music` | Background music file |
@@ -168,6 +214,8 @@ sceneforge concat --demo <name> [options]
 
 **Video Quality:** Configurable via `--quality` preset. Default: medium (CRF 18, libx264). Includes `+faststart` for web streaming. Audio: AAC 192kbps.
 
+**Output Presets:** 720p, 1080p, 1440p, 4k (landscape), tiktok/shorts/reels (1080x1920 portrait), square (1080x1080)
+
 ### doctor
 Run environment diagnostics.
 
@@ -314,3 +362,87 @@ sceneforge split --demo my-demo --crf 15
 - 18 = Visually lossless (medium preset, default)
 - 23 = FFmpeg default
 - 28 = Lower quality (low preset)
+
+## Viewport Settings
+
+Control browser viewport size and zoom during recording for high-DPI capture.
+
+### CLI Flags (record, pipeline)
+
+```bash
+--viewport <WxH|preset>      # Browser viewport size (default: 1440x900)
+--width <px>                 # Viewport width (overrides --viewport)
+--height <px>                # Viewport height (overrides --viewport)
+--zoom <percent>             # Browser zoom: 100, 150, 200 (default: 100)
+--device-scale-factor <n>    # Device pixel ratio: 1, 1.5, 2
+```
+
+### Viewport Presets
+
+| Preset | Resolution |
+|--------|------------|
+| `720p` | 1280x720 |
+| `1080p` | 1920x1080 |
+| `1440p` | 2560x1440 |
+| `4k` | 3840x2160 |
+
+### Why Viewport Settings Matter
+
+- Larger viewports capture more UI detail and clarity
+- Zoom (device scale factor) increases effective pixel density
+- A 1080p viewport at 150% zoom captures at 2880x1620 effective pixels
+- Useful for creating videos that look sharp on retina displays
+
+## Output Dimensions
+
+Control final video resolution after processing. Supports landscape, portrait, and square formats.
+
+### CLI Flags (split, add-audio, concat, pipeline)
+
+```bash
+--output-size <WxH|preset>   # Output video dimensions
+--output-width <px>          # Output width (-1 for auto)
+--output-height <px>         # Output height (-1 for auto)
+```
+
+### Output Presets
+
+| Preset | Resolution | Use Case |
+|--------|------------|----------|
+| `720p` | 1280x720 | HD landscape |
+| `1080p` | 1920x1080 | Full HD landscape |
+| `1440p` | 2560x1440 | QHD landscape |
+| `4k` | 3840x2160 | 4K UHD landscape |
+| `720p-portrait` | 720x1280 | HD portrait |
+| `1080p-portrait` | 1080x1920 | Full HD portrait |
+| `tiktok` | 1080x1920 | TikTok/Instagram Reels |
+| `shorts` | 1080x1920 | YouTube Shorts |
+| `reels` | 1080x1920 | Instagram Reels |
+| `square` | 1080x1080 | Square format |
+| `square-720` | 720x720 | Square format (smaller) |
+
+### How Scaling Works
+
+- Videos are scaled while maintaining aspect ratio
+- Black padding (letterboxing/pillarboxing) is added when aspect ratios don't match
+- For portrait formats, landscape recordings will have vertical black bars
+- For auto-scale (`-1`), the dimension is calculated to maintain aspect ratio
+
+### Examples
+
+```bash
+# Standard 1080p output
+sceneforge concat --demo my-demo --output-size 1080p
+
+# TikTok/YouTube Shorts (portrait)
+sceneforge pipeline -d demo.yaml -b http://localhost:3000 --output-size tiktok
+
+# Square for Instagram
+sceneforge concat --demo my-demo --output-size square
+
+# Full pipeline with all options
+sceneforge pipeline -d demo.yaml -b http://localhost:3000 \
+  --viewport 1080p --zoom 150 \
+  --output-size 1080p \
+  --quality high
+```
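A minimal worked example of the letterboxing arithmetic behind "How Scaling Works", assuming the default 1440x900 recording mapped onto the 1080x1920 `tiktok` preset; the numbers are idealized math, not tool output:

```js
// Illustrative only: fit-then-pad math for a 1440x900 source and a 1080x1920 target.
const src = { width: 1440, height: 900 };
const dst = { width: 1080, height: 1920 };

// force_original_aspect_ratio=decrease: shrink to fit inside the target frame.
const ratio = Math.min(dst.width / src.width, dst.height / src.height); // 0.75
const fitted = { width: src.width * ratio, height: src.height * ratio }; // 1080 x 675

// pad=...:(ow-iw)/2:(oh-ih)/2: centre the fitted video and fill the rest with black.
const bars = { top: (dst.height - fitted.height) / 2, side: (dst.width - fitted.width) / 2 };
console.log(fitted, bars); // { width: 1080, height: 675 } { top: 622.5, side: 0 }
```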
package/dist/templates/base/cli-reference.md
CHANGED
Identical changes (+132 lines) to those shown for package/context/templates/base/cli-reference.md above.