@gannochenko/staticstripes 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.prettierrc +8 -0
- package/Makefile +69 -0
- package/dist/asset-manager.d.ts +16 -0
- package/dist/asset-manager.d.ts.map +1 -0
- package/dist/asset-manager.js +50 -0
- package/dist/asset-manager.js.map +1 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +257 -0
- package/dist/cli.js.map +1 -0
- package/dist/container-renderer.d.ts +21 -0
- package/dist/container-renderer.d.ts.map +1 -0
- package/dist/container-renderer.js +149 -0
- package/dist/container-renderer.js.map +1 -0
- package/dist/expression-parser.d.ts +63 -0
- package/dist/expression-parser.d.ts.map +1 -0
- package/dist/expression-parser.js +145 -0
- package/dist/expression-parser.js.map +1 -0
- package/dist/ffmpeg.d.ts +375 -0
- package/dist/ffmpeg.d.ts.map +1 -0
- package/dist/ffmpeg.js +997 -0
- package/dist/ffmpeg.js.map +1 -0
- package/dist/ffprobe.d.ts +2 -0
- package/dist/ffprobe.d.ts.map +1 -0
- package/dist/ffprobe.js +31 -0
- package/dist/ffprobe.js.map +1 -0
- package/dist/html-parser.d.ts +56 -0
- package/dist/html-parser.d.ts.map +1 -0
- package/dist/html-parser.js +208 -0
- package/dist/html-parser.js.map +1 -0
- package/dist/html-project-parser.d.ts +169 -0
- package/dist/html-project-parser.d.ts.map +1 -0
- package/dist/html-project-parser.js +954 -0
- package/dist/html-project-parser.js.map +1 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +18 -0
- package/dist/index.js.map +1 -0
- package/dist/label-generator.d.ts +35 -0
- package/dist/label-generator.d.ts.map +1 -0
- package/dist/label-generator.js +69 -0
- package/dist/label-generator.js.map +1 -0
- package/dist/project.d.ts +29 -0
- package/dist/project.d.ts.map +1 -0
- package/dist/project.js +137 -0
- package/dist/project.js.map +1 -0
- package/dist/sample-sequences.d.ts +5 -0
- package/dist/sample-sequences.d.ts.map +1 -0
- package/dist/sample-sequences.js +199 -0
- package/dist/sample-sequences.js.map +1 -0
- package/dist/sample-streams.d.ts +2 -0
- package/dist/sample-streams.d.ts.map +1 -0
- package/dist/sample-streams.js +109 -0
- package/dist/sample-streams.js.map +1 -0
- package/dist/sequence.d.ts +21 -0
- package/dist/sequence.d.ts.map +1 -0
- package/dist/sequence.js +269 -0
- package/dist/sequence.js.map +1 -0
- package/dist/stream.d.ts +135 -0
- package/dist/stream.d.ts.map +1 -0
- package/dist/stream.js +779 -0
- package/dist/stream.js.map +1 -0
- package/dist/type.d.ts +73 -0
- package/dist/type.d.ts.map +1 -0
- package/dist/type.js +3 -0
- package/dist/type.js.map +1 -0
- package/eslint.config.js +44 -0
- package/package.json +50 -0
- package/src/asset-manager.ts +55 -0
- package/src/cli.ts +306 -0
- package/src/container-renderer.ts +190 -0
- package/src/expression-parser.test.ts +459 -0
- package/src/expression-parser.ts +199 -0
- package/src/ffmpeg.ts +1403 -0
- package/src/ffprobe.ts +29 -0
- package/src/html-parser.ts +221 -0
- package/src/html-project-parser.ts +1195 -0
- package/src/index.ts +9 -0
- package/src/label-generator.ts +74 -0
- package/src/project.ts +180 -0
- package/src/sample-sequences.ts +225 -0
- package/src/sample-streams.ts +142 -0
- package/src/sequence.ts +330 -0
- package/src/stream.ts +1012 -0
- package/src/type.ts +81 -0
- package/tsconfig.json +24 -0
package/src/ffmpeg.ts
ADDED
|
@@ -0,0 +1,1403 @@
|
|
|
1
|
+
import { spawn } from 'child_process';
|
|
2
|
+
import { getLabel } from './label-generator';
|
|
3
|
+
import { Project } from './project';
|
|
4
|
+
|
|
5
|
+
/**
 * A named endpoint in the ffmpeg filter graph: the stream's tag plus its
 * media kind. Tags are wrapped as "[tag]" when a Filter is rendered.
 */
export type Label = {
  tag: string;
  isAudio: boolean; // false for video, true for audio
};
|
|
9
|
+
|
|
10
|
+
/**
 * Checks if FFmpeg is installed and available in the system PATH by
 * spawning `ffmpeg -version` and watching for stdout output.
 * @throws Error if FFmpeg is not found
 */
export async function checkFFmpegInstalled(): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    const ffmpeg = spawn('ffmpeg', ['-version'], {
      stdio: ['ignore', 'pipe', 'pipe'],
    });

    // Require at least one chunk on stdout in addition to exit code 0:
    // a silent zero-exit shim should not count as a working install.
    let hasOutput = false;

    ffmpeg.stdout.on('data', () => {
      hasOutput = true;
    });

    ffmpeg.on('close', (code) => {
      if (code === 0 && hasOutput) {
        resolve();
      } else {
        reject(
          new Error(
            'FFmpeg not found. Please install FFmpeg to use StaticStripes.\n' +
              'Visit https://ffmpeg.org/download.html for installation instructions.',
          ),
        );
      }
    });

    // 'error' fires when the process cannot be spawned at all; ENOENT in
    // the message means the binary is missing from PATH.
    ffmpeg.on('error', (error) => {
      if (error.message.includes('ENOENT')) {
        reject(
          new Error(
            'FFmpeg not found in system PATH. Please install FFmpeg to use StaticStripes.\n' +
              'Visit https://ffmpeg.org/download.html for installation instructions.\n\n' +
              'Quick install:\n' +
              '  macOS: brew install ffmpeg\n' +
              '  Ubuntu/Debian: sudo apt-get install ffmpeg\n' +
              '  Windows: Download from https://ffmpeg.org/download.html',
          ),
        );
      } else {
        reject(error);
      }
    });
  });
}
|
|
57
|
+
|
|
58
|
+
export type Millisecond = number;
|
|
59
|
+
|
|
60
|
+
/**
|
|
61
|
+
* Helper function to format milliseconds for FFmpeg time parameters
|
|
62
|
+
* @param value - Time value in milliseconds
|
|
63
|
+
* @returns Formatted string with 'ms' suffix (e.g., "1500ms")
|
|
64
|
+
*/
|
|
65
|
+
export function ms(value: Millisecond): string {
|
|
66
|
+
return `${value}ms`;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
/**
 * A single node of an ffmpeg filter_complex graph: input labels, the
 * filter body text, and output labels.
 */
export class Filter {
  constructor(
    private inputs: Label[],
    public outputs: Label[],
    public body: string,
  ) {}

  /**
   * Renders this filter as a filter_complex fragment in the form
   * "[in1][in2]body[out1]".
   * NOTE(review): relies on a `wrap()` helper defined elsewhere in this
   * file — presumably it brackets a tag as "[tag]"; verify there.
   */
  public render(): string {
    let result = '';
    this.inputs.forEach((input) => {
      result += wrap(input.tag);
    });

    result += this.body;

    // NOTE(review): the callback parameter is named `input` although this
    // iterates the outputs.
    this.outputs.forEach((input) => {
      result += wrap(input.tag);
    });

    return result;
  }
}
|
|
91
|
+
|
|
92
|
+
/**
 * Generates the complete ffmpeg command for rendering the project.
 * @param project - Supplies the asset index map, asset paths, and output definitions
 * @param filterComplex - Pre-rendered filter_complex graph text
 * @param outputName - Name of an output registered on the project
 * @param preset - Encoding speed preset passed to ffmpeg's -preset flag
 * @returns A single shell-style command string; arguments containing paths
 *          or graphs are double-quoted (runFFMpeg re-tokenizes this string)
 * @throws Error when `outputName` is not found on the project
 */
export function makeFFmpegCommand(
  project: Project,
  filterComplex: string,
  outputName: string,
  preset: 'ultrafast' | 'medium' = 'medium',
): string {
  const parts: string[] = ['ffmpeg'];

  // Overwrite output file without asking
  parts.push('-y');

  // Collect input file paths keyed by the project's asset index so that
  // -i order matches the [N:v]/[N:a] references inside filterComplex.
  const inputsByIndex = new Map<number, string>();
  for (const [assetName, index] of project.getAssetIndexMap()) {
    const asset = project.getAssetByName(assetName);
    if (asset) {
      inputsByIndex.set(index, asset.path);
    }
  }

  // Add inputs in sorted order
  const sortedIndices = Array.from(inputsByIndex.keys()).sort((a, b) => a - b);
  for (const index of sortedIndices) {
    const path = inputsByIndex.get(index);
    if (path) {
      parts.push(`-i "${path}"`);
    }
  }

  // Add filter_complex
  if (filterComplex) {
    parts.push(`-filter_complex "${filterComplex}"`);
  }

  // Map the output streams; the graph is expected to produce final labels
  // named [outv] (video) and [outa] (audio).
  parts.push('-map "[outv]"');
  parts.push('-map "[outa]"');

  // Increase buffer queue size for complex filter graphs
  parts.push('-max_muxing_queue_size 4096');

  // Add output parameters
  const output = project.getOutput(outputName);
  if (!output) {
    throw new Error(`Output "${outputName}" not found`);
  }

  const { width, height } = output.resolution;

  // Video encoding parameters
  parts.push(`-s ${width}x${height}`);
  parts.push(`-r ${output.fps}`);
  parts.push('-pix_fmt yuv420p'); // Standard pixel format for compatibility
  parts.push(`-preset ${preset}`); // Encoding speed preset
  // NOTE(review): no explicit -c:v here — the video codec falls back to
  // ffmpeg's default for the output container; confirm that is intended.

  // Audio encoding parameters
  parts.push('-c:a aac'); // AAC audio codec
  parts.push('-b:a 192k'); // Audio bitrate

  // Add output path
  parts.push(`"${output.path}"`);

  return parts.join(' ');
}
|
|
159
|
+
|
|
160
|
+
/**
 * Executes a command string produced by makeFFmpegCommand.
 * Strips the leading "ffmpeg " token, re-tokenizes the rest (honoring
 * double-quoted arguments, then removing the quotes), and spawns ffmpeg
 * directly — no shell is involved.
 * FFmpeg progress (stderr) is forwarded to this process's stderr.
 * @param ffmpegCommand - Full command string starting with "ffmpeg "
 * @returns Promise that resolves on exit code 0 and rejects otherwise
 */
export const runFFMpeg = async (ffmpegCommand: string) => {
  // Tokenize: runs of non-space/non-quote chars, or double-quoted spans.
  // NOTE(review): this does not handle escaped quotes inside arguments.
  const args =
    ffmpegCommand
      .slice('ffmpeg '.length)
      .match(/(?:[^\s"]+|"[^"]*")+/g)
      ?.map((arg) => arg.replace(/^"|"$/g, '')) || [];

  return new Promise<void>((resolve, reject) => {
    const ffmpeg = spawn('ffmpeg', args, {
      stdio: ['ignore', 'pipe', 'pipe'],
    });

    // FFmpeg outputs progress to stderr
    let stderrBuffer = '';
    ffmpeg.stderr.on('data', (data) => {
      const output = data.toString();
      stderrBuffer += output;

      // Show all output for debugging
      process.stderr.write(output);
    });

    ffmpeg.on('close', (code) => {
      process.stdout.write('\n');
      if (code === 0) {
        console.log('\n=== Render Complete ===');
        resolve();
      } else {
        console.error(`\n=== Render Failed ===`);
        console.error(`FFmpeg exited with code ${code}`);
        reject(new Error(`FFmpeg process exited with code ${code}`));
      }
    });

    ffmpeg.on('error', (error) => {
      console.error('\n=== Render Failed ===');
      console.error('Error:', error.message);
      reject(error);
    });
  });
};
|
|
201
|
+
|
|
202
|
+
/**
 * Creates a concat filter.
 * Automatically determines the number of segments (n) and per-segment
 * stream counts (v, a) from the input labels, and generates matching
 * output labels (v video outputs followed by a audio outputs).
 * @param inputs - Array of input stream labels
 * @returns Filter with auto-generated outputs
 * @throws Error when inputs is empty
 */
export function makeConcat(inputs: Label[]): Filter {
  if (inputs.length === 0) {
    throw new Error('makeConcat: inputs cannot be empty');
  }

  // Count total video and audio streams in inputs
  let totalVideo = 0;
  let totalAudio = 0;
  for (const input of inputs) {
    if (input.isAudio) {
      totalAudio++;
    } else {
      totalVideo++;
    }
  }

  // Find the pattern: determine n, v, a where:
  // - n * v = totalVideo
  // - n * a = totalAudio
  // - n * (v + a) = inputs.length
  // We want the largest n (most segments, fewest streams per segment).
  // Note: n=1 always works, so we're guaranteed to find a pattern.

  let n = 0;
  let v = 0;
  let a = 0;

  // Try from largest n down to 1
  for (let tryN = inputs.length; tryN >= 1; tryN--) {
    if (totalVideo % tryN === 0 && totalAudio % tryN === 0) {
      const tryV = totalVideo / tryN;
      const tryA = totalAudio / tryN;
      // The third constraint (segments partition the inputs exactly)
      if (tryV + tryA === inputs.length / tryN) {
        n = tryN;
        v = tryV;
        a = tryA;
        break;
      }
    }
  }

  // n should always be set (at minimum n=1 always works), but check to be safe
  if (n === 0) {
    throw new Error(
      'makeConcat: Internal error - failed to determine pattern (this should never happen)',
    );
  }

  // Generate output labels: v video outputs first, then a audio outputs,
  // matching ffmpeg concat's output ordering.
  const outputs: Label[] = [];

  // Add video outputs
  for (let i = 0; i < v; i++) {
    outputs.push({
      tag: getLabel(),
      isAudio: false,
    });
  }

  // Add audio outputs
  for (let i = 0; i < a; i++) {
    outputs.push({
      tag: getLabel(),
      isAudio: true,
    });
  }

  return new Filter(inputs, outputs, `concat=n=${n}:v=${v}:a=${a}`);
}
|
|
278
|
+
|
|
279
|
+
/**
|
|
280
|
+
* Creates an xfade (crossfade) filter for video streams
|
|
281
|
+
* Note: xfade only works with video, not audio
|
|
282
|
+
* @param input1 - First video input stream label
|
|
283
|
+
* @param input2 - Second video input stream label
|
|
284
|
+
* @param options - Transition parameters
|
|
285
|
+
* @returns Filter with auto-generated video output
|
|
286
|
+
*/
|
|
287
|
+
export function makeXFade(
|
|
288
|
+
inputs: Label[],
|
|
289
|
+
options: {
|
|
290
|
+
duration: Millisecond;
|
|
291
|
+
offset: Millisecond;
|
|
292
|
+
transition?: string;
|
|
293
|
+
},
|
|
294
|
+
): Filter {
|
|
295
|
+
if (inputs.length !== 2) {
|
|
296
|
+
throw new Error(`makeXFade: expects two inputs`);
|
|
297
|
+
}
|
|
298
|
+
|
|
299
|
+
const input1 = inputs[0];
|
|
300
|
+
const input2 = inputs[1];
|
|
301
|
+
|
|
302
|
+
// Validate that both inputs are video (xfade doesn't support audio)
|
|
303
|
+
if (input1.isAudio) {
|
|
304
|
+
throw new Error(
|
|
305
|
+
`makeXFade: input1 must be video, got audio (tag: ${input1.tag})`,
|
|
306
|
+
);
|
|
307
|
+
}
|
|
308
|
+
if (input2.isAudio) {
|
|
309
|
+
throw new Error(
|
|
310
|
+
`makeXFade: input2 must be video, got audio (tag: ${input2.tag})`,
|
|
311
|
+
);
|
|
312
|
+
}
|
|
313
|
+
|
|
314
|
+
const transition = options.transition ?? 'fade';
|
|
315
|
+
|
|
316
|
+
// Auto-generate video output
|
|
317
|
+
const output: Label = {
|
|
318
|
+
tag: getLabel(),
|
|
319
|
+
isAudio: false,
|
|
320
|
+
};
|
|
321
|
+
|
|
322
|
+
return new Filter(
|
|
323
|
+
[input1, input2],
|
|
324
|
+
[output],
|
|
325
|
+
`xfade=transition=${transition}:duration=${ms(options.duration)}:offset=${ms(options.offset)}`,
|
|
326
|
+
);
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
/**
|
|
330
|
+
* Creates a null filter (passthrough)
|
|
331
|
+
* @param input - Input stream label
|
|
332
|
+
*/
|
|
333
|
+
export function makeNull(inputs: Label[]): Filter {
|
|
334
|
+
if (inputs.length !== 1) {
|
|
335
|
+
throw new Error(`makeNull: expects one input`);
|
|
336
|
+
}
|
|
337
|
+
|
|
338
|
+
const input1 = inputs[0];
|
|
339
|
+
|
|
340
|
+
const outputLabelTag = getLabel();
|
|
341
|
+
|
|
342
|
+
return new Filter(
|
|
343
|
+
[input1],
|
|
344
|
+
[
|
|
345
|
+
{
|
|
346
|
+
tag: outputLabelTag,
|
|
347
|
+
isAudio: input1.isAudio,
|
|
348
|
+
},
|
|
349
|
+
],
|
|
350
|
+
input1.isAudio ? 'anull' : 'null',
|
|
351
|
+
);
|
|
352
|
+
}
|
|
353
|
+
|
|
354
|
+
/**
 * Creates an overlay filter placing the second video on top of the first.
 * @param inputs - Exactly two video stream labels: [base, overlay]
 * @param options - Optional x/y placement (numbers or ffmpeg expressions);
 *                  when either is given, the other defaults to 0
 * @returns Filter with a single auto-generated video output
 */
export function makeOverlay(
  inputs: Label[],
  options?: {
    x?: string | number;
    y?: string | number;
  },
): Filter {
  if (inputs.length !== 2) {
    throw new Error(`makeOverlay: expects two inputs`);
  }

  const input1 = inputs[0];
  const input2 = inputs[1];

  // Validate that both inputs are video (overlay doesn't support audio)
  if (input1.isAudio) {
    throw new Error(
      `makeOverlay: input1 must be video, got audio (tag: ${input1.tag})`,
    );
  }
  if (input2.isAudio) {
    throw new Error(
      `makeOverlay: input2 must be video, got audio (tag: ${input2.tag})`,
    );
  }

  const output = {
    tag: getLabel(),
    isAudio: false,
  };

  // Default placement is ffmpeg's own (top-left) with automatic pixel
  // format negotiation; explicit x/y is only emitted when requested.
  let overlayParams = 'format=auto';
  if (options?.x !== undefined || options?.y !== undefined) {
    const x = options.x ?? 0;
    const y = options.y ?? 0;
    overlayParams = `x=${x}:y=${y}:format=auto`;
  }

  // eof_action=pass keeps the base stream running after the overlay ends.
  return new Filter(
    inputs,
    [output],
    `overlay=${overlayParams}:eof_action=pass`,
  );
}
|
|
398
|
+
|
|
399
|
+
export function makeFps(inputs: Label[], fps: number): Filter {
|
|
400
|
+
if (inputs.length !== 1) {
|
|
401
|
+
throw new Error(`makeFps: expects one input`);
|
|
402
|
+
}
|
|
403
|
+
|
|
404
|
+
const input1 = inputs[0];
|
|
405
|
+
if (input1.isAudio) {
|
|
406
|
+
throw new Error(
|
|
407
|
+
`makeFps: input1 must be video, got audio (tag: ${input1.tag})`,
|
|
408
|
+
);
|
|
409
|
+
}
|
|
410
|
+
|
|
411
|
+
const output = {
|
|
412
|
+
tag: getLabel(),
|
|
413
|
+
isAudio: false,
|
|
414
|
+
};
|
|
415
|
+
|
|
416
|
+
return new Filter(inputs, [output], `fps=${fps}`);
|
|
417
|
+
}
|
|
418
|
+
|
|
419
|
+
export function makeScale(
|
|
420
|
+
inputs: Label[],
|
|
421
|
+
options: { width: number | string; height: number | string; flags?: string },
|
|
422
|
+
): Filter {
|
|
423
|
+
if (inputs.length !== 1) {
|
|
424
|
+
throw new Error(`makeFps: expects one input`);
|
|
425
|
+
}
|
|
426
|
+
|
|
427
|
+
const input1 = inputs[0];
|
|
428
|
+
if (input1.isAudio) {
|
|
429
|
+
throw new Error(
|
|
430
|
+
`makeScale: input1 must be video, got audio (tag: ${input1.tag})`,
|
|
431
|
+
);
|
|
432
|
+
}
|
|
433
|
+
|
|
434
|
+
const output = {
|
|
435
|
+
tag: getLabel(),
|
|
436
|
+
isAudio: false,
|
|
437
|
+
};
|
|
438
|
+
|
|
439
|
+
const algo = options.flags;
|
|
440
|
+
|
|
441
|
+
return new Filter(
|
|
442
|
+
inputs,
|
|
443
|
+
[output],
|
|
444
|
+
`scale=${options.width}:${options.height}${algo ? `:${algo}` : ''}`,
|
|
445
|
+
);
|
|
446
|
+
}
|
|
447
|
+
|
|
448
|
+
/**
|
|
449
|
+
* Creates a split filter (splits one input into multiple outputs)
|
|
450
|
+
* @param input - Input stream label
|
|
451
|
+
* @param outputLabels - Array of output stream labels
|
|
452
|
+
*/
|
|
453
|
+
export function makeSplit(inputs: Label[]): Filter {
|
|
454
|
+
if (inputs.length !== 1) {
|
|
455
|
+
throw new Error(`makeFps: expects one input`);
|
|
456
|
+
}
|
|
457
|
+
|
|
458
|
+
const input1 = inputs[0];
|
|
459
|
+
|
|
460
|
+
const output1 = {
|
|
461
|
+
tag: getLabel(),
|
|
462
|
+
isAudio: input1.isAudio,
|
|
463
|
+
};
|
|
464
|
+
const output2 = {
|
|
465
|
+
tag: getLabel(),
|
|
466
|
+
isAudio: input1.isAudio,
|
|
467
|
+
};
|
|
468
|
+
|
|
469
|
+
return new Filter(inputs, [output1, output2], 'split');
|
|
470
|
+
}
|
|
471
|
+
|
|
472
|
+
export function makeTranspose(
|
|
473
|
+
inputs: Label[],
|
|
474
|
+
direction: 0 | 1 | 2 | 3,
|
|
475
|
+
): Filter {
|
|
476
|
+
const output = {
|
|
477
|
+
tag: getLabel(),
|
|
478
|
+
isAudio: false,
|
|
479
|
+
};
|
|
480
|
+
|
|
481
|
+
const input1 = inputs[0];
|
|
482
|
+
if (input1.isAudio) {
|
|
483
|
+
throw new Error(
|
|
484
|
+
`makeTranspose: input1 must be video, got audio (tag: ${input1.tag})`,
|
|
485
|
+
);
|
|
486
|
+
}
|
|
487
|
+
|
|
488
|
+
return new Filter(inputs, [output], `transpose=${direction}`);
|
|
489
|
+
}
|
|
490
|
+
|
|
491
|
+
/**
|
|
492
|
+
* Creates a trim filter to cut streams to a specific time range
|
|
493
|
+
* @param inputs - Input stream labels (video or audio)
|
|
494
|
+
* @param start - Start time in milliseconds
|
|
495
|
+
* @param end - End time in milliseconds
|
|
496
|
+
* @returns Filter with trimmed output
|
|
497
|
+
*/
|
|
498
|
+
export function makeTrim(
|
|
499
|
+
inputs: Label[],
|
|
500
|
+
start: Millisecond,
|
|
501
|
+
end: Millisecond,
|
|
502
|
+
): Filter {
|
|
503
|
+
const input1 = inputs[0];
|
|
504
|
+
|
|
505
|
+
const output = {
|
|
506
|
+
tag: getLabel(),
|
|
507
|
+
isAudio: input1.isAudio,
|
|
508
|
+
};
|
|
509
|
+
|
|
510
|
+
const prefix = input1.isAudio ? 'a' : '';
|
|
511
|
+
|
|
512
|
+
return new Filter(
|
|
513
|
+
inputs,
|
|
514
|
+
[output],
|
|
515
|
+
`${prefix}trim=start=${ms(start)}:end=${ms(end)},${prefix}setpts=PTS-STARTPTS`,
|
|
516
|
+
);
|
|
517
|
+
}
|
|
518
|
+
|
|
519
|
+
/**
 * Creates a tpad/apad filter to add temporal padding (frames/silence)
 * @param inputs - Input stream labels (video or audio)
 * @param options - Padding parameters
 *   - start: Duration to add at the beginning (in milliseconds, default: 0)
 *   - stop: Duration to add at the end (in milliseconds, default: 0)
 *   - startMode: 'clone' (duplicate frames) or 'add' (colored frames, default) — video only
 *   - stopMode: 'clone' (duplicate frames) or 'add' (colored frames, default) — video only
 *   - color: Color of added frames (video only, e.g., 'black', '#00FF00', default: 'black')
 */
export function makeTPad(
  inputs: Label[],
  options: {
    start?: Millisecond;
    stop?: Millisecond;
    color?: string;
    startMode?: 'clone' | 'add';
    stopMode?: 'clone' | 'add';
  } = {},
): Filter {
  const input = inputs[0];

  const output = {
    tag: getLabel(),
    isAudio: input.isAudio,
  };

  const start = options.start ?? 0;
  const stop = options.stop ?? 0;
  const start_mode = options.startMode ?? 'add';
  const stop_mode = options.stopMode ?? 'add';
  const color = options.color ?? 'black';

  // Only used on the video path below; the audio path builds adelay/apad.
  const filterName = input.isAudio ? 'apad' : 'tpad';

  if (input.isAudio) {
    // For audio: use adelay for start padding, apad for stop padding
    const filters: string[] = [];

    // Add silence at the start using adelay (already in milliseconds).
    // NOTE(review): "start|start" delays exactly two channels — assumes a
    // stereo stream; confirm for mono/multichannel inputs.
    if (start > 0) {
      filters.push(`adelay=${start}|${start}`);
    }

    // Add silence at the end using apad
    if (stop > 0) {
      filters.push(`apad=pad_dur=${ms(stop)}`);
    }

    // With no padding requested, fall back to a passthrough.
    const filterStr = filters.length > 0 ? filters.join(',') : 'anull';
    return new Filter(inputs, [output], filterStr);
  } else {
    // tpad for video
    const params: string[] = [];
    if (start > 0) {
      params.push(`start_duration=${ms(start)}`);
      params.push(`start_mode=${start_mode}`);
    }
    if (stop > 0) {
      params.push(`stop_duration=${ms(stop)}`);
      params.push(`stop_mode=${stop_mode}`);
    }
    // Add color parameter for added frames (when mode is 'add')
    if (
      (start_mode === 'add' && start > 0) ||
      (stop_mode === 'add' && stop > 0)
    ) {
      params.push(`color=${color}`);
    }
    const filterParams = params.length > 0 ? `=${params.join(':')}` : '';
    return new Filter(inputs, [output], `${filterName}${filterParams}`);
  }
}
|
|
592
|
+
|
|
593
|
+
/**
|
|
594
|
+
* Creates a pad filter to add borders/letterboxing
|
|
595
|
+
* @param inputs - Input stream labels (must be video)
|
|
596
|
+
* @param width - Output width (can be expression like 'iw' or number)
|
|
597
|
+
* @param height - Output height (can be expression like 'ih' or number)
|
|
598
|
+
* @param x - X position (default: center using '(ow-iw)/2')
|
|
599
|
+
* @param y - Y position (default: center using '(oh-ih)/2')
|
|
600
|
+
* @param color - Background color (default: 'black')
|
|
601
|
+
*/
|
|
602
|
+
export function makePad(
|
|
603
|
+
inputs: Label[],
|
|
604
|
+
options: {
|
|
605
|
+
width: number | string;
|
|
606
|
+
height: number | string;
|
|
607
|
+
x?: string;
|
|
608
|
+
y?: string;
|
|
609
|
+
color?: string;
|
|
610
|
+
},
|
|
611
|
+
): Filter {
|
|
612
|
+
const input = inputs[0];
|
|
613
|
+
|
|
614
|
+
if (input.isAudio) {
|
|
615
|
+
throw new Error(
|
|
616
|
+
`makePad: input must be video, got audio (tag: ${input.tag})`,
|
|
617
|
+
);
|
|
618
|
+
}
|
|
619
|
+
|
|
620
|
+
const output = {
|
|
621
|
+
tag: getLabel(),
|
|
622
|
+
isAudio: false,
|
|
623
|
+
};
|
|
624
|
+
|
|
625
|
+
const x = options.x ?? '(ow-iw)/2';
|
|
626
|
+
const y = options.y ?? '(oh-ih)/2';
|
|
627
|
+
const color = options.color ?? 'black';
|
|
628
|
+
|
|
629
|
+
return new Filter(
|
|
630
|
+
inputs,
|
|
631
|
+
[output],
|
|
632
|
+
`pad=${options.width}:${options.height}:${x}:${y}:${color}`,
|
|
633
|
+
);
|
|
634
|
+
}
|
|
635
|
+
|
|
636
|
+
/**
|
|
637
|
+
* Creates a crop filter to cut video to specific dimensions
|
|
638
|
+
* @param inputs - Input stream labels (must be video)
|
|
639
|
+
* @param options - Crop parameters
|
|
640
|
+
* - width: Output width (can be expression or number)
|
|
641
|
+
* - height: Output height (can be expression or number)
|
|
642
|
+
* - x: X position to start crop (default: center using '(in_w-out_w)/2')
|
|
643
|
+
* - y: Y position to start crop (default: center using '(in_h-out_h)/2')
|
|
644
|
+
*/
|
|
645
|
+
export function makeCrop(
|
|
646
|
+
inputs: Label[],
|
|
647
|
+
options: {
|
|
648
|
+
width: number | string;
|
|
649
|
+
height: number | string;
|
|
650
|
+
x?: string;
|
|
651
|
+
y?: string;
|
|
652
|
+
},
|
|
653
|
+
): Filter {
|
|
654
|
+
const input = inputs[0];
|
|
655
|
+
|
|
656
|
+
if (input.isAudio) {
|
|
657
|
+
throw new Error(
|
|
658
|
+
`makeCrop: input must be video, got audio (tag: ${input.tag})`,
|
|
659
|
+
);
|
|
660
|
+
}
|
|
661
|
+
|
|
662
|
+
const output = {
|
|
663
|
+
tag: getLabel(),
|
|
664
|
+
isAudio: false,
|
|
665
|
+
};
|
|
666
|
+
|
|
667
|
+
const x = options.x ?? '(in_w-out_w)/2';
|
|
668
|
+
const y = options.y ?? '(in_h-out_h)/2';
|
|
669
|
+
|
|
670
|
+
return new Filter(
|
|
671
|
+
inputs,
|
|
672
|
+
[output],
|
|
673
|
+
`crop=${options.width}:${options.height}:${x}:${y}`,
|
|
674
|
+
);
|
|
675
|
+
}
|
|
676
|
+
|
|
677
|
+
/**
|
|
678
|
+
* Creates an eq (equalization) filter for color correction
|
|
679
|
+
* @param inputs - Input stream labels (must be video)
|
|
680
|
+
* @param options - Color adjustment parameters
|
|
681
|
+
* - brightness: -1.0 to 1.0 (default: 0)
|
|
682
|
+
* - contrast: -1000 to 1000 (default: 1.0)
|
|
683
|
+
* - saturation: 0 to 3 (default: 1.0)
|
|
684
|
+
* - gamma: 0.1 to 10 (default: 1.0)
|
|
685
|
+
*/
|
|
686
|
+
export function makeEq(
|
|
687
|
+
inputs: Label[],
|
|
688
|
+
options: {
|
|
689
|
+
brightness?: number;
|
|
690
|
+
contrast?: number;
|
|
691
|
+
saturation?: number;
|
|
692
|
+
gamma?: number;
|
|
693
|
+
},
|
|
694
|
+
): Filter {
|
|
695
|
+
const input = inputs[0];
|
|
696
|
+
|
|
697
|
+
if (input.isAudio) {
|
|
698
|
+
throw new Error(
|
|
699
|
+
`makeEq: input must be video, got audio (tag: ${input.tag})`,
|
|
700
|
+
);
|
|
701
|
+
}
|
|
702
|
+
|
|
703
|
+
const output = {
|
|
704
|
+
tag: getLabel(),
|
|
705
|
+
isAudio: false,
|
|
706
|
+
};
|
|
707
|
+
|
|
708
|
+
const params: string[] = [];
|
|
709
|
+
if (options.brightness !== undefined)
|
|
710
|
+
params.push(`brightness=${options.brightness}`);
|
|
711
|
+
if (options.contrast !== undefined)
|
|
712
|
+
params.push(`contrast=${options.contrast}`);
|
|
713
|
+
if (options.saturation !== undefined)
|
|
714
|
+
params.push(`saturation=${options.saturation}`);
|
|
715
|
+
if (options.gamma !== undefined) params.push(`gamma=${options.gamma}`);
|
|
716
|
+
|
|
717
|
+
const filterStr = params.length > 0 ? `eq=${params.join(':')}` : 'eq';
|
|
718
|
+
|
|
719
|
+
return new Filter(inputs, [output], filterStr);
|
|
720
|
+
}
|
|
721
|
+
|
|
722
|
+
/**
|
|
723
|
+
* Creates a colorchannelmixer filter for advanced color adjustment
|
|
724
|
+
* @param inputs - Input stream labels (must be video)
|
|
725
|
+
* @param options - Color channel mixing parameters
|
|
726
|
+
* - rr: Red contribution to red channel (-2 to 2, default: 1)
|
|
727
|
+
* - rg: Green contribution to red channel (-2 to 2, default: 0)
|
|
728
|
+
* - rb: Blue contribution to red channel (-2 to 2, default: 0)
|
|
729
|
+
* - ra: Alpha contribution to red channel (-2 to 2, default: 0)
|
|
730
|
+
* - gr: Red contribution to green channel (-2 to 2, default: 0)
|
|
731
|
+
* - gg: Green contribution to green channel (-2 to 2, default: 1)
|
|
732
|
+
* - gb: Blue contribution to green channel (-2 to 2, default: 0)
|
|
733
|
+
* - ga: Alpha contribution to green channel (-2 to 2, default: 0)
|
|
734
|
+
* - br: Red contribution to blue channel (-2 to 2, default: 0)
|
|
735
|
+
* - bg: Green contribution to blue channel (-2 to 2, default: 0)
|
|
736
|
+
* - bb: Blue contribution to blue channel (-2 to 2, default: 1)
|
|
737
|
+
* - ba: Alpha contribution to blue channel (-2 to 2, default: 0)
|
|
738
|
+
*/
|
|
739
|
+
export function makeColorChannelMixer(
|
|
740
|
+
inputs: Label[],
|
|
741
|
+
options: {
|
|
742
|
+
rr?: number;
|
|
743
|
+
rg?: number;
|
|
744
|
+
rb?: number;
|
|
745
|
+
ra?: number;
|
|
746
|
+
gr?: number;
|
|
747
|
+
gg?: number;
|
|
748
|
+
gb?: number;
|
|
749
|
+
ga?: number;
|
|
750
|
+
br?: number;
|
|
751
|
+
bg?: number;
|
|
752
|
+
bb?: number;
|
|
753
|
+
ba?: number;
|
|
754
|
+
} = {},
|
|
755
|
+
): Filter {
|
|
756
|
+
const input = inputs[0];
|
|
757
|
+
|
|
758
|
+
if (input.isAudio) {
|
|
759
|
+
throw new Error(
|
|
760
|
+
`makeColorChannelMixer: input must be video, got audio (tag: ${input.tag})`,
|
|
761
|
+
);
|
|
762
|
+
}
|
|
763
|
+
|
|
764
|
+
const output = {
|
|
765
|
+
tag: getLabel(),
|
|
766
|
+
isAudio: false,
|
|
767
|
+
};
|
|
768
|
+
|
|
769
|
+
const params: string[] = [];
|
|
770
|
+
if (options.rr !== undefined) params.push(`rr=${options.rr}`);
|
|
771
|
+
if (options.rg !== undefined) params.push(`rg=${options.rg}`);
|
|
772
|
+
if (options.rb !== undefined) params.push(`rb=${options.rb}`);
|
|
773
|
+
if (options.ra !== undefined) params.push(`ra=${options.ra}`);
|
|
774
|
+
if (options.gr !== undefined) params.push(`gr=${options.gr}`);
|
|
775
|
+
if (options.gg !== undefined) params.push(`gg=${options.gg}`);
|
|
776
|
+
if (options.gb !== undefined) params.push(`gb=${options.gb}`);
|
|
777
|
+
if (options.ga !== undefined) params.push(`ga=${options.ga}`);
|
|
778
|
+
if (options.br !== undefined) params.push(`br=${options.br}`);
|
|
779
|
+
if (options.bg !== undefined) params.push(`bg=${options.bg}`);
|
|
780
|
+
if (options.bb !== undefined) params.push(`bb=${options.bb}`);
|
|
781
|
+
if (options.ba !== undefined) params.push(`ba=${options.ba}`);
|
|
782
|
+
|
|
783
|
+
const filterStr =
|
|
784
|
+
params.length > 0
|
|
785
|
+
? `colorchannelmixer=${params.join(':')}`
|
|
786
|
+
: 'colorchannelmixer';
|
|
787
|
+
|
|
788
|
+
return new Filter(inputs, [output], filterStr);
|
|
789
|
+
}
|
|
790
|
+
|
|
791
|
+
/**
|
|
792
|
+
* Creates a curves filter for color grading (similar to Photoshop curves)
|
|
793
|
+
* @param inputs - Input stream labels (must be video)
|
|
794
|
+
* @param options - Curves parameters
|
|
795
|
+
* - preset: Preset curve name (e.g., 'darker', 'lighter', 'increase_contrast', 'vintage', etc.)
|
|
796
|
+
* - master: Master curve points (affects all channels, e.g., '0/0 0.5/0.6 1/1')
|
|
797
|
+
* - red: Red channel curve points
|
|
798
|
+
* - green: Green channel curve points
|
|
799
|
+
* - blue: Blue channel curve points
|
|
800
|
+
* - all: Apply same curve to all RGB channels
|
|
801
|
+
*/
|
|
802
|
+
export function makeCurves(
|
|
803
|
+
inputs: Label[],
|
|
804
|
+
options: {
|
|
805
|
+
preset?: string;
|
|
806
|
+
master?: string;
|
|
807
|
+
red?: string;
|
|
808
|
+
green?: string;
|
|
809
|
+
blue?: string;
|
|
810
|
+
all?: string;
|
|
811
|
+
} = {},
|
|
812
|
+
): Filter {
|
|
813
|
+
const input = inputs[0];
|
|
814
|
+
|
|
815
|
+
if (input.isAudio) {
|
|
816
|
+
throw new Error(
|
|
817
|
+
`makeCurves: input must be video, got audio (tag: ${input.tag})`,
|
|
818
|
+
);
|
|
819
|
+
}
|
|
820
|
+
|
|
821
|
+
const output = {
|
|
822
|
+
tag: getLabel(),
|
|
823
|
+
isAudio: false,
|
|
824
|
+
};
|
|
825
|
+
|
|
826
|
+
const params: string[] = [];
|
|
827
|
+
if (options.preset !== undefined) params.push(`preset=${options.preset}`);
|
|
828
|
+
if (options.master !== undefined) params.push(`master='${options.master}'`);
|
|
829
|
+
if (options.red !== undefined) params.push(`red='${options.red}'`);
|
|
830
|
+
if (options.green !== undefined) params.push(`green='${options.green}'`);
|
|
831
|
+
if (options.blue !== undefined) params.push(`blue='${options.blue}'`);
|
|
832
|
+
if (options.all !== undefined) params.push(`all='${options.all}'`);
|
|
833
|
+
|
|
834
|
+
const filterStr = params.length > 0 ? `curves=${params.join(':')}` : 'curves';
|
|
835
|
+
|
|
836
|
+
return new Filter(inputs, [output], filterStr);
|
|
837
|
+
}
|
|
838
|
+
|
|
839
|
+
/**
|
|
840
|
+
* Creates a vignette filter to darken the corners/edges
|
|
841
|
+
* @param inputs - Input stream labels (must be video)
|
|
842
|
+
* @param options - Vignette parameters
|
|
843
|
+
* - angle: Lens angle (0 to PI/2, default: PI/5)
|
|
844
|
+
* - x0: X coordinate of vignette center (0 to 1, default: w/2)
|
|
845
|
+
* - y0: Y coordinate of vignette center (0 to 1, default: h/2)
|
|
846
|
+
* - mode: Vignette mode ('forward' or 'backward', default: 'forward')
|
|
847
|
+
* - eval: When to evaluate expressions ('init' or 'frame', default: 'init')
|
|
848
|
+
*/
|
|
849
|
+
export function makeVignette(
|
|
850
|
+
inputs: Label[],
|
|
851
|
+
options: {
|
|
852
|
+
angle?: string;
|
|
853
|
+
x0?: string;
|
|
854
|
+
y0?: string;
|
|
855
|
+
mode?: 'forward' | 'backward';
|
|
856
|
+
eval?: 'init' | 'frame';
|
|
857
|
+
} = {},
|
|
858
|
+
): Filter {
|
|
859
|
+
const input = inputs[0];
|
|
860
|
+
|
|
861
|
+
if (input.isAudio) {
|
|
862
|
+
throw new Error(
|
|
863
|
+
`makeVignette: input must be video, got audio (tag: ${input.tag})`,
|
|
864
|
+
);
|
|
865
|
+
}
|
|
866
|
+
|
|
867
|
+
const output = {
|
|
868
|
+
tag: getLabel(),
|
|
869
|
+
isAudio: false,
|
|
870
|
+
};
|
|
871
|
+
|
|
872
|
+
const params: string[] = [];
|
|
873
|
+
if (options.angle !== undefined) params.push(`angle='${options.angle}'`);
|
|
874
|
+
if (options.x0 !== undefined) params.push(`x0='${options.x0}'`);
|
|
875
|
+
if (options.y0 !== undefined) params.push(`y0='${options.y0}'`);
|
|
876
|
+
if (options.mode !== undefined) params.push(`mode=${options.mode}`);
|
|
877
|
+
if (options.eval !== undefined) params.push(`eval=${options.eval}`);
|
|
878
|
+
|
|
879
|
+
const filterStr =
|
|
880
|
+
params.length > 0 ? `vignette=${params.join(':').replace(/'/g, '')}` : 'vignette';
|
|
881
|
+
|
|
882
|
+
return new Filter(inputs, [output], filterStr);
|
|
883
|
+
}
|
|
884
|
+
|
|
885
|
+
/**
|
|
886
|
+
* Creates a colorbalance filter to adjust colors in shadows, midtones, and highlights
|
|
887
|
+
* @param inputs - Input stream labels (must be video)
|
|
888
|
+
* @param options - Color balance parameters
|
|
889
|
+
* - rs: Red shift for shadows (-1 to 1, default: 0)
|
|
890
|
+
* - gs: Green shift for shadows (-1 to 1, default: 0)
|
|
891
|
+
* - bs: Blue shift for shadows (-1 to 1, default: 0)
|
|
892
|
+
* - rm: Red shift for midtones (-1 to 1, default: 0)
|
|
893
|
+
* - gm: Green shift for midtones (-1 to 1, default: 0)
|
|
894
|
+
* - bm: Blue shift for midtones (-1 to 1, default: 0)
|
|
895
|
+
* - rh: Red shift for highlights (-1 to 1, default: 0)
|
|
896
|
+
* - gh: Green shift for highlights (-1 to 1, default: 0)
|
|
897
|
+
* - bh: Blue shift for highlights (-1 to 1, default: 0)
|
|
898
|
+
*/
|
|
899
|
+
export function makeColorBalance(
|
|
900
|
+
inputs: Label[],
|
|
901
|
+
options: {
|
|
902
|
+
rs?: number;
|
|
903
|
+
gs?: number;
|
|
904
|
+
bs?: number;
|
|
905
|
+
rm?: number;
|
|
906
|
+
gm?: number;
|
|
907
|
+
bm?: number;
|
|
908
|
+
rh?: number;
|
|
909
|
+
gh?: number;
|
|
910
|
+
bh?: number;
|
|
911
|
+
} = {},
|
|
912
|
+
): Filter {
|
|
913
|
+
const input = inputs[0];
|
|
914
|
+
|
|
915
|
+
if (input.isAudio) {
|
|
916
|
+
throw new Error(
|
|
917
|
+
`makeColorBalance: input must be video, got audio (tag: ${input.tag})`,
|
|
918
|
+
);
|
|
919
|
+
}
|
|
920
|
+
|
|
921
|
+
const output = {
|
|
922
|
+
tag: getLabel(),
|
|
923
|
+
isAudio: false,
|
|
924
|
+
};
|
|
925
|
+
|
|
926
|
+
const params: string[] = [];
|
|
927
|
+
if (options.rs !== undefined) params.push(`rs=${options.rs}`);
|
|
928
|
+
if (options.gs !== undefined) params.push(`gs=${options.gs}`);
|
|
929
|
+
if (options.bs !== undefined) params.push(`bs=${options.bs}`);
|
|
930
|
+
if (options.rm !== undefined) params.push(`rm=${options.rm}`);
|
|
931
|
+
if (options.gm !== undefined) params.push(`gm=${options.gm}`);
|
|
932
|
+
if (options.bm !== undefined) params.push(`bm=${options.bm}`);
|
|
933
|
+
if (options.rh !== undefined) params.push(`rh=${options.rh}`);
|
|
934
|
+
if (options.gh !== undefined) params.push(`gh=${options.gh}`);
|
|
935
|
+
if (options.bh !== undefined) params.push(`bh=${options.bh}`);
|
|
936
|
+
|
|
937
|
+
const filterStr =
|
|
938
|
+
params.length > 0 ? `colorbalance=${params.join(':')}` : 'colorbalance';
|
|
939
|
+
|
|
940
|
+
return new Filter(inputs, [output], filterStr);
|
|
941
|
+
}
|
|
942
|
+
|
|
943
|
+
/**
|
|
944
|
+
* Creates a Gaussian blur filter
|
|
945
|
+
* @param inputs - Input stream labels (must be video)
|
|
946
|
+
* @param sigma - Blur strength (0.01 to 1024, default: 1.0)
|
|
947
|
+
* @param steps - Number of blur steps (1 to 6, default: 1, higher = smoother but slower)
|
|
948
|
+
*/
|
|
949
|
+
export function makeGblur(
|
|
950
|
+
inputs: Label[],
|
|
951
|
+
options: {
|
|
952
|
+
sigma?: number;
|
|
953
|
+
steps?: number;
|
|
954
|
+
} = {},
|
|
955
|
+
): Filter {
|
|
956
|
+
const input = inputs[0];
|
|
957
|
+
|
|
958
|
+
if (input.isAudio) {
|
|
959
|
+
throw new Error(
|
|
960
|
+
`makeGblur: input must be video, got audio (tag: ${input.tag})`,
|
|
961
|
+
);
|
|
962
|
+
}
|
|
963
|
+
|
|
964
|
+
const output = {
|
|
965
|
+
tag: getLabel(),
|
|
966
|
+
isAudio: false,
|
|
967
|
+
};
|
|
968
|
+
|
|
969
|
+
const sigma = options.sigma ?? 1.0;
|
|
970
|
+
const steps = options.steps ?? 1;
|
|
971
|
+
|
|
972
|
+
return new Filter(inputs, [output], `gblur=sigma=${sigma}:steps=${steps}`);
|
|
973
|
+
}
|
|
974
|
+
|
|
975
|
+
/**
|
|
976
|
+
* Creates a box blur filter (simpler, faster blur)
|
|
977
|
+
* @param inputs - Input stream labels (must be video)
|
|
978
|
+
* @param options - Blur parameters
|
|
979
|
+
* - luma_radius (lr): Horizontal luma blur radius (0 to min(w,h)/2)
|
|
980
|
+
* - luma_power (lp): Number of times to apply luma blur (0 to 2)
|
|
981
|
+
* - chroma_radius (cr): Horizontal chroma blur radius (0 to min(w,h)/2)
|
|
982
|
+
* - chroma_power (cp): Number of times to apply chroma blur (0 to 2)
|
|
983
|
+
*/
|
|
984
|
+
export function makeBoxblur(
|
|
985
|
+
inputs: Label[],
|
|
986
|
+
options: {
|
|
987
|
+
luma_radius?: number;
|
|
988
|
+
luma_power?: number;
|
|
989
|
+
chroma_radius?: number;
|
|
990
|
+
chroma_power?: number;
|
|
991
|
+
} = {},
|
|
992
|
+
): Filter {
|
|
993
|
+
const input = inputs[0];
|
|
994
|
+
|
|
995
|
+
if (input.isAudio) {
|
|
996
|
+
throw new Error(
|
|
997
|
+
`makeBoxblur: input must be video, got audio (tag: ${input.tag})`,
|
|
998
|
+
);
|
|
999
|
+
}
|
|
1000
|
+
|
|
1001
|
+
const output = {
|
|
1002
|
+
tag: getLabel(),
|
|
1003
|
+
isAudio: false,
|
|
1004
|
+
};
|
|
1005
|
+
|
|
1006
|
+
const lr = options.luma_radius ?? 2;
|
|
1007
|
+
const lp = options.luma_power ?? 1;
|
|
1008
|
+
const cr = options.chroma_radius ?? lr;
|
|
1009
|
+
const cp = options.chroma_power ?? lp;
|
|
1010
|
+
|
|
1011
|
+
return new Filter(
|
|
1012
|
+
inputs,
|
|
1013
|
+
[output],
|
|
1014
|
+
`boxblur=lr=${lr}:lp=${lp}:cr=${cr}:cp=${cp}`,
|
|
1015
|
+
);
|
|
1016
|
+
}
|
|
1017
|
+
|
|
1018
|
+
/**
|
|
1019
|
+
* Creates an unsharp filter (sharpen or blur)
|
|
1020
|
+
* @param inputs - Input stream labels (must be video)
|
|
1021
|
+
* @param options - Sharpening parameters
|
|
1022
|
+
* - luma_amount: Luma sharpening amount (-2 to 5, default: 1.0, negative = blur)
|
|
1023
|
+
* - chroma_amount: Chroma sharpening amount (-2 to 5, default: 0)
|
|
1024
|
+
*/
|
|
1025
|
+
export function makeUnsharp(
|
|
1026
|
+
inputs: Label[],
|
|
1027
|
+
options: {
|
|
1028
|
+
luma_amount?: number;
|
|
1029
|
+
chroma_amount?: number;
|
|
1030
|
+
} = {},
|
|
1031
|
+
): Filter {
|
|
1032
|
+
const input = inputs[0];
|
|
1033
|
+
|
|
1034
|
+
if (input.isAudio) {
|
|
1035
|
+
throw new Error(
|
|
1036
|
+
`makeUnsharp: input must be video, got audio (tag: ${input.tag})`,
|
|
1037
|
+
);
|
|
1038
|
+
}
|
|
1039
|
+
|
|
1040
|
+
const output = {
|
|
1041
|
+
tag: getLabel(),
|
|
1042
|
+
isAudio: false,
|
|
1043
|
+
};
|
|
1044
|
+
|
|
1045
|
+
const la = options.luma_amount ?? 1.0;
|
|
1046
|
+
const ca = options.chroma_amount ?? 0;
|
|
1047
|
+
|
|
1048
|
+
return new Filter(
|
|
1049
|
+
inputs,
|
|
1050
|
+
[output],
|
|
1051
|
+
`unsharp=luma_amount=${la}:chroma_amount=${ca}`,
|
|
1052
|
+
);
|
|
1053
|
+
}
|
|
1054
|
+
|
|
1055
|
+
/**
|
|
1056
|
+
* Creates a hue adjustment filter
|
|
1057
|
+
* @param inputs - Input stream labels (must be video)
|
|
1058
|
+
* @param options - Hue adjustment parameters
|
|
1059
|
+
* - hue: Hue angle in degrees (0 to 360)
|
|
1060
|
+
* - saturation: Saturation multiplier (-10 to 10, default: 1.0)
|
|
1061
|
+
* - brightness: Brightness adjustment (-10 to 10, default: 0)
|
|
1062
|
+
*/
|
|
1063
|
+
export function makeHue(
|
|
1064
|
+
inputs: Label[],
|
|
1065
|
+
options: {
|
|
1066
|
+
hue?: number;
|
|
1067
|
+
saturation?: number;
|
|
1068
|
+
brightness?: number;
|
|
1069
|
+
} = {},
|
|
1070
|
+
): Filter {
|
|
1071
|
+
const input = inputs[0];
|
|
1072
|
+
|
|
1073
|
+
if (input.isAudio) {
|
|
1074
|
+
throw new Error(
|
|
1075
|
+
`makeHue: input must be video, got audio (tag: ${input.tag})`,
|
|
1076
|
+
);
|
|
1077
|
+
}
|
|
1078
|
+
|
|
1079
|
+
const output = {
|
|
1080
|
+
tag: getLabel(),
|
|
1081
|
+
isAudio: false,
|
|
1082
|
+
};
|
|
1083
|
+
|
|
1084
|
+
const params: string[] = [];
|
|
1085
|
+
if (options.hue !== undefined) params.push(`h=${options.hue}`);
|
|
1086
|
+
if (options.saturation !== undefined) params.push(`s=${options.saturation}`);
|
|
1087
|
+
if (options.brightness !== undefined) params.push(`b=${options.brightness}`);
|
|
1088
|
+
|
|
1089
|
+
const filterStr = params.length > 0 ? `hue=${params.join(':')}` : 'hue';
|
|
1090
|
+
|
|
1091
|
+
return new Filter(inputs, [output], filterStr);
|
|
1092
|
+
}
|
|
1093
|
+
|
|
1094
|
+
/**
|
|
1095
|
+
* Creates a horizontal flip filter (mirrors video left-right)
|
|
1096
|
+
* Note: Only works with video streams
|
|
1097
|
+
*/
|
|
1098
|
+
export function makeHflip(inputs: Label[]): Filter {
|
|
1099
|
+
const input = inputs[0];
|
|
1100
|
+
|
|
1101
|
+
if (input.isAudio) {
|
|
1102
|
+
throw new Error(
|
|
1103
|
+
`makeHflip: input must be video, got audio (tag: ${input.tag})`,
|
|
1104
|
+
);
|
|
1105
|
+
}
|
|
1106
|
+
|
|
1107
|
+
const output = {
|
|
1108
|
+
tag: getLabel(),
|
|
1109
|
+
isAudio: false,
|
|
1110
|
+
};
|
|
1111
|
+
|
|
1112
|
+
return new Filter(inputs, [output], 'hflip');
|
|
1113
|
+
}
|
|
1114
|
+
|
|
1115
|
+
/**
|
|
1116
|
+
* Creates a vertical flip filter (mirrors video top-bottom)
|
|
1117
|
+
* Note: Only works with video streams
|
|
1118
|
+
*/
|
|
1119
|
+
export function makeVflip(inputs: Label[]): Filter {
|
|
1120
|
+
const input = inputs[0];
|
|
1121
|
+
|
|
1122
|
+
if (input.isAudio) {
|
|
1123
|
+
throw new Error(
|
|
1124
|
+
`makeVflip: input must be video, got audio (tag: ${input.tag})`,
|
|
1125
|
+
);
|
|
1126
|
+
}
|
|
1127
|
+
|
|
1128
|
+
const output = {
|
|
1129
|
+
tag: getLabel(),
|
|
1130
|
+
isAudio: false,
|
|
1131
|
+
};
|
|
1132
|
+
|
|
1133
|
+
return new Filter(inputs, [output], 'vflip');
|
|
1134
|
+
}
|
|
1135
|
+
|
|
1136
|
+
/**
|
|
1137
|
+
* Creates a chromakey filter for green/blue screen removal
|
|
1138
|
+
* @param inputs - Input stream labels (must be video)
|
|
1139
|
+
* @param options - Chromakey parameters
|
|
1140
|
+
* - color: Color to key out (e.g., 'green', '0x00FF00', '#00FF00')
|
|
1141
|
+
* - similarity: How similar colors need to be to match (0.01 to 1.0, default: 0.01)
|
|
1142
|
+
* - blend: Blend percentage for edges (0.0 to 1.0, default: 0.0)
|
|
1143
|
+
*/
|
|
1144
|
+
export function makeChromakey(
|
|
1145
|
+
inputs: Label[],
|
|
1146
|
+
options: {
|
|
1147
|
+
color: string;
|
|
1148
|
+
similarity?: number;
|
|
1149
|
+
blend?: number;
|
|
1150
|
+
},
|
|
1151
|
+
): Filter {
|
|
1152
|
+
const input = inputs[0];
|
|
1153
|
+
|
|
1154
|
+
if (input.isAudio) {
|
|
1155
|
+
throw new Error(
|
|
1156
|
+
`makeChromakey: input must be video, got audio (tag: ${input.tag})`,
|
|
1157
|
+
);
|
|
1158
|
+
}
|
|
1159
|
+
|
|
1160
|
+
const output = {
|
|
1161
|
+
tag: getLabel(),
|
|
1162
|
+
isAudio: false,
|
|
1163
|
+
};
|
|
1164
|
+
|
|
1165
|
+
const similarity = options.similarity ?? 0.01;
|
|
1166
|
+
const blend = options.blend ?? 0.0;
|
|
1167
|
+
|
|
1168
|
+
return new Filter(
|
|
1169
|
+
inputs,
|
|
1170
|
+
[output],
|
|
1171
|
+
`chromakey=${options.color}:${similarity}:${blend}`,
|
|
1172
|
+
);
|
|
1173
|
+
}
|
|
1174
|
+
|
|
1175
|
+
/**
|
|
1176
|
+
* Creates a despill filter to remove color spill from chromakey
|
|
1177
|
+
* @param inputs - Input stream labels (must be video)
|
|
1178
|
+
* @param options - Despill parameters
|
|
1179
|
+
* - type: Color to despill ('green' or 'blue', default: 'green')
|
|
1180
|
+
* - mix: Mix factor (0.0 to 1.0, default: 0.5)
|
|
1181
|
+
* - expand: Expand factor (0.0 to 1.0, default: 0.0)
|
|
1182
|
+
*/
|
|
1183
|
+
export function makeDespill(
|
|
1184
|
+
inputs: Label[],
|
|
1185
|
+
options: {
|
|
1186
|
+
type?: 'green' | 'blue';
|
|
1187
|
+
mix?: number;
|
|
1188
|
+
expand?: number;
|
|
1189
|
+
} = {},
|
|
1190
|
+
): Filter {
|
|
1191
|
+
const input = inputs[0];
|
|
1192
|
+
|
|
1193
|
+
if (input.isAudio) {
|
|
1194
|
+
throw new Error(
|
|
1195
|
+
`makeDespill: input must be video, got audio (tag: ${input.tag})`,
|
|
1196
|
+
);
|
|
1197
|
+
}
|
|
1198
|
+
|
|
1199
|
+
const output = {
|
|
1200
|
+
tag: getLabel(),
|
|
1201
|
+
isAudio: false,
|
|
1202
|
+
};
|
|
1203
|
+
|
|
1204
|
+
const type = options.type ?? 'green';
|
|
1205
|
+
const mix = options.mix ?? 0.5;
|
|
1206
|
+
const expand = options.expand ?? 0.0;
|
|
1207
|
+
|
|
1208
|
+
return new Filter(
|
|
1209
|
+
inputs,
|
|
1210
|
+
[output],
|
|
1211
|
+
`despill=type=${type}:mix=${mix}:expand=${expand}`,
|
|
1212
|
+
);
|
|
1213
|
+
}
|
|
1214
|
+
|
|
1215
|
+
export function makeFade(
|
|
1216
|
+
inputs: Label[],
|
|
1217
|
+
options: {
|
|
1218
|
+
fades: Array<{
|
|
1219
|
+
type: 'in' | 'out';
|
|
1220
|
+
startTime: Millisecond;
|
|
1221
|
+
duration: Millisecond;
|
|
1222
|
+
color?: string;
|
|
1223
|
+
curve?: string;
|
|
1224
|
+
}>;
|
|
1225
|
+
},
|
|
1226
|
+
): Filter {
|
|
1227
|
+
const input = inputs[0];
|
|
1228
|
+
|
|
1229
|
+
if (!options.fades || options.fades.length === 0) {
|
|
1230
|
+
throw new Error(`makeFade: at least one fade operation is required`);
|
|
1231
|
+
}
|
|
1232
|
+
|
|
1233
|
+
const output = {
|
|
1234
|
+
tag: getLabel(),
|
|
1235
|
+
isAudio: input.isAudio,
|
|
1236
|
+
};
|
|
1237
|
+
|
|
1238
|
+
// Use 'afade' for audio, 'fade' for video
|
|
1239
|
+
const filterName = input.isAudio ? 'afade' : 'fade';
|
|
1240
|
+
|
|
1241
|
+
// Build fade filter string by chaining multiple fade operations
|
|
1242
|
+
const fadeStrings = options.fades.map((fade) => {
|
|
1243
|
+
const params: string[] = [];
|
|
1244
|
+
params.push(`t=${fade.type}`);
|
|
1245
|
+
params.push(`st=${ms(fade.startTime)}`);
|
|
1246
|
+
params.push(`d=${ms(fade.duration)}`);
|
|
1247
|
+
|
|
1248
|
+
// Color parameter only applies to video (fade, not afade)
|
|
1249
|
+
if (fade.color && !input.isAudio) {
|
|
1250
|
+
params.push(`color=${fade.color}`);
|
|
1251
|
+
}
|
|
1252
|
+
|
|
1253
|
+
// Curve parameter works for both video and audio
|
|
1254
|
+
if (fade.curve) {
|
|
1255
|
+
params.push(`curve=${fade.curve}`);
|
|
1256
|
+
}
|
|
1257
|
+
|
|
1258
|
+
return `${filterName}=${params.join(':')}`;
|
|
1259
|
+
});
|
|
1260
|
+
|
|
1261
|
+
return new Filter(inputs, [output], fadeStrings.join(','));
|
|
1262
|
+
}
|
|
1263
|
+
|
|
1264
|
+
/**
|
|
1265
|
+
* Creates a color source filter to generate blank video
|
|
1266
|
+
* @param options - Video parameters
|
|
1267
|
+
* - duration: Duration in milliseconds
|
|
1268
|
+
* - width: Video width in pixels
|
|
1269
|
+
* - height: Video height in pixels
|
|
1270
|
+
* - fps: Frame rate (default: 30)
|
|
1271
|
+
* - color: Color (default: 'black', supports alpha with format '#RRGGBBAA')
|
|
1272
|
+
* @returns Filter with video output
|
|
1273
|
+
*/
|
|
1274
|
+
export function makeColor(options: {
|
|
1275
|
+
duration: Millisecond;
|
|
1276
|
+
width: number;
|
|
1277
|
+
height: number;
|
|
1278
|
+
fps?: number;
|
|
1279
|
+
color?: string;
|
|
1280
|
+
}): Filter {
|
|
1281
|
+
const output = {
|
|
1282
|
+
tag: getLabel(),
|
|
1283
|
+
isAudio: false,
|
|
1284
|
+
};
|
|
1285
|
+
|
|
1286
|
+
const color = options.color ?? 'black';
|
|
1287
|
+
const fps = options.fps ?? 30;
|
|
1288
|
+
|
|
1289
|
+
// Check if color has alpha channel (8-digit hex with alpha)
|
|
1290
|
+
const hasAlpha = color.length === 9 && color.startsWith('#');
|
|
1291
|
+
|
|
1292
|
+
// color source generates video, add format filter for alpha if needed
|
|
1293
|
+
let filterStr = `color=c=${color}:s=${options.width}x${options.height}:r=${fps}:d=${ms(options.duration)}`;
|
|
1294
|
+
|
|
1295
|
+
if (hasAlpha) {
|
|
1296
|
+
// Add format filter to ensure proper alpha channel handling
|
|
1297
|
+
filterStr += ',format=yuva420p';
|
|
1298
|
+
}
|
|
1299
|
+
|
|
1300
|
+
return new Filter([], [output], filterStr);
|
|
1301
|
+
}
|
|
1302
|
+
|
|
1303
|
+
/**
|
|
1304
|
+
* Creates an anullsrc filter to generate silent audio
|
|
1305
|
+
* @param options - Audio parameters
|
|
1306
|
+
* - duration: Duration in milliseconds
|
|
1307
|
+
* - channel_layout: Audio channel layout (default: 'stereo')
|
|
1308
|
+
* - sample_rate: Sample rate in Hz (default: 48000)
|
|
1309
|
+
* @returns Filter with audio output
|
|
1310
|
+
*/
|
|
1311
|
+
export function makeAnullsrc(options: {
|
|
1312
|
+
duration: Millisecond;
|
|
1313
|
+
channel_layout?: string;
|
|
1314
|
+
sample_rate?: number;
|
|
1315
|
+
}): Filter {
|
|
1316
|
+
const output = {
|
|
1317
|
+
tag: getLabel(),
|
|
1318
|
+
isAudio: true,
|
|
1319
|
+
};
|
|
1320
|
+
|
|
1321
|
+
const channelLayout = options.channel_layout ?? 'stereo';
|
|
1322
|
+
const sampleRate = options.sample_rate ?? 48000;
|
|
1323
|
+
const duration = options.duration;
|
|
1324
|
+
|
|
1325
|
+
// anullsrc generates infinite silence, so we trim to the desired duration
|
|
1326
|
+
const filterStr = `anullsrc=channel_layout=${channelLayout}:sample_rate=${sampleRate},atrim=duration=${ms(duration)},asetpts=PTS-STARTPTS`;
|
|
1327
|
+
|
|
1328
|
+
return new Filter([], [output], filterStr);
|
|
1329
|
+
}
|
|
1330
|
+
|
|
1331
|
+
/**
|
|
1332
|
+
* Creates an amix filter to mix multiple audio streams
|
|
1333
|
+
* @param inputs - Input stream labels (must all be audio)
|
|
1334
|
+
* @param options - Mix parameters
|
|
1335
|
+
* - duration: Output duration mode ('longest', 'shortest', 'first', default: 'longest')
|
|
1336
|
+
* - dropout_transition: Transition time when input ends in seconds (default: 2)
|
|
1337
|
+
* - weights: Array of weights for each input (e.g., [1, 0.5] makes second input quieter)
|
|
1338
|
+
* - normalize: If true, automatically normalize weights to prevent clipping (default: true)
|
|
1339
|
+
*/
|
|
1340
|
+
export function makeAmix(
|
|
1341
|
+
inputs: Label[],
|
|
1342
|
+
options: {
|
|
1343
|
+
duration?: 'longest' | 'shortest' | 'first';
|
|
1344
|
+
dropout_transition?: number;
|
|
1345
|
+
weights?: number[];
|
|
1346
|
+
normalize?: boolean;
|
|
1347
|
+
} = {},
|
|
1348
|
+
): Filter {
|
|
1349
|
+
if (inputs.length < 2) {
|
|
1350
|
+
throw new Error('makeAmix: requires at least 2 input streams');
|
|
1351
|
+
}
|
|
1352
|
+
|
|
1353
|
+
// Validate that all inputs are audio
|
|
1354
|
+
for (const input of inputs) {
|
|
1355
|
+
if (!input.isAudio) {
|
|
1356
|
+
throw new Error(
|
|
1357
|
+
`makeAmix: all inputs must be audio, got video (tag: ${input.tag})`,
|
|
1358
|
+
);
|
|
1359
|
+
}
|
|
1360
|
+
}
|
|
1361
|
+
|
|
1362
|
+
const output = {
|
|
1363
|
+
tag: getLabel(),
|
|
1364
|
+
isAudio: true,
|
|
1365
|
+
};
|
|
1366
|
+
|
|
1367
|
+
const params: string[] = [];
|
|
1368
|
+
params.push(`inputs=${inputs.length}`);
|
|
1369
|
+
|
|
1370
|
+
if (options.duration) {
|
|
1371
|
+
params.push(`duration=${options.duration}`);
|
|
1372
|
+
}
|
|
1373
|
+
|
|
1374
|
+
if (options.dropout_transition !== undefined) {
|
|
1375
|
+
params.push(`dropout_transition=${options.dropout_transition}`);
|
|
1376
|
+
}
|
|
1377
|
+
|
|
1378
|
+
if (options.weights && options.weights.length > 0) {
|
|
1379
|
+
// Ensure weights array matches inputs length
|
|
1380
|
+
const weights =
|
|
1381
|
+
options.weights.length === inputs.length
|
|
1382
|
+
? options.weights
|
|
1383
|
+
: [
|
|
1384
|
+
...options.weights,
|
|
1385
|
+
...Array(inputs.length - options.weights.length).fill(1),
|
|
1386
|
+
];
|
|
1387
|
+
|
|
1388
|
+
params.push(`weights=${weights.join(' ')}`);
|
|
1389
|
+
}
|
|
1390
|
+
|
|
1391
|
+
if (options.normalize !== undefined) {
|
|
1392
|
+
params.push(`normalize=${options.normalize ? '1' : '0'}`);
|
|
1393
|
+
}
|
|
1394
|
+
|
|
1395
|
+
return new Filter(inputs, [output], `amix=${params.join(':')}`);
|
|
1396
|
+
}
|
|
1397
|
+
|
|
1398
|
+
/**
|
|
1399
|
+
* Wraps a label in brackets
|
|
1400
|
+
*/
|
|
1401
|
+
function wrap(label: string): string {
|
|
1402
|
+
return `[${label}]`;
|
|
1403
|
+
}
|