ffmpeg-framecraft 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/engine.js ADDED
@@ -0,0 +1,365 @@
1
+ const path = require('path');
2
+ const fs = require('fs');
3
+ const { run, probe } = require('./executor');
4
+ const { cropTo916Filter, subtitleFilter, amixFilter, buildSlicesWithTransitionsFilter } = require('./filters');
5
+ const shortsPreset = require('./presets/shorts');
6
+ const { getTransition } = require('./presets/transitions');
7
+ const { getPreset } = require('./presets/presets');
8
+ const { timeStringToSeconds } = require('./utils');
9
+
10
/**
 * Normalize a slice boundary to seconds.
 *
 * @param {number|string} value - Seconds, or a time string (e.g. "mm:ss.ms")
 * @returns {number} Time in seconds
 * @throws {Error} When value is neither a usable number nor a string
 */
function sliceTimeToSeconds(value) {
  switch (typeof value) {
    case 'number':
      if (!Number.isNaN(value)) return value;
      break;
    case 'string':
      return timeStringToSeconds(value);
    default:
      break;
  }
  throw new Error(`Invalid slice time: ${value}`);
}
15
+
16
/**
 * FramecraftEngine - FFmpeg-based video processing (crop, slice, transitions, subtitles, audio).
 *
 * Every method delegates process management to the shared executor (`run`) and
 * metadata lookup to `probe`, and resolves once FFmpeg has finished writing
 * the output file.
 */
class FramecraftEngine {
  /**
   * Crop video to 9:16 vertical format (720x1280).
   * Uses ffprobe to get dimensions, then applies centered crop + scale.
   *
   * @param {string} inputPath - Input video path
   * @param {string} outputPath - Output video path
   * @param {object} [opts] - Options (future: quality presets, etc.)
   * @param {function(object): void} [opts.onProgress] - Progress callback
   * @returns {Promise<void>}
   */
  async cropTo916(inputPath, outputPath, opts = {}) {
    const meta = await probe(inputPath);
    const filter = cropTo916Filter(meta.width, meta.height);

    await run({
      input: inputPath,
      output: outputPath,
      videoFilters: filter,
      outputOptions: shortsPreset.outputOptions(),
      // probe() returns 0 when duration is unknown; suppress percent math then.
      expectedDuration: meta.duration || undefined,
      onProgress: opts.onProgress,
    });
  }

  /**
   * Slice video by timestamp range.
   *
   * @param {string} inputPath - Input video path
   * @param {string} outputPath - Output video path
   * @param {{ start: number|string, end: number|string }} range - Start and end time (seconds or "mm:ss.ms")
   * @param {object} [opts] - Options
   * @param {function(object): void} [opts.onProgress] - Progress callback
   * @returns {Promise<void>}
   */
  async slice(inputPath, outputPath, range, opts = {}) {
    let { start, end } = range;
    start = typeof start === 'string' ? timeStringToSeconds(start) : start;
    end = typeof end === 'string' ? timeStringToSeconds(end) : end;
    const duration =
      typeof start === 'number' && typeof end === 'number'
        ? end - start
        : undefined;

    await run({
      input: inputPath,
      output: outputPath,
      seek: start,
      duration,
      outputOptions: shortsPreset.outputOptions(),
      expectedDuration: typeof duration === 'number' ? duration : undefined,
      onProgress: opts.onProgress,
    });
  }

  /**
   * Add SRT subtitle overlay to video.
   *
   * @param {string} inputPath - Input video path
   * @param {string} outputPath - Output video path
   * @param {string} srtPath - Path to SRT file
   * @param {object} [opts] - Style options (see subtitleFilter) plus onProgress
   * @param {function(object): void} [opts.onProgress] - Progress callback
   * @returns {Promise<void>}
   */
  async addSubtitles(inputPath, outputPath, srtPath, opts = {}) {
    // Everything except onProgress is treated as subtitle style.
    const { onProgress, ...style } = opts;
    const filter = subtitleFilter(srtPath, style);

    await run({
      input: inputPath,
      output: outputPath,
      videoFilters: filter,
      outputOptions: shortsPreset.outputOptions(),
      onProgress,
    });
  }

  /**
   * Extract a single frame as thumbnail image.
   *
   * @param {string} inputPath - Input video path
   * @param {string} outputPath - Output path (e.g. frame.jpg)
   * @param {number|string} [time=0] - Timestamp in seconds or "mm:ss.ms"
   * @returns {Promise<void>}
   */
  async extractThumbnail(inputPath, outputPath, time = 0) {
    const ext = path.extname(outputPath).toLowerCase();
    // PNG needs its own muxer; everything else goes through the generic
    // image2 muxer (jpg, webp, ...).
    const format = ext === '.png' ? 'png' : 'image2';

    await run({
      input: inputPath,
      output: outputPath,
      seek: time,
      frames: 1,
      format,
    });
  }

  /**
   * Mix background music with video.
   * Video keeps original stream; audio is mix of video audio + music (or music only if no video audio).
   *
   * @param {string} inputPath - Input video path
   * @param {string} outputPath - Output video path
   * @param {string} musicPath - Path to music/audio file
   * @param {object} [opts] - Options
   * @param {number} [opts.musicVolume=1] - Music volume 0-1 (future)
   * @param {function(object): void} [opts.onProgress] - Progress callback
   * @returns {Promise<void>}
   */
  async addBackgroundMusic(inputPath, outputPath, musicPath, opts = {}) {
    const { onProgress } = opts;
    const meta = await probe(inputPath);
    const filterComplex = amixFilter(meta.hasAudio);

    await run({
      input: inputPath,
      inputs: [musicPath],
      output: outputPath,
      complexFilter: filterComplex,
      complexFilterMap: ['0:v', '[aout]'],
      outputOptions: [
        '-c:v', 'libx264', '-preset', 'medium', '-crf', '23',
        '-c:a', 'aac', '-b:a', '128k',
      ],
      expectedDuration: meta.duration || undefined,
      onProgress,
    });
  }

  /**
   * Extract multiple slices from one video and join them with transitions.
   * Use this to build a single output from several time ranges (e.g. 0–10s, 20–30s, 40–50s) with fade/wipe/slide between clips.
   *
   * @param {string} inputPath - Input video path
   * @param {string} outputPath - Output video path
   * @param {object} options - Options
   * @param {Array<{ start: number|string, end: number|string }>} options.slices - Time ranges (seconds or "H:MM:SS" / "MM:SS")
   * @param {string|{ type: string, duration: number }} [options.transition='fade'] - Preset name (e.g. 'fade', 'wipeleft', 'dissolve') or { type, duration } in seconds
   * @param {string|object} [options.preset] - Named preset (or preset object) supplying a default transition when options.transition is omitted
   * @param {object} [opts] - Additional options
   * @param {function(object): void} [opts.onProgress] - Progress callback
   * @returns {Promise<void>}
   */
  async slicesWithTransitions(inputPath, outputPath, options, opts = {}) {
    const { slices, preset } = options;
    let transitionOption = options.transition;
    if (transitionOption === undefined && preset) {
      const p = typeof preset === 'string' ? getPreset(preset) : preset;
      transitionOption = p.transition != null
        ? { type: p.transition, duration: p.transitionDuration ?? 0.5 }
        : 'fade';
    }
    transitionOption = transitionOption ?? 'fade';
    if (!Array.isArray(slices) || slices.length === 0) {
      throw new Error('slicesWithTransitions requires a non-empty slices array');
    }

    const meta = await probe(inputPath);
    const fileDuration = meta.duration || 0;

    const defaultTransition = getTransition(transitionOption);
    const slicesNormalized = slices.map((slice, index) => {
      let startSeconds = sliceTimeToSeconds(slice.start);
      let endSeconds = sliceTimeToSeconds(slice.end);

      if (fileDuration > 0) {
        // Clamp the range to the real file length; never let start go
        // negative (a negative trim start is invalid for FFmpeg).
        endSeconds = Math.min(endSeconds, fileDuration);
        startSeconds = Math.max(0, Math.min(startSeconds, endSeconds - 0.01));
      }
      const duration = endSeconds - startSeconds;
      if (duration <= 0) {
        throw new Error(
          `slicesWithTransitions: slice ${index} has invalid or zero duration (start=${slice.start}, end=${slice.end}); file duration is ${fileDuration}s`
        );
      }

      // For boundary between slice[i-1] -> slice[i], use slice.transition if provided,
      // otherwise fall back to the common/default transition.
      let transition = null;
      if (index > 0) {
        const transitionConfig = {
          type: slice.transition?.type ? slice.transition.type : defaultTransition.type,
          duration: slice.transition?.duration ? slice.transition.duration : defaultTransition.duration,
        };
        transition = getTransition(transitionConfig);
      }

      return { startSeconds, endSeconds, transition };
    });

    const durations = slicesNormalized.map(
      ({ startSeconds, endSeconds }) => endSeconds - startSeconds
    );

    // Mirror the filter builder's math so expectedDuration matches the real
    // output: each boundary overlaps the two clips by the (clamped) fade length.
    let outputDuration = durations[0];
    for (let i = 1; i < durations.length; i++) {
      const prevDuration = durations[i - 1];
      const currentDuration = durations[i];
      const sliceTransition = slicesNormalized[i].transition;
      const maxT = Math.min(prevDuration, currentDuration) - 0.01;
      const t = Math.min(sliceTransition.duration, Math.max(0.01, maxT));
      outputDuration = outputDuration + currentDuration - t;
    }

    const { filterComplex, mapVideo, mapAudio } = buildSlicesWithTransitionsFilter(
      slicesNormalized,
      meta.hasAudio
    );

    const complexFilterMap = [mapVideo];
    if (mapAudio) complexFilterMap.push(mapAudio);

    await run({
      input: inputPath,
      output: outputPath,
      complexFilter: filterComplex,
      complexFilterMap,
      outputOptions: shortsPreset.outputOptions(),
      expectedDuration: outputDuration,
      onProgress: opts.onProgress,
    });
  }

  /**
   * Apply a named preset or run a pipeline of operations sequentially.
   * Intermediate files are written next to outputPath and removed afterwards,
   * even if a step fails.
   *
   * @param {string} inputPath - Input video path
   * @param {string} outputPath - Output video path
   * @param {string|Array<{ op: string, [key: string]: any }>} [pipeline='shorts'] - Preset name or list of { op, ...args }
   * @param {object} [opts] - Options
   * @param {function(object): void} [opts.onProgress] - Progress callback
   * @returns {Promise<void>}
   */
  async compose(inputPath, outputPath, pipeline = 'shorts', opts = {}) {
    if (pipeline === 'shorts') {
      await this.cropTo916(inputPath, outputPath, opts);
      return;
    }

    if (!Array.isArray(pipeline) || pipeline.length === 0) {
      throw new Error(`Unknown preset or empty pipeline: ${pipeline}`);
    }

    let current = inputPath;
    const tempDir = path.dirname(outputPath);
    const intermediates = [];

    try {
      for (let i = 0; i < pipeline.length; i++) {
        const step = pipeline[i];
        const isLast = i === pipeline.length - 1;
        const stepOutputPath = typeof step === 'object' && step.outputPath != null
          ? step.outputPath
          : outputPath;
        const nextPath = isLast ? stepOutputPath : path.join(tempDir, `_compose_step_${i}.mp4`);

        if (!isLast) {
          intermediates.push(nextPath);
        }

        const op = typeof step === 'string' ? step : step.op;
        if (!op) throw new Error(`Pipeline step ${i} missing 'op'`);

        switch (op) {
          case 'crop916':
            await this.cropTo916(current, nextPath, opts);
            break;
          case 'slice':
            if (step.start == null || step.end == null) {
              throw new Error('slice step requires start and end');
            }
            await this.slice(current, nextPath, { start: step.start, end: step.end }, opts);
            break;
          case 'subtitles':
            if (!step.srtPath) throw new Error('subtitles step requires srtPath');
            await this.addSubtitles(current, nextPath, step.srtPath, opts);
            break;
          case 'music':
            if (!step.musicPath) throw new Error('music step requires musicPath');
            await this.addBackgroundMusic(current, nextPath, step.musicPath, opts);
            break;
          case 'slicesWithTransitions':
            if (!step.slices?.length) throw new Error('slicesWithTransitions step requires slices array');
            await this.slicesWithTransitions(current, nextPath, {
              slices: step.slices,
              transition: step.transition,
              preset: step.preset,
            }, opts);
            break;
          case 'audioOnly':
            if (!step.outputPath) throw new Error('audioOnly step requires outputPath');
            await this.extractAudioOnly(current, step.outputPath, opts);
            break;
          default:
            throw new Error(`Unknown pipeline op: ${op}`);
        }

        // audioOnly writes to its own outputPath and produces no video at
        // nextPath, so the video chain continues from the previous step.
        if (op !== 'audioOnly') {
          current = nextPath;
        }
      }
    } finally {
      // Best-effort cleanup of intermediates, including on failure; files a
      // failed step never produced are tolerated (ENOENT).
      for (const file of intermediates) {
        try {
          await fs.promises.unlink(file);
        } catch (e) {
          if (e.code !== 'ENOENT') throw e;
        }
      }
    }
  }

  /**
   * Extract audio only from video. Converts to MP3 or M4A based on output file extension.
   *
   * @param {string} inputPath - Input video path
   * @param {string} outputPath - Output audio path (.m4a = AAC, .mp3 = MP3)
   * @param {object} [opts] - Options
   * @param {number} [opts.bitrate] - Audio bitrate in kbps (default: 256 for AAC, 320 for MP3)
   * @param {number} [opts.sampleRate=48000] - Sample rate in Hz
   * @param {function(object): void} [opts.onProgress] - Progress callback
   * @returns {Promise<void>}
   */
  async extractAudioOnly(inputPath, outputPath, opts = {}) {
    const { onProgress, sampleRate = 48000 } = opts;
    const ext = path.extname(outputPath).toLowerCase();
    const isMp3 = ext === '.mp3';

    const bitrate = opts.bitrate ?? (isMp3 ? 320 : 256);
    const outputOptions = [
      '-vn', // drop the video stream entirely
      '-c:a',
      isMp3 ? 'libmp3lame' : 'aac',
      '-b:a',
      `${bitrate}k`,
      '-ar',
      String(sampleRate),
    ];

    await run({
      input: inputPath,
      output: outputPath,
      outputOptions,
      onProgress,
    });
  }
}
364
+
365
// Public entry point: consumers construct `new FramecraftEngine()` and call its methods.
module.exports = { FramecraftEngine };
@@ -0,0 +1,158 @@
1
+ const ffmpeg = require('fluent-ffmpeg');
2
+
3
+ /**
4
+ * FFmpegExecutor - Single point of FFmpeg invocation.
5
+ * Wraps fluent-ffmpeg; can be swapped for raw spawn in Phase 2.
6
+ *
7
+ * Use complexFilter + complexFilterMap for -filter_complex (recommended) — uses
8
+ * fluent-ffmpeg's native API. Pass outputOptions only for encoding options.
9
+ *
10
+ * @typedef {object} ExecutorConfig
11
+ * @property {string} input - Input file path
12
+ * @property {string|string[]} [inputs] - Additional inputs (for multi-input commands)
13
+ * @property {string} output - Output file path
14
+ * @property {string|string[]} [videoFilters] - Video filter chain (-vf)
15
+ * @property {string|string[]} [audioFilters] - Audio filter chain (-af)
16
+ * @property {string} [complexFilter] - Full -filter_complex string (use fluent API; do not put in outputOptions)
17
+ * @property {string|string[]} [complexFilterMap] - Streams to map from filter (e.g. '[vout]' or ['[vout]','[aout]'])
18
+ * @property {string|string[]} [inputOptions] - Input options
19
+ * @property {string|string[]} [outputOptions] - Output options (encoding only; no -filter_complex/-map when using complexFilter)
20
+ * @property {number|string} [seek] - Input start time (seconds or timestamp)
21
+ * @property {number|string} [duration] - Output duration (seconds or timestamp)
22
+ * @property {number} [frames] - Number of frames to encode
23
+ * @property {string} [format] - Output format (e.g. 'mp4', 'image2')
24
+ * @property {number} [expectedDuration] - Expected output duration in seconds (for percent calculation)
25
+ * @property {function(object): void} [onProgress] - Progress callback
26
+ */
27
+
28
/**
 * Convert an FFmpeg "HH:MM:SS.xx" timemark string into seconds.
 * Returns 0 for missing, malformed, or unparseable values.
 *
 * @param {string} timemark - fluent-ffmpeg progress timemark
 * @returns {number} Elapsed time in seconds (0 when unparseable)
 */
function timemarkToSeconds(timemark) {
  if (!timemark) return 0;
  const pieces = String(timemark).split(':');
  if (pieces.length !== 3) return 0;
  const secs = parseFloat(pieces[2]);
  if (Number.isNaN(secs)) return 0;
  const hours = Number(pieces[0]);
  const minutes = Number(pieces[1]);
  return hours * 3600 + minutes * 60 + secs;
}
37
+
38
/**
 * Run FFmpeg with the given configuration.
 * Builds a fluent-ffmpeg command from the declarative config, wires progress
 * reporting, and resolves when the process ends (rejects on FFmpeg error).
 *
 * @param {ExecutorConfig} config
 * @returns {Promise<void>}
 */
function run(config) {
  return new Promise((resolve, reject) => {
    const {
      input,
      inputs = [],
      output,
      videoFilters,
      audioFilters,
      complexFilter,
      complexFilterMap,
      inputOptions,
      outputOptions,
      seek,
      duration,
      frames,
      format,
      expectedDuration,
      onProgress,
    } = config;

    let command = ffmpeg(input);

    // Extra inputs (e.g. a music track) follow the primary input.
    const extraInputs = Array.isArray(inputs) ? inputs : [inputs].filter(Boolean);
    for (const inp of extraInputs) {
      command = command.input(inp);
    }

    if (seek != null) {
      command = command.seekInput(seek);
    }
    if (inputOptions && inputOptions.length > 0) {
      command = command.inputOptions(inputOptions);
    }

    if (duration != null) {
      command = command.duration(duration);
    }
    if (frames != null) {
      command = command.frames(frames);
    }
    if (videoFilters) {
      const vf = Array.isArray(videoFilters) ? videoFilters : [videoFilters];
      command = command.videoFilters(vf);
    }
    if (audioFilters) {
      const af = Array.isArray(audioFilters) ? audioFilters : [audioFilters];
      command = command.audioFilters(af);
    }
    if (format) {
      command = command.format(format);
    }

    if (complexFilter) {
      const mapArr = complexFilterMap != null
        ? (Array.isArray(complexFilterMap) ? complexFilterMap : [complexFilterMap])
        : [];
      command = command.complexFilter(complexFilter, mapArr.length ? mapArr : undefined);
    }
    if (outputOptions && outputOptions.length > 0) {
      // Normalize to an array before passing along: the config typedef allows
      // string|string[], and spreading a plain string would hand fluent-ffmpeg
      // one option per character.
      const outArr = Array.isArray(outputOptions) ? outputOptions : [outputOptions];
      command = command.outputOptions(outArr);
    }

    command = command.output(output);

    if (typeof onProgress === 'function') {
      command.on('progress', (progress) => {
        const next = { ...progress };
        // fluent-ffmpeg's own percent is unreliable without an input duration;
        // recompute it from the timemark when the caller supplied the expected
        // output length, clamped to [0, 100].
        if (expectedDuration && expectedDuration > 0 && next.timemark) {
          const currentSeconds = timemarkToSeconds(next.timemark);
          const rawPercent = (currentSeconds / expectedDuration) * 100;
          if (Number.isFinite(rawPercent)) {
            next.percent = Math.max(0, Math.min(100, rawPercent));
          }
        }
        onProgress(next);
      });
    }
    command.on('error', reject);
    command.on('end', () => resolve());

    command.run();
  });
}
130
+
131
/**
 * Get video metadata (width, height, duration, audio presence) via ffprobe.
 *
 * @param {string} inputPath - Path to the media file to inspect
 * @returns {Promise<{ width: number, height: number, duration: number, hasAudio: boolean }>}
 */
function probe(inputPath) {
  return new Promise((resolve, reject) => {
    ffmpeg.ffprobe(inputPath, (err, metadata) => {
      if (err) {
        reject(err);
        return;
      }

      const streams = metadata.streams ?? [];
      const video = streams.find((s) => s.codec_type === 'video');
      const audio = streams.find((s) => s.codec_type === 'audio');

      resolve({
        width: video?.width ?? 0,
        height: video?.height ?? 0,
        duration: metadata.format?.duration ?? 0,
        hasAudio: Boolean(audio),
      });
    });
  });
}
154
+
155
// Executor public API: run() drives FFmpeg, probe() wraps ffprobe metadata.
module.exports = {
  run,
  probe,
};
package/src/filters.js ADDED
@@ -0,0 +1,173 @@
1
+ const path = require('path');
2
+
3
/** @constant {number} Shorts target width in pixels */
const SHORTS_WIDTH = 720;
/** @constant {number} Shorts target height in pixels */
const SHORTS_HEIGHT = 1280;

/**
 * Build FFmpeg -vf filter string for 9:16 vertical crop (Shorts/TikTok format).
 * Crops a centered 9:16 region from the source, then scales to 720x1280.
 * If the source is already narrower than 9:16, the full width is kept and the
 * height is cropped instead (centered), so the crop never exceeds the frame.
 *
 * @param {number} width - Source video width
 * @param {number} height - Source video height
 * @returns {string} FFmpeg video filter string (crop,scale)
 */
function cropTo916Filter(width, height) {
  // Width of a 9:16 region at full source height.
  let cropWidth = Math.floor((height * 9) / 16);
  let cropHeight = height;

  if (cropWidth > width) {
    // Source narrower than 9:16: the naive crop width would exceed the frame
    // and produce a negative x offset (invalid FFmpeg crop). Keep full width
    // and crop the height to a 9:16 region instead.
    cropWidth = width;
    cropHeight = Math.floor((width * 16) / 9);
  }

  const x = Math.floor((width - cropWidth) / 2);
  const y = Math.floor((height - cropHeight) / 2);

  const crop = `crop=${cropWidth}:${cropHeight}:${x}:${y}`;
  const scale = `scale=${SHORTS_WIDTH}:${SHORTS_HEIGHT}`;

  return `${crop},${scale}`;
}
27
+
28
/**
 * Build FFmpeg subtitles filter string.
 * The SRT path is made absolute and escaped for FFmpeg filter parsing
 * (backslashes and colons).
 *
 * @param {string} srtPath - Path to SRT subtitle file
 * @param {object} [style] - Optional ASS force_style for future AI caption styling
 * @param {string} [style.fontName] - Font name
 * @param {string} [style.fontSize] - Font size
 * @param {string} [style.primaryColour] - Primary colour (ASS format)
 * @param {string} [style.outlineColour] - Outline colour
 * @param {string} [style.backColour] - Background colour
 * @param {number} [style.outline] - Outline thickness (0 is valid)
 * @param {number} [style.shadow] - Shadow depth (0 is valid)
 * @returns {string} FFmpeg subtitles filter string
 */
function subtitleFilter(srtPath, style = {}) {
  const resolved = path.resolve(srtPath);
  // Escape backslashes and colons so the path survives filter-graph parsing.
  const safePath = resolved.replace(/\\/g, '\\\\').replace(/:/g, '\\:');

  // [key, value, include?] — colour/name fields are skipped when falsy, while
  // numeric outline/shadow are included whenever defined (0 is meaningful).
  const entries = [
    ['FontName', style.fontName, Boolean(style.fontName)],
    ['FontSize', style.fontSize, Boolean(style.fontSize)],
    ['PrimaryColour', style.primaryColour, Boolean(style.primaryColour)],
    ['OutlineColour', style.outlineColour, Boolean(style.outlineColour)],
    ['BackColour', style.backColour, Boolean(style.backColour)],
    ['Outline', style.outline, style.outline !== undefined],
    ['Shadow', style.shadow, style.shadow !== undefined],
  ];

  const styleParts = entries
    .filter(([, , include]) => include)
    .map(([key, value]) => `${key}=${value}`);

  const base = `subtitles='${safePath}'`;
  return styleParts.length > 0
    ? `${base}:force_style='${styleParts.join(',')}'`
    : base;
}
64
+
65
/**
 * Build FFmpeg -filter_complex string for mixing video with background music.
 * With source audio present, mixes it with the music track (shortest wins);
 * otherwise passes the music track straight through.
 *
 * @param {boolean} [videoHasAudio=true] - Whether source video has audio track
 * @returns {string} FFmpeg filter_complex string labelled [aout]
 */
function amixFilter(videoHasAudio = true) {
  return videoHasAudio
    ? '[0:a][1:a]amix=inputs=2:duration=shortest[aout]'
    : '[1:a]anull[aout]';
}
78
+
79
/**
 * Build -filter_complex for multiple slices from one input with xfade/acrossfade transitions.
 * Each range is trimmed from the same input (trim/atrim + PTS reset), then the
 * segments are chained pairwise with xfade (video) and acrossfade (audio).
 *
 * Each slice after the first must carry a transition ({ type, duration })
 * describing how the previous slice blends into it; the engine is expected to
 * have filled in the default/common transition beforehand.
 *
 * @param {Array<{ startSeconds: number, endSeconds: number, transition?: { type: string, duration: number } }>} slices
 *   Normalized slice ranges in seconds, with per-boundary transition (for slice i > 0).
 * @param {boolean} hasAudio - Whether input has audio (to include acrossfade)
 * @returns {{ filterComplex: string, mapVideo: string, mapAudio: string | null }}
 */
function buildSlicesWithTransitionsFilter(slices, hasAudio = true) {
  if (!slices.length) throw new Error('slices must have at least one range');

  const chains = [];
  const sliceDurations = slices.map(
    ({ startSeconds, endSeconds }) => endSeconds - startSeconds
  );

  slices.forEach(({ startSeconds, endSeconds }, idx) => {
    chains.push(
      `[0:v]trim=start=${startSeconds}:end=${endSeconds},setpts=PTS-STARTPTS[v${idx}]`
    );
    if (hasAudio) {
      chains.push(
        `[0:a]atrim=start=${startSeconds}:end=${endSeconds},asetpts=PTS-STARTPTS[a${idx}]`
      );
    }
  });

  // Single range: no transitions needed, map the trimmed streams directly.
  if (slices.length === 1) {
    return {
      filterComplex: chains.join(';'),
      mapVideo: '[v0]',
      mapAudio: hasAudio ? '[a0]' : null,
    };
  }

  let currentV = 'v0';
  let currentA = 'a0';
  // Running length of the joined output so far; each xfade offset starts the
  // blend this far in, minus the fade length.
  let timelineLength = sliceDurations[0];

  for (let idx = 1; idx < slices.length; idx++) {
    const isFinal = idx === slices.length - 1;
    const mergedV = isFinal ? 'vout' : `v${idx - 1}${idx}`;
    const mergedA = isFinal ? 'aout' : `a${idx - 1}${idx}`;

    const boundary = slices[idx].transition;
    if (!boundary) {
      throw new Error(
        `Missing transition for boundary before slice index ${idx}; engine should provide default/common transition.`
      );
    }

    // Clamp the fade so it never exceeds either adjacent clip (with margin),
    // but keep it at least 0.01s so xfade stays valid.
    const longestAllowed = Math.min(sliceDurations[idx - 1], sliceDurations[idx]) - 0.01;
    const fadeLen = Math.min(boundary.duration, Math.max(0.01, longestAllowed));
    const startOffset = timelineLength - fadeLen;

    chains.push(
      `[${currentV}][v${idx}]xfade=transition=${boundary.type}:duration=${fadeLen}:offset=${startOffset}[${mergedV}]`
    );
    if (hasAudio) {
      chains.push(
        `[${currentA}][a${idx}]acrossfade=d=${fadeLen}:c1=tri:c2=tri[${mergedA}]`
      );
    }

    timelineLength += sliceDurations[idx] - fadeLen;
    currentV = mergedV;
    currentA = mergedA;
  }

  // Force a widely-compatible pixel format on the final video stream.
  chains.push('[vout]format=yuv420p[v420]');
  return {
    filterComplex: chains.join(';'),
    mapVideo: '[v420]',
    mapAudio: hasAudio ? '[aout]' : null,
  };
}
165
+
166
// Filter-string builders plus the Shorts output dimensions used by the engine.
module.exports = {
  cropTo916Filter,
  subtitleFilter,
  amixFilter,
  buildSlicesWithTransitionsFilter,
  SHORTS_WIDTH,
  SHORTS_HEIGHT,
};
+ };