@gannochenko/staticstripes 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. package/.prettierrc +8 -0
  2. package/Makefile +69 -0
  3. package/dist/asset-manager.d.ts +16 -0
  4. package/dist/asset-manager.d.ts.map +1 -0
  5. package/dist/asset-manager.js +50 -0
  6. package/dist/asset-manager.js.map +1 -0
  7. package/dist/cli.d.ts +3 -0
  8. package/dist/cli.d.ts.map +1 -0
  9. package/dist/cli.js +257 -0
  10. package/dist/cli.js.map +1 -0
  11. package/dist/container-renderer.d.ts +21 -0
  12. package/dist/container-renderer.d.ts.map +1 -0
  13. package/dist/container-renderer.js +149 -0
  14. package/dist/container-renderer.js.map +1 -0
  15. package/dist/expression-parser.d.ts +63 -0
  16. package/dist/expression-parser.d.ts.map +1 -0
  17. package/dist/expression-parser.js +145 -0
  18. package/dist/expression-parser.js.map +1 -0
  19. package/dist/ffmpeg.d.ts +375 -0
  20. package/dist/ffmpeg.d.ts.map +1 -0
  21. package/dist/ffmpeg.js +997 -0
  22. package/dist/ffmpeg.js.map +1 -0
  23. package/dist/ffprobe.d.ts +2 -0
  24. package/dist/ffprobe.d.ts.map +1 -0
  25. package/dist/ffprobe.js +31 -0
  26. package/dist/ffprobe.js.map +1 -0
  27. package/dist/html-parser.d.ts +56 -0
  28. package/dist/html-parser.d.ts.map +1 -0
  29. package/dist/html-parser.js +208 -0
  30. package/dist/html-parser.js.map +1 -0
  31. package/dist/html-project-parser.d.ts +169 -0
  32. package/dist/html-project-parser.d.ts.map +1 -0
  33. package/dist/html-project-parser.js +954 -0
  34. package/dist/html-project-parser.js.map +1 -0
  35. package/dist/index.d.ts +6 -0
  36. package/dist/index.d.ts.map +1 -0
  37. package/dist/index.js +18 -0
  38. package/dist/index.js.map +1 -0
  39. package/dist/label-generator.d.ts +35 -0
  40. package/dist/label-generator.d.ts.map +1 -0
  41. package/dist/label-generator.js +69 -0
  42. package/dist/label-generator.js.map +1 -0
  43. package/dist/project.d.ts +29 -0
  44. package/dist/project.d.ts.map +1 -0
  45. package/dist/project.js +137 -0
  46. package/dist/project.js.map +1 -0
  47. package/dist/sample-sequences.d.ts +5 -0
  48. package/dist/sample-sequences.d.ts.map +1 -0
  49. package/dist/sample-sequences.js +199 -0
  50. package/dist/sample-sequences.js.map +1 -0
  51. package/dist/sample-streams.d.ts +2 -0
  52. package/dist/sample-streams.d.ts.map +1 -0
  53. package/dist/sample-streams.js +109 -0
  54. package/dist/sample-streams.js.map +1 -0
  55. package/dist/sequence.d.ts +21 -0
  56. package/dist/sequence.d.ts.map +1 -0
  57. package/dist/sequence.js +269 -0
  58. package/dist/sequence.js.map +1 -0
  59. package/dist/stream.d.ts +135 -0
  60. package/dist/stream.d.ts.map +1 -0
  61. package/dist/stream.js +779 -0
  62. package/dist/stream.js.map +1 -0
  63. package/dist/type.d.ts +73 -0
  64. package/dist/type.d.ts.map +1 -0
  65. package/dist/type.js +3 -0
  66. package/dist/type.js.map +1 -0
  67. package/eslint.config.js +44 -0
  68. package/package.json +50 -0
  69. package/src/asset-manager.ts +55 -0
  70. package/src/cli.ts +306 -0
  71. package/src/container-renderer.ts +190 -0
  72. package/src/expression-parser.test.ts +459 -0
  73. package/src/expression-parser.ts +199 -0
  74. package/src/ffmpeg.ts +1403 -0
  75. package/src/ffprobe.ts +29 -0
  76. package/src/html-parser.ts +221 -0
  77. package/src/html-project-parser.ts +1195 -0
  78. package/src/index.ts +9 -0
  79. package/src/label-generator.ts +74 -0
  80. package/src/project.ts +180 -0
  81. package/src/sample-sequences.ts +225 -0
  82. package/src/sample-streams.ts +142 -0
  83. package/src/sequence.ts +330 -0
  84. package/src/stream.ts +1012 -0
  85. package/src/type.ts +81 -0
  86. package/tsconfig.json +24 -0
package/dist/ffmpeg.js ADDED
@@ -0,0 +1,997 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.runFFMpeg = exports.Filter = void 0;
4
+ exports.checkFFmpegInstalled = checkFFmpegInstalled;
5
+ exports.ms = ms;
6
+ exports.makeFFmpegCommand = makeFFmpegCommand;
7
+ exports.makeConcat = makeConcat;
8
+ exports.makeXFade = makeXFade;
9
+ exports.makeNull = makeNull;
10
+ exports.makeOverlay = makeOverlay;
11
+ exports.makeFps = makeFps;
12
+ exports.makeScale = makeScale;
13
+ exports.makeSplit = makeSplit;
14
+ exports.makeTranspose = makeTranspose;
15
+ exports.makeTrim = makeTrim;
16
+ exports.makeTPad = makeTPad;
17
+ exports.makePad = makePad;
18
+ exports.makeCrop = makeCrop;
19
+ exports.makeEq = makeEq;
20
+ exports.makeColorChannelMixer = makeColorChannelMixer;
21
+ exports.makeCurves = makeCurves;
22
+ exports.makeVignette = makeVignette;
23
+ exports.makeColorBalance = makeColorBalance;
24
+ exports.makeGblur = makeGblur;
25
+ exports.makeBoxblur = makeBoxblur;
26
+ exports.makeUnsharp = makeUnsharp;
27
+ exports.makeHue = makeHue;
28
+ exports.makeHflip = makeHflip;
29
+ exports.makeVflip = makeVflip;
30
+ exports.makeChromakey = makeChromakey;
31
+ exports.makeDespill = makeDespill;
32
+ exports.makeFade = makeFade;
33
+ exports.makeColor = makeColor;
34
+ exports.makeAnullsrc = makeAnullsrc;
35
+ exports.makeAmix = makeAmix;
36
+ const child_process_1 = require("child_process");
37
+ const label_generator_1 = require("./label-generator");
38
+ /**
39
+ * Checks if FFmpeg is installed and available in the system PATH
40
+ * @throws Error if FFmpeg is not found
41
+ */
42
+ async function checkFFmpegInstalled() {
43
+ return new Promise((resolve, reject) => {
44
+ const ffmpeg = (0, child_process_1.spawn)('ffmpeg', ['-version'], {
45
+ stdio: ['ignore', 'pipe', 'pipe'],
46
+ });
47
+ let hasOutput = false;
48
+ ffmpeg.stdout.on('data', () => {
49
+ hasOutput = true;
50
+ });
51
+ ffmpeg.on('close', (code) => {
52
+ if (code === 0 && hasOutput) {
53
+ resolve();
54
+ }
55
+ else {
56
+ reject(new Error('FFmpeg not found. Please install FFmpeg to use StaticStripes.\n' +
57
+ 'Visit https://ffmpeg.org/download.html for installation instructions.'));
58
+ }
59
+ });
60
+ ffmpeg.on('error', (error) => {
61
+ if (error.message.includes('ENOENT')) {
62
+ reject(new Error('FFmpeg not found in system PATH. Please install FFmpeg to use StaticStripes.\n' +
63
+ 'Visit https://ffmpeg.org/download.html for installation instructions.\n\n' +
64
+ 'Quick install:\n' +
65
+ ' macOS: brew install ffmpeg\n' +
66
+ ' Ubuntu/Debian: sudo apt-get install ffmpeg\n' +
67
+ ' Windows: Download from https://ffmpeg.org/download.html'));
68
+ }
69
+ else {
70
+ reject(error);
71
+ }
72
+ });
73
+ });
74
+ }
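// Illustrative sketch (not part of the package): a CLI would typically await the check
// up front and surface the install hint before doing any work; the wrapper name is invented.
async function ensureFFmpegAvailable() {
    try {
        await checkFFmpegInstalled();
    } catch (error) {
        console.error(error.message);
        process.exit(1);
    }
}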
75
+ /**
76
+ * Helper function to format milliseconds for FFmpeg time parameters
77
+ * @param value - Time value in milliseconds
78
+ * @returns Formatted string with 'ms' suffix (e.g., "1500ms")
79
+ */
80
+ function ms(value) {
81
+ return `${value}ms`;
82
+ }
83
+ class Filter {
84
+ inputs;
85
+ outputs;
86
+ body;
87
+ constructor(inputs, outputs, body) {
88
+ this.inputs = inputs;
89
+ this.outputs = outputs;
90
+ this.body = body;
91
+ }
92
+ render() {
93
+ let result = '';
94
+ this.inputs.forEach((input) => {
95
+ result += wrap(input.tag);
96
+ });
97
+ result += this.body;
98
+ this.outputs.forEach((output) => {
99
+ result += wrap(output.tag);
100
+ });
101
+ return result;
102
+ }
103
+ }
104
+ exports.Filter = Filter;
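// Illustrative sketch (not part of the package): a Filter renders as "[inputs]body[outputs]".
// The stream tags below are invented; real ones come from input indices or getLabel().
const exampleScale = new Filter(
    [{ tag: '0:v', isAudio: false }],
    [{ tag: 'v0', isAudio: false }],
    'scale=1280:720'
);
// exampleScale.render() === '[0:v]scale=1280:720[v0]'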
105
+ /**
106
+ * Generates the complete ffmpeg command for rendering the project
107
+ */
108
+ function makeFFmpegCommand(project, filterComplex, outputName, preset = 'medium') {
109
+ const parts = ['ffmpeg'];
110
+ // Overwrite output file without asking
111
+ parts.push('-y');
112
+ // Add input files in order of their index mapping
113
+ const inputsByIndex = new Map();
114
+ for (const [assetName, index] of project.getAssetIndexMap()) {
115
+ const asset = project.getAssetByName(assetName);
116
+ if (asset) {
117
+ inputsByIndex.set(index, asset.path);
118
+ }
119
+ }
120
+ // Add inputs in sorted order
121
+ const sortedIndices = Array.from(inputsByIndex.keys()).sort((a, b) => a - b);
122
+ for (const index of sortedIndices) {
123
+ const path = inputsByIndex.get(index);
124
+ if (path) {
125
+ parts.push(`-i "${path}"`);
126
+ }
127
+ }
128
+ // Add filter_complex
129
+ if (filterComplex) {
130
+ parts.push(`-filter_complex "${filterComplex}"`);
131
+ }
132
+ // Map the output streams (video and audio)
133
+ parts.push('-map "[outv]"');
134
+ parts.push('-map "[outa]"');
135
+ // Increase buffer queue size for complex filter graphs
136
+ parts.push('-max_muxing_queue_size 4096');
137
+ // Add output parameters
138
+ const output = project.getOutput(outputName);
139
+ if (!output) {
140
+ throw new Error(`Output "${outputName}" not found`);
141
+ }
142
+ const { width, height } = output.resolution;
143
+ // Video encoding parameters
144
+ parts.push(`-s ${width}x${height}`);
145
+ parts.push(`-r ${output.fps}`);
146
+ parts.push('-pix_fmt yuv420p'); // Standard pixel format for compatibility
147
+ parts.push(`-preset ${preset}`); // Encoding speed preset
148
+ // Audio encoding parameters
149
+ parts.push('-c:a aac'); // AAC audio codec
150
+ parts.push('-b:a 192k'); // Audio bitrate
151
+ // Add output path
152
+ parts.push(`"${output.path}"`);
153
+ return parts.join(' ');
154
+ }
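// Illustrative sketch (not part of the package): makeFFmpegCommand only touches the three
// project methods used above, so a minimal stand-in object (all values invented) is enough
// to preview the generated command string.
const demoProject = {
    getAssetIndexMap: () => new Map([['intro', 0]]),
    getAssetByName: () => ({ path: 'intro.mp4' }),
    getOutput: () => ({ resolution: { width: 1920, height: 1080 }, fps: 30, path: 'out.mp4' }),
};
const demoCommand = makeFFmpegCommand(demoProject, '[0:v]null[outv];[0:a]anull[outa]', 'main', 'fast');
// demoCommand starts with: ffmpeg -y -i "intro.mp4" -filter_complex "[0:v]null[outv];[0:a]anull[outa]" ...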
155
+ const runFFMpeg = async (ffmpegCommand) => {
156
+ const args = ffmpegCommand
157
+ .slice('ffmpeg '.length)
158
+ .match(/(?:[^\s"]+|"[^"]*")+/g)
159
+ ?.map((arg) => arg.replace(/^"|"$/g, '')) || [];
160
+ return new Promise((resolve, reject) => {
161
+ const ffmpeg = (0, child_process_1.spawn)('ffmpeg', args, {
162
+ stdio: ['ignore', 'pipe', 'pipe'],
163
+ });
164
+ // FFmpeg outputs progress to stderr
165
+ let stderrBuffer = '';
166
+ ffmpeg.stderr.on('data', (data) => {
167
+ const output = data.toString();
168
+ stderrBuffer += output;
169
+ // Show all output for debugging
170
+ process.stderr.write(output);
171
+ });
172
+ ffmpeg.on('close', (code) => {
173
+ process.stdout.write('\n');
174
+ if (code === 0) {
175
+ console.log('\n=== Render Complete ===');
176
+ resolve();
177
+ }
178
+ else {
179
+ console.error(`\n=== Render Failed ===`);
180
+ console.error(`FFmpeg exited with code ${code}`);
181
+ reject(new Error(`FFmpeg process exited with code ${code}`));
182
+ }
183
+ });
184
+ ffmpeg.on('error', (error) => {
185
+ console.error('\n=== Render Failed ===');
186
+ console.error('Error:', error.message);
187
+ reject(error);
188
+ });
189
+ });
190
+ };
191
+ exports.runFFMpeg = runFFMpeg;
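// Illustrative sketch (not part of the package): the command string is re-tokenized with the
// same regex used above, so quoted paths containing spaces survive as single arguments.
const demoSplit = 'ffmpeg -y -i "my clip.mp4" -preset fast "out.mp4"'
    .slice('ffmpeg '.length)
    .match(/(?:[^\s"]+|"[^"]*")+/g)
    ?.map((arg) => arg.replace(/^"|"$/g, '')) || [];
// demoSplit === ['-y', '-i', 'my clip.mp4', '-preset', 'fast', 'out.mp4']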
192
+ /**
193
+ * Creates a concat filter
194
+ * Automatically determines the number of segments (n) and stream counts (v, a) from input labels
195
+ * and generates appropriate output labels
196
+ * @param inputs - Array of input stream labels
197
+ * @returns Filter with auto-generated outputs
198
+ */
199
+ function makeConcat(inputs) {
200
+ if (inputs.length === 0) {
201
+ throw new Error('makeConcat: inputs cannot be empty');
202
+ }
203
+ // Count total video and audio streams in inputs
204
+ let totalVideo = 0;
205
+ let totalAudio = 0;
206
+ for (const input of inputs) {
207
+ if (input.isAudio) {
208
+ totalAudio++;
209
+ }
210
+ else {
211
+ totalVideo++;
212
+ }
213
+ }
214
+ // Find the pattern: try to determine n, v, a where:
215
+ // - n * v = totalVideo
216
+ // - n * a = totalAudio
217
+ // - n * (v + a) = inputs.length
218
+ // We want the largest n (most segments, fewest streams per segment)
219
+ // Note: n=1 always works, so we're guaranteed to find a pattern
220
+ let n = 0;
221
+ let v = 0;
222
+ let a = 0;
223
+ // Try from largest n down to 1
224
+ for (let tryN = inputs.length; tryN >= 1; tryN--) {
225
+ if (totalVideo % tryN === 0 && totalAudio % tryN === 0) {
226
+ const tryV = totalVideo / tryN;
227
+ const tryA = totalAudio / tryN;
228
+ if (tryV + tryA === inputs.length / tryN) {
229
+ n = tryN;
230
+ v = tryV;
231
+ a = tryA;
232
+ break;
233
+ }
234
+ }
235
+ }
236
+ // n should always be set (at minimum n=1 always works), but check to be safe
237
+ if (n === 0) {
238
+ throw new Error('makeConcat: Internal error - failed to determine pattern (this should never happen)');
239
+ }
240
+ // Generate output labels
241
+ const outputs = [];
242
+ // Add video outputs
243
+ for (let i = 0; i < v; i++) {
244
+ outputs.push({
245
+ tag: (0, label_generator_1.getLabel)(),
246
+ isAudio: false,
247
+ });
248
+ }
249
+ // Add audio outputs
250
+ for (let i = 0; i < a; i++) {
251
+ outputs.push({
252
+ tag: (0, label_generator_1.getLabel)(),
253
+ isAudio: true,
254
+ });
255
+ }
256
+ return new Filter(inputs, outputs, `concat=n=${n}:v=${v}:a=${a}`);
257
+ }
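// Illustrative sketch (not part of the package): two segments, each contributing one video
// and one audio label (tags invented), yield n=2, v=1, a=1.
const demoConcat = makeConcat([
    { tag: 'seg0v', isAudio: false },
    { tag: 'seg0a', isAudio: true },
    { tag: 'seg1v', isAudio: false },
    { tag: 'seg1a', isAudio: true },
]);
// demoConcat.body === 'concat=n=2:v=1:a=1', with one generated video and one audio output label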
258
+ /**
259
+ * Creates an xfade (crossfade) filter for video streams
260
+ * Note: xfade only works with video, not audio
261
+ * @param inputs - Array of exactly two video input stream labels
263
+ * @param options - Transition parameters
264
+ * @returns Filter with auto-generated video output
265
+ */
266
+ function makeXFade(inputs, options) {
267
+ if (inputs.length !== 2) {
268
+ throw new Error(`makeXFade: expects two inputs`);
269
+ }
270
+ const input1 = inputs[0];
271
+ const input2 = inputs[1];
272
+ // Validate that both inputs are video (xfade doesn't support audio)
273
+ if (input1.isAudio) {
274
+ throw new Error(`makeXFade: input1 must be video, got audio (tag: ${input1.tag})`);
275
+ }
276
+ if (input2.isAudio) {
277
+ throw new Error(`makeXFade: input2 must be video, got audio (tag: ${input2.tag})`);
278
+ }
279
+ const transition = options.transition ?? 'fade';
280
+ // Auto-generate video output
281
+ const output = {
282
+ tag: (0, label_generator_1.getLabel)(),
283
+ isAudio: false,
284
+ };
285
+ return new Filter([input1, input2], [output], `xfade=transition=${transition}:duration=${ms(options.duration)}:offset=${ms(options.offset)}`);
286
+ }
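// Illustrative sketch (not part of the package): a 500 ms crossfade starting 4 s into the
// first clip; both values are milliseconds, like every other duration in this module.
const demoXFade = makeXFade(
    [{ tag: 'clipA', isAudio: false }, { tag: 'clipB', isAudio: false }],
    { transition: 'fade', duration: 500, offset: 4000 }
);
// demoXFade.body === 'xfade=transition=fade:duration=500ms:offset=4000ms'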
287
+ /**
288
+ * Creates a null filter (passthrough)
289
+ * @param inputs - Array containing a single input stream label
290
+ */
291
+ function makeNull(inputs) {
292
+ if (inputs.length !== 1) {
293
+ throw new Error(`makeNull: expects one input`);
294
+ }
295
+ const input1 = inputs[0];
296
+ const outputLabelTag = (0, label_generator_1.getLabel)();
297
+ return new Filter([input1], [
298
+ {
299
+ tag: outputLabelTag,
300
+ isAudio: input1.isAudio,
301
+ },
302
+ ], input1.isAudio ? 'anull' : 'null');
303
+ }
304
+ function makeOverlay(inputs, options) {
305
+ if (inputs.length !== 2) {
306
+ throw new Error(`makeOverlay: expects two inputs`);
307
+ }
308
+ const input1 = inputs[0];
309
+ const input2 = inputs[1];
310
+ // Validate that both inputs are video (overlay operates on video streams only)
311
+ if (input1.isAudio) {
312
+ throw new Error(`makeOverlay: input1 must be video, got audio (tag: ${input1.tag})`);
313
+ }
314
+ if (input2.isAudio) {
315
+ throw new Error(`makeOverlay: input2 must be video, got audio (tag: ${input2.tag})`);
316
+ }
317
+ const output = {
318
+ tag: (0, label_generator_1.getLabel)(),
319
+ isAudio: false,
320
+ };
321
+ let overlayParams = 'format=auto';
322
+ if (options?.x !== undefined || options?.y !== undefined) {
323
+ const x = options.x ?? 0;
324
+ const y = options.y ?? 0;
325
+ overlayParams = `x=${x}:y=${y}:format=auto`;
326
+ }
327
+ return new Filter(inputs, [output], `overlay=${overlayParams}:eof_action=pass`);
328
+ }
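// Illustrative sketch (not part of the package): pin a second video stream 20 px from the
// top-left corner; omitting x/y keeps the bare 'format=auto' parameters.
const demoOverlay = makeOverlay(
    [{ tag: 'base', isAudio: false }, { tag: 'logo', isAudio: false }],
    { x: 20, y: 20 }
);
// demoOverlay.body === 'overlay=x=20:y=20:format=auto:eof_action=pass'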
329
+ function makeFps(inputs, fps) {
330
+ if (inputs.length !== 1) {
331
+ throw new Error(`makeFps: expects one input`);
332
+ }
333
+ const input1 = inputs[0];
334
+ if (input1.isAudio) {
335
+ throw new Error(`makeFps: input1 must be video, got audio (tag: ${input1.tag})`);
336
+ }
337
+ const output = {
338
+ tag: (0, label_generator_1.getLabel)(),
339
+ isAudio: false,
340
+ };
341
+ return new Filter(inputs, [output], `fps=${fps}`);
342
+ }
343
+ function makeScale(inputs, options) {
344
+ if (inputs.length !== 1) {
345
+ throw new Error(`makeScale: expects one input`);
346
+ }
347
+ const input1 = inputs[0];
348
+ if (input1.isAudio) {
349
+ throw new Error(`makeScale: input1 must be video, got audio (tag: ${input1.tag})`);
350
+ }
351
+ const output = {
352
+ tag: (0, label_generator_1.getLabel)(),
353
+ isAudio: false,
354
+ };
355
+ const algo = options.flags;
356
+ return new Filter(inputs, [output], `scale=${options.width}:${options.height}${algo ? `:${algo}` : ''}`);
357
+ }
358
+ /**
359
+ * Creates a split filter (splits one input into multiple outputs)
360
+ * @param inputs - Array containing a single input stream label
361
+ * @returns Filter with two auto-generated outputs of the same stream type
362
+ */
363
+ function makeSplit(inputs) {
364
+ if (inputs.length !== 1) {
365
+ throw new Error(`makeSplit: expects one input`);
366
+ }
367
+ const input1 = inputs[0];
368
+ const output1 = {
369
+ tag: (0, label_generator_1.getLabel)(),
370
+ isAudio: input1.isAudio,
371
+ };
372
+ const output2 = {
373
+ tag: (0, label_generator_1.getLabel)(),
374
+ isAudio: input1.isAudio,
375
+ };
376
+ return new Filter(inputs, [output1, output2], 'split');
377
+ }
378
+ function makeTranspose(inputs, direction) {
379
+ const output = {
380
+ tag: (0, label_generator_1.getLabel)(),
381
+ isAudio: false,
382
+ };
383
+ const input1 = inputs[0];
384
+ if (input1.isAudio) {
385
+ throw new Error(`makeTranspose: input1 must be video, got audio (tag: ${input1.tag})`);
386
+ }
387
+ return new Filter(inputs, [output], `transpose=${direction}`);
388
+ }
389
+ /**
390
+ * Creates a trim filter to cut streams to a specific time range
391
+ * @param inputs - Input stream labels (video or audio)
392
+ * @param start - Start time in milliseconds
393
+ * @param end - End time in milliseconds
394
+ * @returns Filter with trimmed output
395
+ */
396
+ function makeTrim(inputs, start, end) {
397
+ const input1 = inputs[0];
398
+ const output = {
399
+ tag: (0, label_generator_1.getLabel)(),
400
+ isAudio: input1.isAudio,
401
+ };
402
+ const prefix = input1.isAudio ? 'a' : '';
403
+ return new Filter(inputs, [output], `${prefix}trim=start=${ms(start)}:end=${ms(end)},${prefix}setpts=PTS-STARTPTS`);
404
+ }
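// Illustrative sketch (not part of the package): keep only the 2 s to 7 s window of a video
// label; the same call on an audio label would emit atrim/asetpts instead.
const demoTrim = makeTrim([{ tag: 'clipA', isAudio: false }], 2000, 7000);
// demoTrim.body === 'trim=start=2000ms:end=7000ms,setpts=PTS-STARTPTS'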
405
+ /**
406
+ * Creates a tpad/apad filter to add temporal padding (frames/silence)
407
+ * @param inputs - Input stream labels (video or audio)
408
+ * @param options - Padding parameters
409
+ * - start: Duration to add at the beginning (in milliseconds, default: 0)
410
+ * - stop: Duration to add at the end (in milliseconds, default: 0)
411
+ * - start_mode: 'clone' (duplicate frames) or 'add' (colored frames/silence, default)
412
+ * - stop_mode: 'clone' (duplicate frames) or 'add' (colored frames/silence, default)
413
+ * - color: Color of added frames (video only, e.g., 'black', '#00FF00', default: 'black')
414
+ */
415
+ function makeTPad(inputs, options = {}) {
416
+ const input = inputs[0];
417
+ const output = {
418
+ tag: (0, label_generator_1.getLabel)(),
419
+ isAudio: input.isAudio,
420
+ };
421
+ const start = options.start ?? 0;
422
+ const stop = options.stop ?? 0;
423
+ const start_mode = options.startMode ?? 'add';
424
+ const stop_mode = options.stopMode ?? 'add';
425
+ const color = options.color ?? 'black';
426
+ const filterName = input.isAudio ? 'apad' : 'tpad';
427
+ if (input.isAudio) {
428
+ // For audio: use adelay for start padding, apad for stop padding
429
+ const filters = [];
430
+ // Add silence at the start using adelay (already in milliseconds)
431
+ if (start > 0) {
432
+ filters.push(`adelay=${start}|${start}`);
433
+ }
434
+ // Add silence at the end using apad
435
+ if (stop > 0) {
436
+ filters.push(`apad=pad_dur=${ms(stop)}`);
437
+ }
438
+ const filterStr = filters.length > 0 ? filters.join(',') : 'anull';
439
+ return new Filter(inputs, [output], filterStr);
440
+ }
441
+ else {
442
+ // tpad for video
443
+ const params = [];
444
+ if (start > 0) {
445
+ params.push(`start_duration=${ms(start)}`);
446
+ params.push(`start_mode=${start_mode}`);
447
+ }
448
+ if (stop > 0) {
449
+ params.push(`stop_duration=${ms(stop)}`);
450
+ params.push(`stop_mode=${stop_mode}`);
451
+ }
452
+ // Add color parameter for added frames (when mode is 'add')
453
+ if ((start_mode === 'add' && start > 0) ||
454
+ (stop_mode === 'add' && stop > 0)) {
455
+ params.push(`color=${color}`);
456
+ }
457
+ const filterParams = params.length > 0 ? `=${params.join(':')}` : '';
458
+ return new Filter(inputs, [output], `${filterName}${filterParams}`);
459
+ }
460
+ }
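// Illustrative sketch (not part of the package): hold one extra second of black at the end
// of a video label; with an audio label the same options would produce apad silence.
const demoTPad = makeTPad([{ tag: 'clipA', isAudio: false }], { stop: 1000 });
// demoTPad.body === 'tpad=stop_duration=1000ms:stop_mode=add:color=black'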
461
+ /**
462
+ * Creates a pad filter to add borders/letterboxing
463
+ * @param inputs - Input stream labels (must be video)
464
+ * @param width - Output width (can be expression like 'iw' or number)
465
+ * @param height - Output height (can be expression like 'ih' or number)
466
+ * @param x - X position (default: center using '(ow-iw)/2')
467
+ * @param y - Y position (default: center using '(oh-ih)/2')
468
+ * @param color - Background color (default: 'black')
469
+ */
470
+ function makePad(inputs, options) {
471
+ const input = inputs[0];
472
+ if (input.isAudio) {
473
+ throw new Error(`makePad: input must be video, got audio (tag: ${input.tag})`);
474
+ }
475
+ const output = {
476
+ tag: (0, label_generator_1.getLabel)(),
477
+ isAudio: false,
478
+ };
479
+ const x = options.x ?? '(ow-iw)/2';
480
+ const y = options.y ?? '(oh-ih)/2';
481
+ const color = options.color ?? 'black';
482
+ return new Filter(inputs, [output], `pad=${options.width}:${options.height}:${x}:${y}:${color}`);
483
+ }
484
+ /**
485
+ * Creates a crop filter to cut video to specific dimensions
486
+ * @param inputs - Input stream labels (must be video)
487
+ * @param options - Crop parameters
488
+ * - width: Output width (can be expression or number)
489
+ * - height: Output height (can be expression or number)
490
+ * - x: X position to start crop (default: center using '(in_w-out_w)/2')
491
+ * - y: Y position to start crop (default: center using '(in_h-out_h)/2')
492
+ */
493
+ function makeCrop(inputs, options) {
494
+ const input = inputs[0];
495
+ if (input.isAudio) {
496
+ throw new Error(`makeCrop: input must be video, got audio (tag: ${input.tag})`);
497
+ }
498
+ const output = {
499
+ tag: (0, label_generator_1.getLabel)(),
500
+ isAudio: false,
501
+ };
502
+ const x = options.x ?? '(in_w-out_w)/2';
503
+ const y = options.y ?? '(in_h-out_h)/2';
504
+ return new Filter(inputs, [output], `crop=${options.width}:${options.height}:${x}:${y}`);
505
+ }
506
+ /**
507
+ * Creates an eq (equalization) filter for color correction
508
+ * @param inputs - Input stream labels (must be video)
509
+ * @param options - Color adjustment parameters
510
+ * - brightness: -1.0 to 1.0 (default: 0)
511
+ * - contrast: -1000 to 1000 (default: 1.0)
512
+ * - saturation: 0 to 3 (default: 1.0)
513
+ * - gamma: 0.1 to 10 (default: 1.0)
514
+ */
515
+ function makeEq(inputs, options) {
516
+ const input = inputs[0];
517
+ if (input.isAudio) {
518
+ throw new Error(`makeEq: input must be video, got audio (tag: ${input.tag})`);
519
+ }
520
+ const output = {
521
+ tag: (0, label_generator_1.getLabel)(),
522
+ isAudio: false,
523
+ };
524
+ const params = [];
525
+ if (options.brightness !== undefined)
526
+ params.push(`brightness=${options.brightness}`);
527
+ if (options.contrast !== undefined)
528
+ params.push(`contrast=${options.contrast}`);
529
+ if (options.saturation !== undefined)
530
+ params.push(`saturation=${options.saturation}`);
531
+ if (options.gamma !== undefined)
532
+ params.push(`gamma=${options.gamma}`);
533
+ const filterStr = params.length > 0 ? `eq=${params.join(':')}` : 'eq';
534
+ return new Filter(inputs, [output], filterStr);
535
+ }
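// Illustrative sketch (not part of the package): a mild grade that lifts brightness and
// saturation; only the options actually provided end up in the filter string.
const demoEq = makeEq([{ tag: 'clipA', isAudio: false }], { brightness: 0.05, saturation: 1.2 });
// demoEq.body === 'eq=brightness=0.05:saturation=1.2'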
536
+ /**
537
+ * Creates a colorchannelmixer filter for advanced color adjustment
538
+ * @param inputs - Input stream labels (must be video)
539
+ * @param options - Color channel mixing parameters
540
+ * - rr: Red contribution to red channel (-2 to 2, default: 1)
541
+ * - rg: Green contribution to red channel (-2 to 2, default: 0)
542
+ * - rb: Blue contribution to red channel (-2 to 2, default: 0)
543
+ * - ra: Alpha contribution to red channel (-2 to 2, default: 0)
544
+ * - gr: Red contribution to green channel (-2 to 2, default: 0)
545
+ * - gg: Green contribution to green channel (-2 to 2, default: 1)
546
+ * - gb: Blue contribution to green channel (-2 to 2, default: 0)
547
+ * - ga: Alpha contribution to green channel (-2 to 2, default: 0)
548
+ * - br: Red contribution to blue channel (-2 to 2, default: 0)
549
+ * - bg: Green contribution to blue channel (-2 to 2, default: 0)
550
+ * - bb: Blue contribution to blue channel (-2 to 2, default: 1)
551
+ * - ba: Alpha contribution to blue channel (-2 to 2, default: 0)
552
+ */
553
+ function makeColorChannelMixer(inputs, options = {}) {
554
+ const input = inputs[0];
555
+ if (input.isAudio) {
556
+ throw new Error(`makeColorChannelMixer: input must be video, got audio (tag: ${input.tag})`);
557
+ }
558
+ const output = {
559
+ tag: (0, label_generator_1.getLabel)(),
560
+ isAudio: false,
561
+ };
562
+ const params = [];
563
+ if (options.rr !== undefined)
564
+ params.push(`rr=${options.rr}`);
565
+ if (options.rg !== undefined)
566
+ params.push(`rg=${options.rg}`);
567
+ if (options.rb !== undefined)
568
+ params.push(`rb=${options.rb}`);
569
+ if (options.ra !== undefined)
570
+ params.push(`ra=${options.ra}`);
571
+ if (options.gr !== undefined)
572
+ params.push(`gr=${options.gr}`);
573
+ if (options.gg !== undefined)
574
+ params.push(`gg=${options.gg}`);
575
+ if (options.gb !== undefined)
576
+ params.push(`gb=${options.gb}`);
577
+ if (options.ga !== undefined)
578
+ params.push(`ga=${options.ga}`);
579
+ if (options.br !== undefined)
580
+ params.push(`br=${options.br}`);
581
+ if (options.bg !== undefined)
582
+ params.push(`bg=${options.bg}`);
583
+ if (options.bb !== undefined)
584
+ params.push(`bb=${options.bb}`);
585
+ if (options.ba !== undefined)
586
+ params.push(`ba=${options.ba}`);
587
+ const filterStr = params.length > 0
588
+ ? `colorchannelmixer=${params.join(':')}`
589
+ : 'colorchannelmixer';
590
+ return new Filter(inputs, [output], filterStr);
591
+ }
592
+ /**
593
+ * Creates a curves filter for color grading (similar to Photoshop curves)
594
+ * @param inputs - Input stream labels (must be video)
595
+ * @param options - Curves parameters
596
+ * - preset: Preset curve name (e.g., 'darker', 'lighter', 'increase_contrast', 'vintage', etc.)
597
+ * - master: Master curve points (affects all channels, e.g., '0/0 0.5/0.6 1/1')
598
+ * - red: Red channel curve points
599
+ * - green: Green channel curve points
600
+ * - blue: Blue channel curve points
601
+ * - all: Apply same curve to all RGB channels
602
+ */
603
+ function makeCurves(inputs, options = {}) {
604
+ const input = inputs[0];
605
+ if (input.isAudio) {
606
+ throw new Error(`makeCurves: input must be video, got audio (tag: ${input.tag})`);
607
+ }
608
+ const output = {
609
+ tag: (0, label_generator_1.getLabel)(),
610
+ isAudio: false,
611
+ };
612
+ const params = [];
613
+ if (options.preset !== undefined)
614
+ params.push(`preset=${options.preset}`);
615
+ if (options.master !== undefined)
616
+ params.push(`master='${options.master}'`);
617
+ if (options.red !== undefined)
618
+ params.push(`red='${options.red}'`);
619
+ if (options.green !== undefined)
620
+ params.push(`green='${options.green}'`);
621
+ if (options.blue !== undefined)
622
+ params.push(`blue='${options.blue}'`);
623
+ if (options.all !== undefined)
624
+ params.push(`all='${options.all}'`);
625
+ const filterStr = params.length > 0 ? `curves=${params.join(':')}` : 'curves';
626
+ return new Filter(inputs, [output], filterStr);
627
+ }
628
+ /**
629
+ * Creates a vignette filter to darken the corners/edges
630
+ * @param inputs - Input stream labels (must be video)
631
+ * @param options - Vignette parameters
632
+ * - angle: Lens angle (0 to PI/2, default: PI/5)
633
+ * - x0: X coordinate of vignette center (0 to 1, default: w/2)
634
+ * - y0: Y coordinate of vignette center (0 to 1, default: h/2)
635
+ * - mode: Vignette mode ('forward' or 'backward', default: 'forward')
636
+ * - eval: When to evaluate expressions ('init' or 'frame', default: 'init')
637
+ */
638
+ function makeVignette(inputs, options = {}) {
639
+ const input = inputs[0];
640
+ if (input.isAudio) {
641
+ throw new Error(`makeVignette: input must be video, got audio (tag: ${input.tag})`);
642
+ }
643
+ const output = {
644
+ tag: (0, label_generator_1.getLabel)(),
645
+ isAudio: false,
646
+ };
647
+ const params = [];
648
+ if (options.angle !== undefined)
649
+ params.push(`angle='${options.angle}'`);
650
+ if (options.x0 !== undefined)
651
+ params.push(`x0='${options.x0}'`);
652
+ if (options.y0 !== undefined)
653
+ params.push(`y0='${options.y0}'`);
654
+ if (options.mode !== undefined)
655
+ params.push(`mode=${options.mode}`);
656
+ if (options.eval !== undefined)
657
+ params.push(`eval=${options.eval}`);
658
+ const filterStr = params.length > 0 ? `vignette=${params.join(':').replace(/'/g, '')}` : 'vignette';
659
+ return new Filter(inputs, [output], filterStr);
660
+ }
661
+ /**
662
+ * Creates a colorbalance filter to adjust colors in shadows, midtones, and highlights
663
+ * @param inputs - Input stream labels (must be video)
664
+ * @param options - Color balance parameters
665
+ * - rs: Red shift for shadows (-1 to 1, default: 0)
666
+ * - gs: Green shift for shadows (-1 to 1, default: 0)
667
+ * - bs: Blue shift for shadows (-1 to 1, default: 0)
668
+ * - rm: Red shift for midtones (-1 to 1, default: 0)
669
+ * - gm: Green shift for midtones (-1 to 1, default: 0)
670
+ * - bm: Blue shift for midtones (-1 to 1, default: 0)
671
+ * - rh: Red shift for highlights (-1 to 1, default: 0)
672
+ * - gh: Green shift for highlights (-1 to 1, default: 0)
673
+ * - bh: Blue shift for highlights (-1 to 1, default: 0)
674
+ */
675
+ function makeColorBalance(inputs, options = {}) {
676
+ const input = inputs[0];
677
+ if (input.isAudio) {
678
+ throw new Error(`makeColorBalance: input must be video, got audio (tag: ${input.tag})`);
679
+ }
680
+ const output = {
681
+ tag: (0, label_generator_1.getLabel)(),
682
+ isAudio: false,
683
+ };
684
+ const params = [];
685
+ if (options.rs !== undefined)
686
+ params.push(`rs=${options.rs}`);
687
+ if (options.gs !== undefined)
688
+ params.push(`gs=${options.gs}`);
689
+ if (options.bs !== undefined)
690
+ params.push(`bs=${options.bs}`);
691
+ if (options.rm !== undefined)
692
+ params.push(`rm=${options.rm}`);
693
+ if (options.gm !== undefined)
694
+ params.push(`gm=${options.gm}`);
695
+ if (options.bm !== undefined)
696
+ params.push(`bm=${options.bm}`);
697
+ if (options.rh !== undefined)
698
+ params.push(`rh=${options.rh}`);
699
+ if (options.gh !== undefined)
700
+ params.push(`gh=${options.gh}`);
701
+ if (options.bh !== undefined)
702
+ params.push(`bh=${options.bh}`);
703
+ const filterStr = params.length > 0 ? `colorbalance=${params.join(':')}` : 'colorbalance';
704
+ return new Filter(inputs, [output], filterStr);
705
+ }
706
+ /**
707
+ * Creates a Gaussian blur filter
708
+ * @param inputs - Input stream labels (must be video)
709
+ * @param options - Blur parameters
710
+ * - sigma: Blur strength (0.01 to 1024, default: 1.0)
+ * - steps: Number of blur steps (1 to 6, default: 1, higher = smoother but slower)
711
+ */
712
+ function makeGblur(inputs, options = {}) {
713
+ const input = inputs[0];
714
+ if (input.isAudio) {
715
+ throw new Error(`makeGblur: input must be video, got audio (tag: ${input.tag})`);
716
+ }
717
+ const output = {
718
+ tag: (0, label_generator_1.getLabel)(),
719
+ isAudio: false,
720
+ };
721
+ const sigma = options.sigma ?? 1.0;
722
+ const steps = options.steps ?? 1;
723
+ return new Filter(inputs, [output], `gblur=sigma=${sigma}:steps=${steps}`);
724
+ }
725
+ /**
726
+ * Creates a box blur filter (simpler, faster blur)
727
+ * @param inputs - Input stream labels (must be video)
728
+ * @param options - Blur parameters
729
+ * - luma_radius (lr): Horizontal luma blur radius (0 to min(w,h)/2)
730
+ * - luma_power (lp): Number of times to apply luma blur (0 to 2)
731
+ * - chroma_radius (cr): Horizontal chroma blur radius (0 to min(w,h)/2)
732
+ * - chroma_power (cp): Number of times to apply chroma blur (0 to 2)
733
+ */
734
+ function makeBoxblur(inputs, options = {}) {
735
+ const input = inputs[0];
736
+ if (input.isAudio) {
737
+ throw new Error(`makeBoxblur: input must be video, got audio (tag: ${input.tag})`);
738
+ }
739
+ const output = {
740
+ tag: (0, label_generator_1.getLabel)(),
741
+ isAudio: false,
742
+ };
743
+ const lr = options.luma_radius ?? 2;
744
+ const lp = options.luma_power ?? 1;
745
+ const cr = options.chroma_radius ?? lr;
746
+ const cp = options.chroma_power ?? lp;
747
+ return new Filter(inputs, [output], `boxblur=lr=${lr}:lp=${lp}:cr=${cr}:cp=${cp}`);
748
+ }
749
+ /**
750
+ * Creates an unsharp filter (sharpen or blur)
751
+ * @param inputs - Input stream labels (must be video)
752
+ * @param options - Sharpening parameters
753
+ * - luma_amount: Luma sharpening amount (-2 to 5, default: 1.0, negative = blur)
754
+ * - chroma_amount: Chroma sharpening amount (-2 to 5, default: 0)
755
+ */
756
+ function makeUnsharp(inputs, options = {}) {
757
+ const input = inputs[0];
758
+ if (input.isAudio) {
759
+ throw new Error(`makeUnsharp: input must be video, got audio (tag: ${input.tag})`);
760
+ }
761
+ const output = {
762
+ tag: (0, label_generator_1.getLabel)(),
763
+ isAudio: false,
764
+ };
765
+ const la = options.luma_amount ?? 1.0;
766
+ const ca = options.chroma_amount ?? 0;
767
+ return new Filter(inputs, [output], `unsharp=luma_amount=${la}:chroma_amount=${ca}`);
768
+ }
769
+ /**
770
+ * Creates a hue adjustment filter
771
+ * @param inputs - Input stream labels (must be video)
772
+ * @param options - Hue adjustment parameters
773
+ * - hue: Hue angle in degrees (0 to 360)
774
+ * - saturation: Saturation multiplier (-10 to 10, default: 1.0)
775
+ * - brightness: Brightness adjustment (-10 to 10, default: 0)
776
+ */
777
+ function makeHue(inputs, options = {}) {
778
+ const input = inputs[0];
779
+ if (input.isAudio) {
780
+ throw new Error(`makeHue: input must be video, got audio (tag: ${input.tag})`);
781
+ }
782
+ const output = {
783
+ tag: (0, label_generator_1.getLabel)(),
784
+ isAudio: false,
785
+ };
786
+ const params = [];
787
+ if (options.hue !== undefined)
788
+ params.push(`h=${options.hue}`);
789
+ if (options.saturation !== undefined)
790
+ params.push(`s=${options.saturation}`);
791
+ if (options.brightness !== undefined)
792
+ params.push(`b=${options.brightness}`);
793
+ const filterStr = params.length > 0 ? `hue=${params.join(':')}` : 'hue';
794
+ return new Filter(inputs, [output], filterStr);
795
+ }
796
+ /**
797
+ * Creates a horizontal flip filter (mirrors video left-right)
798
+ * Note: Only works with video streams
799
+ */
800
+ function makeHflip(inputs) {
801
+ const input = inputs[0];
802
+ if (input.isAudio) {
803
+ throw new Error(`makeHflip: input must be video, got audio (tag: ${input.tag})`);
804
+ }
805
+ const output = {
806
+ tag: (0, label_generator_1.getLabel)(),
807
+ isAudio: false,
808
+ };
809
+ return new Filter(inputs, [output], 'hflip');
810
+ }
811
+ /**
812
+ * Creates a vertical flip filter (mirrors video top-bottom)
813
+ * Note: Only works with video streams
814
+ */
815
+ function makeVflip(inputs) {
816
+ const input = inputs[0];
817
+ if (input.isAudio) {
818
+ throw new Error(`makeVflip: input must be video, got audio (tag: ${input.tag})`);
819
+ }
820
+ const output = {
821
+ tag: (0, label_generator_1.getLabel)(),
822
+ isAudio: false,
823
+ };
824
+ return new Filter(inputs, [output], 'vflip');
825
+ }
826
+ /**
827
+ * Creates a chromakey filter for green/blue screen removal
828
+ * @param inputs - Input stream labels (must be video)
829
+ * @param options - Chromakey parameters
830
+ * - color: Color to key out (e.g., 'green', '0x00FF00', '#00FF00')
831
+ * - similarity: How similar colors need to be to match (0.01 to 1.0, default: 0.01)
832
+ * - blend: Blend percentage for edges (0.0 to 1.0, default: 0.0)
833
+ */
834
+ function makeChromakey(inputs, options) {
835
+ const input = inputs[0];
836
+ if (input.isAudio) {
837
+ throw new Error(`makeChromakey: input must be video, got audio (tag: ${input.tag})`);
838
+ }
839
+ const output = {
840
+ tag: (0, label_generator_1.getLabel)(),
841
+ isAudio: false,
842
+ };
843
+ const similarity = options.similarity ?? 0.01;
844
+ const blend = options.blend ?? 0.0;
845
+ return new Filter(inputs, [output], `chromakey=${options.color}:${similarity}:${blend}`);
846
+ }
847
+ /**
848
+ * Creates a despill filter to remove color spill from chromakey
849
+ * @param inputs - Input stream labels (must be video)
850
+ * @param options - Despill parameters
851
+ * - type: Color to despill ('green' or 'blue', default: 'green')
852
+ * - mix: Mix factor (0.0 to 1.0, default: 0.5)
853
+ * - expand: Expand factor (0.0 to 1.0, default: 0.0)
854
+ */
855
+ function makeDespill(inputs, options = {}) {
856
+ const input = inputs[0];
857
+ if (input.isAudio) {
858
+ throw new Error(`makeDespill: input must be video, got audio (tag: ${input.tag})`);
859
+ }
860
+ const output = {
861
+ tag: (0, label_generator_1.getLabel)(),
862
+ isAudio: false,
863
+ };
864
+ const type = options.type ?? 'green';
865
+ const mix = options.mix ?? 0.5;
866
+ const expand = options.expand ?? 0.0;
867
+ return new Filter(inputs, [output], `despill=type=${type}:mix=${mix}:expand=${expand}`);
868
+ }
869
+ function makeFade(inputs, options) {
870
+ const input = inputs[0];
871
+ if (!options.fades || options.fades.length === 0) {
872
+ throw new Error(`makeFade: at least one fade operation is required`);
873
+ }
874
+ const output = {
875
+ tag: (0, label_generator_1.getLabel)(),
876
+ isAudio: input.isAudio,
877
+ };
878
+ // Use 'afade' for audio, 'fade' for video
879
+ const filterName = input.isAudio ? 'afade' : 'fade';
880
+ // Build fade filter string by chaining multiple fade operations
881
+ const fadeStrings = options.fades.map((fade) => {
882
+ const params = [];
883
+ params.push(`t=${fade.type}`);
884
+ params.push(`st=${ms(fade.startTime)}`);
885
+ params.push(`d=${ms(fade.duration)}`);
886
+ // Color parameter only applies to video (fade, not afade)
887
+ if (fade.color && !input.isAudio) {
888
+ params.push(`color=${fade.color}`);
889
+ }
890
+ // Curve parameter works for both video and audio
891
+ if (fade.curve) {
892
+ params.push(`curve=${fade.curve}`);
893
+ }
894
+ return `${filterName}=${params.join(':')}`;
895
+ });
896
+ return new Filter(inputs, [output], fadeStrings.join(','));
897
+ }
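// Illustrative sketch (not part of the package): fade in over the first 500 ms and fade out
// during the last second of a roughly 10 s video label (times invented).
const demoFade = makeFade([{ tag: 'clipA', isAudio: false }], {
    fades: [
        { type: 'in', startTime: 0, duration: 500 },
        { type: 'out', startTime: 9000, duration: 1000 },
    ],
});
// demoFade.body === 'fade=t=in:st=0ms:d=500ms,fade=t=out:st=9000ms:d=1000ms'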
898
+ /**
899
+ * Creates a color source filter to generate blank video
900
+ * @param options - Video parameters
901
+ * - duration: Duration in milliseconds
902
+ * - width: Video width in pixels
903
+ * - height: Video height in pixels
904
+ * - fps: Frame rate (default: 30)
905
+ * - color: Color (default: 'black', supports alpha with format '#RRGGBBAA')
906
+ * @returns Filter with video output
907
+ */
908
+ function makeColor(options) {
909
+ const output = {
910
+ tag: (0, label_generator_1.getLabel)(),
911
+ isAudio: false,
912
+ };
913
+ const color = options.color ?? 'black';
914
+ const fps = options.fps ?? 30;
915
+ // Check if color has alpha channel (8-digit hex with alpha)
916
+ const hasAlpha = color.length === 9 && color.startsWith('#');
917
+ // color source generates video, add format filter for alpha if needed
918
+ let filterStr = `color=c=${color}:s=${options.width}x${options.height}:r=${fps}:d=${ms(options.duration)}`;
919
+ if (hasAlpha) {
920
+ // Add format filter to ensure proper alpha channel handling
921
+ filterStr += ',format=yuva420p';
922
+ }
923
+ return new Filter([], [output], filterStr);
924
+ }
925
+ /**
926
+ * Creates an anullsrc filter to generate silent audio
927
+ * @param options - Audio parameters
928
+ * - duration: Duration in milliseconds
929
+ * - channel_layout: Audio channel layout (default: 'stereo')
930
+ * - sample_rate: Sample rate in Hz (default: 48000)
931
+ * @returns Filter with audio output
932
+ */
933
+ function makeAnullsrc(options) {
934
+ const output = {
935
+ tag: (0, label_generator_1.getLabel)(),
936
+ isAudio: true,
937
+ };
938
+ const channelLayout = options.channel_layout ?? 'stereo';
939
+ const sampleRate = options.sample_rate ?? 48000;
940
+ const duration = options.duration;
941
+ // anullsrc generates infinite silence, so we trim to the desired duration
942
+ const filterStr = `anullsrc=channel_layout=${channelLayout}:sample_rate=${sampleRate},atrim=duration=${ms(duration)},asetpts=PTS-STARTPTS`;
943
+ return new Filter([], [output], filterStr);
944
+ }
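// Illustrative sketch (not part of the package): a 3 s blank segment is a pair of generated
// streams, a black color source for video plus matching silence for audio.
const demoBlankVideo = makeColor({ duration: 3000, width: 1920, height: 1080 });
const demoBlankAudio = makeAnullsrc({ duration: 3000 });
// demoBlankVideo.body === 'color=c=black:s=1920x1080:r=30:d=3000ms'
// demoBlankAudio.body === 'anullsrc=channel_layout=stereo:sample_rate=48000,atrim=duration=3000ms,asetpts=PTS-STARTPTS'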
945
+ /**
946
+ * Creates an amix filter to mix multiple audio streams
947
+ * @param inputs - Input stream labels (must all be audio)
948
+ * @param options - Mix parameters
949
+ * - duration: Output duration mode ('longest', 'shortest', 'first', default: 'longest')
950
+ * - dropout_transition: Transition time when input ends in seconds (default: 2)
951
+ * - weights: Array of weights for each input (e.g., [1, 0.5] makes second input quieter)
952
+ * - normalize: If true, automatically normalize weights to prevent clipping (default: true)
953
+ */
954
+ function makeAmix(inputs, options = {}) {
955
+ if (inputs.length < 2) {
956
+ throw new Error('makeAmix: requires at least 2 input streams');
957
+ }
958
+ // Validate that all inputs are audio
959
+ for (const input of inputs) {
960
+ if (!input.isAudio) {
961
+ throw new Error(`makeAmix: all inputs must be audio, got video (tag: ${input.tag})`);
962
+ }
963
+ }
964
+ const output = {
965
+ tag: (0, label_generator_1.getLabel)(),
966
+ isAudio: true,
967
+ };
968
+ const params = [];
969
+ params.push(`inputs=${inputs.length}`);
970
+ if (options.duration) {
971
+ params.push(`duration=${options.duration}`);
972
+ }
973
+ if (options.dropout_transition !== undefined) {
974
+ params.push(`dropout_transition=${options.dropout_transition}`);
975
+ }
976
+ if (options.weights && options.weights.length > 0) {
977
+ // Ensure weights array matches inputs length
978
+ const weights = options.weights.length === inputs.length
979
+ ? options.weights
980
+ : [
981
+ ...options.weights,
982
+ ...Array(inputs.length - options.weights.length).fill(1),
983
+ ];
984
+ params.push(`weights=${weights.join(' ')}`);
985
+ }
986
+ if (options.normalize !== undefined) {
987
+ params.push(`normalize=${options.normalize ? '1' : '0'}`);
988
+ }
989
+ return new Filter(inputs, [output], `amix=${params.join(':')}`);
990
+ }
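// Illustrative sketch (not part of the package): duck background music under a voice track;
// a weights array shorter than the inputs would be padded with 1s by the code above.
const demoAmix = makeAmix(
    [{ tag: 'voice', isAudio: true }, { tag: 'music', isAudio: true }],
    { duration: 'first', weights: [1, 0.3] }
);
// demoAmix.body === 'amix=inputs=2:duration=first:weights=1 0.3'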
991
+ /**
992
+ * Wraps a label in brackets
993
+ */
994
+ function wrap(label) {
995
+ return `[${label}]`;
996
+ }
997
+ //# sourceMappingURL=ffmpeg.js.map