simple-ffmpegjs 0.3.5 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,76 @@
1
1
  const fs = require("fs");
2
+ const { detectVisualGaps } = require("./gaps");
3
+
4
// ========================================================================
// FFmpeg named colors (X11/CSS color names accepted by libavutil)
// This list is extremely stable — identical across FFmpeg versions.
// Reference: https://ffmpeg.org/ffmpeg-utils.html#Color
// ========================================================================
const FFMPEG_NAMED_COLORS = new Set([
  "aliceblue", "antiquewhite", "aqua", "aquamarine", "azure",
  "beige", "bisque", "black", "blanchedalmond", "blue",
  "blueviolet", "brown", "burlywood", "cadetblue", "chartreuse",
  "chocolate", "coral", "cornflowerblue", "cornsilk", "crimson",
  "cyan", "darkblue", "darkcyan", "darkgoldenrod", "darkgray",
  "darkgreen", "darkgrey", "darkkhaki", "darkmagenta", "darkolivegreen",
  "darkorange", "darkorchid", "darkred", "darksalmon", "darkseagreen",
  "darkslateblue", "darkslategray", "darkslategrey", "darkturquoise", "darkviolet",
  "deeppink", "deepskyblue", "dimgray", "dimgrey", "dodgerblue",
  "firebrick", "floralwhite", "forestgreen", "fuchsia", "gainsboro",
  "ghostwhite", "gold", "goldenrod", "gray", "green",
  "greenyellow", "grey", "honeydew", "hotpink", "indianred",
  "indigo", "ivory", "khaki", "lavender", "lavenderblush",
  "lawngreen", "lemonchiffon", "lightblue", "lightcoral", "lightcyan",
  "lightgoldenrodyellow", "lightgray", "lightgreen", "lightgrey", "lightpink",
  "lightsalmon", "lightseagreen", "lightskyblue", "lightslategray", "lightslategrey",
  "lightsteelblue", "lightyellow", "lime", "limegreen", "linen",
  "magenta", "maroon", "mediumaquamarine", "mediumblue", "mediumorchid",
  "mediumpurple", "mediumseagreen", "mediumslateblue", "mediumspringgreen", "mediumturquoise",
  "mediumvioletred", "midnightblue", "mintcream", "mistyrose", "moccasin",
  "navajowhite", "navy", "oldlace", "olive", "olivedrab",
  "orange", "orangered", "orchid", "palegoldenrod", "palegreen",
  "paleturquoise", "palevioletred", "papayawhip", "peachpuff", "peru",
  "pink", "plum", "powderblue", "purple", "red",
  "rosybrown", "royalblue", "saddlebrown", "salmon", "sandybrown",
  "seagreen", "seashell", "sienna", "silver", "skyblue",
  "slateblue", "slategray", "slategrey", "snow", "springgreen",
  "steelblue", "tan", "teal", "thistle", "tomato",
  "turquoise", "violet", "wheat", "white", "whitesmoke",
  "yellow", "yellowgreen",
]);

// Hex patterns accepted by FFmpeg: #RGB, #RRGGBB, #RRGGBBAA, 0xRRGGBB, 0xRRGGBBAA
const HEX_COLOR_RE = /^(#[0-9a-fA-F]{3}|#[0-9a-fA-F]{6}|#[0-9a-fA-F]{8}|0x[0-9a-fA-F]{6}|0x[0-9a-fA-F]{8})$/;

/**
 * Check whether a string is a valid FFmpeg color value.
 *
 * Accepted formats:
 *   - Named colors (case-insensitive): "black", "Red", "DarkSlateGray", …
 *   - Hex: #RGB, #RRGGBB, #RRGGBBAA, 0xRRGGBB, 0xRRGGBBAA
 *   - Special keyword: "random"
 *   - Any of the above with an @alpha suffix: "white@0.5", "#FF0000@0.8"
 *
 * @param {string} value - Candidate color string.
 * @returns {boolean} true when FFmpeg would accept the value.
 */
function isValidFFmpegColor(value) {
  if (typeof value !== "string" || value.length === 0) return false;

  // Strip optional @alpha suffix (e.g. "white@0.5", "#FF0000@0.8").
  // atIdx > 0 so a leading "@" (no base color) is never treated as a suffix.
  let color = value;
  const atIdx = value.indexOf("@");
  if (atIdx > 0) {
    const alphaPart = value.slice(atIdx + 1);
    // Fix: reject an empty alpha ("red@"). Number("") coerces to 0, which
    // previously slipped through the finite/range checks below.
    if (alphaPart.length === 0) return false;
    const alpha = Number(alphaPart);
    if (!Number.isFinite(alpha) || alpha < 0 || alpha > 1) return false;
    color = value.slice(0, atIdx);
  }

  if (color === "random") return true;
  if (HEX_COLOR_RE.test(color)) return true;
  return FFMPEG_NAMED_COLORS.has(color.toLowerCase());
}
2
74
 
3
75
  /**
4
76
  * Error/warning codes for programmatic handling
@@ -34,6 +106,172 @@ function createIssue(code, path, message, received = undefined) {
34
106
  return issue;
35
107
  }
36
108
 
109
// Recognised values for an effect clip's `effect` field (see validateEffectClip).
const EFFECT_TYPES = ["vignette", "filmGrain", "gaussianBlur", "colorAdjust"];
// Recognised easing-curve names for an effect clip's optional `easing` field.
const EFFECT_EASING = ["linear", "ease-in", "ease-out", "ease-in-out"];
111
+
112
/**
 * Report a validation issue unless `value` is a finite number within the
 * optional [min, max] bounds. Issues are appended to `errors`; nothing is
 * returned. At most one issue is reported per call (type check first, then
 * the lower bound, then the upper bound).
 *
 * @param {*} value - Candidate value.
 * @param {string} path - Dot-path used in the reported issue.
 * @param {Array} errors - Mutable collector for issues.
 * @param {Object} [opts]
 * @param {?number} [opts.min=null] - Lower bound; null means unbounded.
 * @param {?number} [opts.max=null] - Upper bound; null means unbounded.
 * @param {boolean} [opts.minInclusive=true] - Whether `min` itself is allowed.
 * @param {boolean} [opts.maxInclusive=true] - Whether `max` itself is allowed.
 */
function validateFiniteNumber(value, path, errors, opts = {}) {
  const { min = null, max = null, minInclusive = true, maxInclusive = true } = opts;

  const isFiniteNumber = typeof value === "number" && Number.isFinite(value);
  if (!isFiniteNumber) {
    errors.push(
      createIssue(ValidationCodes.INVALID_VALUE, path, "Must be a finite number", value)
    );
    return;
  }

  if (min != null) {
    const belowMin = minInclusive ? value < min : value <= min;
    if (belowMin) {
      const msg = minInclusive ? `Must be >= ${min}` : `Must be > ${min}`;
      errors.push(createIssue(ValidationCodes.INVALID_RANGE, path, msg, value));
      return;
    }
  }

  if (max != null) {
    const aboveMax = maxInclusive ? value > max : value >= max;
    if (aboveMax) {
      const msg = maxInclusive ? `Must be <= ${max}` : `Must be < ${max}`;
      errors.push(createIssue(ValidationCodes.INVALID_RANGE, path, msg, value));
    }
  }
}
153
+
154
/**
 * Validate the shape of an effect clip: effect name, fade timing, easing,
 * and per-effect `params`. All problems are reported by pushing issues onto
 * `errors`; nothing is returned. Validation stops early only when `params`
 * is missing or not a plain object, since the remaining checks need it.
 *
 * @param {Object} clip - Candidate effect clip.
 * @param {string} path - Dot-path prefix for reported issues.
 * @param {Array} errors - Mutable collector for issues.
 */
function validateEffectClip(clip, path, errors) {
  if (!EFFECT_TYPES.includes(clip.effect)) {
    errors.push(
      createIssue(
        ValidationCodes.INVALID_VALUE,
        `${path}.effect`,
        `Invalid effect '${clip.effect}'. Expected: ${EFFECT_TYPES.join(", ")}`,
        clip.effect
      )
    );
  }

  // Fade durations, when present, must be non-negative finite numbers.
  if (clip.fadeIn != null) {
    validateFiniteNumber(clip.fadeIn, `${path}.fadeIn`, errors, { min: 0 });
  }
  if (clip.fadeOut != null) {
    validateFiniteNumber(clip.fadeOut, `${path}.fadeOut`, errors, { min: 0 });
  }

  if (clip.easing != null && !EFFECT_EASING.includes(clip.easing)) {
    errors.push(
      createIssue(
        ValidationCodes.INVALID_VALUE,
        `${path}.easing`,
        `Invalid easing '${clip.easing}'. Expected: ${EFFECT_EASING.join(", ")}`,
        clip.easing
      )
    );
  }

  // Combined fades must fit inside the clip's duration; the epsilon
  // tolerates floating-point rounding.
  const hasTimeline =
    typeof clip.position === "number" && typeof clip.end === "number";
  if (hasTimeline) {
    const duration = clip.end - clip.position;
    const fadeIn = clip.fadeIn || 0;
    const fadeOut = clip.fadeOut || 0;
    const fadeTotal = fadeIn + fadeOut;
    if (fadeTotal > duration + 1e-9) {
      errors.push(
        createIssue(
          ValidationCodes.INVALID_TIMELINE,
          `${path}`,
          `fadeIn + fadeOut (${fadeTotal}) must be <= clip duration (${duration})`,
          { fadeIn, fadeOut, duration }
        )
      );
    }
  }

  const params = clip.params;
  const paramsIsPlainObject =
    params != null && typeof params === "object" && !Array.isArray(params);
  if (!paramsIsPlainObject) {
    errors.push(
      createIssue(
        ValidationCodes.MISSING_REQUIRED,
        `${path}.params`,
        "params is required and must be an object for effect clips",
        params
      )
    );
    // Without a params object there is nothing further to check.
    return;
  }

  // `amount` is a shared 0..1 intensity knob, checked for every effect type.
  if (params.amount != null) {
    validateFiniteNumber(params.amount, `${path}.params.amount`, errors, {
      min: 0,
      max: 1,
    });
  }

  switch (clip.effect) {
    case "vignette":
      if (params.angle != null) {
        // Upper bound is 2π — presumably the angle is in radians; confirm
        // against the filter-building code.
        validateFiniteNumber(params.angle, `${path}.params.angle`, errors, {
          min: 0,
          max: 6.283185307179586,
        });
      }
      break;

    case "filmGrain":
      if (params.temporal != null && typeof params.temporal !== "boolean") {
        errors.push(
          createIssue(
            ValidationCodes.INVALID_VALUE,
            `${path}.params.temporal`,
            "temporal must be a boolean",
            params.temporal
          )
        );
      }
      break;

    case "gaussianBlur":
      if (params.sigma != null) {
        validateFiniteNumber(params.sigma, `${path}.params.sigma`, errors, {
          min: 0,
          max: 100,
        });
      }
      break;

    case "colorAdjust":
      if (params.brightness != null) {
        validateFiniteNumber(params.brightness, `${path}.params.brightness`, errors, {
          min: -1,
          max: 1,
        });
      }
      if (params.contrast != null) {
        validateFiniteNumber(params.contrast, `${path}.params.contrast`, errors, {
          min: 0,
          max: 3,
        });
      }
      if (params.saturation != null) {
        validateFiniteNumber(params.saturation, `${path}.params.saturation`, errors, {
          min: 0,
          max: 3,
        });
      }
      if (params.gamma != null) {
        validateFiniteNumber(params.gamma, `${path}.params.gamma`, errors, {
          min: 0.1,
          max: 10,
        });
      }
      break;

    default:
      // Unknown effect already reported above; no param checks apply.
      break;
  }
}
274
+
37
275
  /**
38
276
  * Validate a single clip and return issues
39
277
  */
@@ -52,6 +290,8 @@ function validateClip(clip, index, options = {}) {
52
290
  "backgroundAudio",
53
291
  "image",
54
292
  "subtitle",
293
+ "color",
294
+ "effect",
55
295
  ];
56
296
 
57
297
  // Check type
@@ -123,7 +363,7 @@ function validateClip(clip, index, options = {}) {
123
363
  }
124
364
 
125
365
  // Types that require position/end on timeline
126
- const requiresTimeline = ["video", "audio", "text", "image"].includes(
366
+ const requiresTimeline = ["video", "audio", "text", "image", "color", "effect"].includes(
127
367
  clip.type
128
368
  );
129
369
 
@@ -525,6 +765,29 @@ function validateClip(clip, index, options = {}) {
525
765
  );
526
766
  }
527
767
  }
768
+
769
+ // Validate text clip color properties
770
+ const textColorProps = [
771
+ "fontColor",
772
+ "borderColor",
773
+ "shadowColor",
774
+ "backgroundColor",
775
+ "highlightColor",
776
+ ];
777
+ for (const prop of textColorProps) {
778
+ if (clip[prop] != null && typeof clip[prop] === "string") {
779
+ if (!isValidFFmpegColor(clip[prop])) {
780
+ warnings.push(
781
+ createIssue(
782
+ ValidationCodes.INVALID_VALUE,
783
+ `${path}.${prop}`,
784
+ `Invalid color "${clip[prop]}". Use a named color (e.g. "white", "red"), hex (#RRGGBB), or color@alpha (e.g. "black@0.5").`,
785
+ clip[prop]
786
+ )
787
+ );
788
+ }
789
+ }
790
+ }
528
791
  }
529
792
 
530
793
  // Subtitle clip validation
@@ -583,6 +846,23 @@ function validateClip(clip, index, options = {}) {
583
846
  )
584
847
  );
585
848
  }
849
+
850
+ // Validate subtitle color properties
851
+ const subtitleColorProps = ["fontColor", "borderColor"];
852
+ for (const prop of subtitleColorProps) {
853
+ if (clip[prop] != null && typeof clip[prop] === "string") {
854
+ if (!isValidFFmpegColor(clip[prop])) {
855
+ warnings.push(
856
+ createIssue(
857
+ ValidationCodes.INVALID_VALUE,
858
+ `${path}.${prop}`,
859
+ `Invalid color "${clip[prop]}". Use a named color (e.g. "white", "red"), hex (#RRGGBB), or color@alpha (e.g. "black@0.5").`,
860
+ clip[prop]
861
+ )
862
+ );
863
+ }
864
+ }
865
+ }
586
866
  }
587
867
 
588
868
  // Image clip validation
@@ -595,9 +875,13 @@ function validateClip(clip, index, options = {}) {
595
875
  "pan-right",
596
876
  "pan-up",
597
877
  "pan-down",
878
+ "smart",
879
+ "custom",
598
880
  ];
599
881
  const kbType =
600
- typeof clip.kenBurns === "string" ? clip.kenBurns : clip.kenBurns.type;
882
+ typeof clip.kenBurns === "string"
883
+ ? clip.kenBurns
884
+ : clip.kenBurns.type;
601
885
  if (kbType && !validKenBurns.includes(kbType)) {
602
886
  errors.push(
603
887
  createIssue(
@@ -611,6 +895,105 @@ function validateClip(clip, index, options = {}) {
611
895
  );
612
896
  }
613
897
 
898
+ if (typeof clip.kenBurns === "object") {
899
+ const {
900
+ anchor,
901
+ easing,
902
+ startZoom,
903
+ endZoom,
904
+ startX,
905
+ startY,
906
+ endX,
907
+ endY,
908
+ } =
909
+ clip.kenBurns;
910
+ if (anchor !== undefined) {
911
+ const validAnchors = ["top", "bottom", "left", "right"];
912
+ if (!validAnchors.includes(anchor)) {
913
+ errors.push(
914
+ createIssue(
915
+ ValidationCodes.INVALID_VALUE,
916
+ `${path}.kenBurns.anchor`,
917
+ `Invalid kenBurns anchor '${anchor}'. Expected: ${validAnchors.join(
918
+ ", "
919
+ )}`,
920
+ anchor
921
+ )
922
+ );
923
+ }
924
+ }
925
+
926
+ if (easing !== undefined) {
927
+ const validEasing = ["linear", "ease-in", "ease-out", "ease-in-out"];
928
+ if (!validEasing.includes(easing)) {
929
+ errors.push(
930
+ createIssue(
931
+ ValidationCodes.INVALID_VALUE,
932
+ `${path}.kenBurns.easing`,
933
+ `Invalid kenBurns easing '${easing}'. Expected: ${validEasing.join(
934
+ ", "
935
+ )}`,
936
+ easing
937
+ )
938
+ );
939
+ }
940
+ }
941
+
942
+ const numericFields = [
943
+ ["startZoom", startZoom],
944
+ ["endZoom", endZoom],
945
+ ["startX", startX],
946
+ ["startY", startY],
947
+ ["endX", endX],
948
+ ["endY", endY],
949
+ ];
950
+
951
+ numericFields.forEach(([field, value]) => {
952
+ if (value === undefined) {
953
+ return;
954
+ }
955
+ if (typeof value !== "number" || !Number.isFinite(value)) {
956
+ errors.push(
957
+ createIssue(
958
+ ValidationCodes.INVALID_TYPE,
959
+ `${path}.kenBurns.${field}`,
960
+ `kenBurns.${field} must be a finite number`,
961
+ value
962
+ )
963
+ );
964
+ return;
965
+ }
966
+
967
+ if ((field === "startZoom" || field === "endZoom") && value <= 0) {
968
+ errors.push(
969
+ createIssue(
970
+ ValidationCodes.INVALID_RANGE,
971
+ `${path}.kenBurns.${field}`,
972
+ `kenBurns.${field} must be > 0`,
973
+ value
974
+ )
975
+ );
976
+ }
977
+
978
+ if (
979
+ (field === "startX" ||
980
+ field === "startY" ||
981
+ field === "endX" ||
982
+ field === "endY") &&
983
+ (value < 0 || value > 1)
984
+ ) {
985
+ errors.push(
986
+ createIssue(
987
+ ValidationCodes.INVALID_RANGE,
988
+ `${path}.kenBurns.${field}`,
989
+ `kenBurns.${field} must be between 0 and 1`,
990
+ value
991
+ )
992
+ );
993
+ }
994
+ });
995
+ }
996
+
614
997
  // Check if image dimensions are provided and sufficient for project dimensions
615
998
  // By default, undersized images are upscaled automatically (with a warning)
616
999
  // Set strictKenBurns: true to make this an error instead
@@ -651,8 +1034,95 @@ function validateClip(clip, index, options = {}) {
651
1034
  }
652
1035
  }
653
1036
 
654
- // Video transition validation
655
- if (clip.type === "video" && clip.transition) {
1037
+ // Color clip validation
1038
+ if (clip.type === "color") {
1039
+ if (clip.color == null) {
1040
+ errors.push(
1041
+ createIssue(
1042
+ ValidationCodes.MISSING_REQUIRED,
1043
+ `${path}.color`,
1044
+ "Color is required for color clips",
1045
+ clip.color
1046
+ )
1047
+ );
1048
+ } else if (typeof clip.color === "string") {
1049
+ if (!isValidFFmpegColor(clip.color)) {
1050
+ errors.push(
1051
+ createIssue(
1052
+ ValidationCodes.INVALID_VALUE,
1053
+ `${path}.color`,
1054
+ `Invalid color "${clip.color}". Use a named color (e.g. "black", "navy"), hex (#RRGGBB, 0xRRGGBB), or "random".`,
1055
+ clip.color
1056
+ )
1057
+ );
1058
+ }
1059
+ } else if (typeof clip.color === "object" && clip.color !== null) {
1060
+ const validGradientTypes = ["linear-gradient", "radial-gradient"];
1061
+ if (!clip.color.type || !validGradientTypes.includes(clip.color.type)) {
1062
+ errors.push(
1063
+ createIssue(
1064
+ ValidationCodes.INVALID_VALUE,
1065
+ `${path}.color.type`,
1066
+ `Invalid gradient type '${clip.color.type}'. Expected: ${validGradientTypes.join(", ")}`,
1067
+ clip.color.type
1068
+ )
1069
+ );
1070
+ }
1071
+ if (!Array.isArray(clip.color.colors) || clip.color.colors.length < 2) {
1072
+ errors.push(
1073
+ createIssue(
1074
+ ValidationCodes.INVALID_VALUE,
1075
+ `${path}.color.colors`,
1076
+ "Gradient colors must be an array of at least 2 color strings",
1077
+ clip.color.colors
1078
+ )
1079
+ );
1080
+ } else {
1081
+ clip.color.colors.forEach((c, ci) => {
1082
+ if (typeof c !== "string" || !isValidFFmpegColor(c)) {
1083
+ errors.push(
1084
+ createIssue(
1085
+ ValidationCodes.INVALID_VALUE,
1086
+ `${path}.color.colors[${ci}]`,
1087
+ `Invalid gradient color "${c}". Use a named color (e.g. "black", "navy"), hex (#RRGGBB), or "random".`,
1088
+ c
1089
+ )
1090
+ );
1091
+ }
1092
+ });
1093
+ }
1094
+ if (clip.color.direction != null) {
1095
+ const validDirections = ["vertical", "horizontal"];
1096
+ if (typeof clip.color.direction !== "number" && !validDirections.includes(clip.color.direction)) {
1097
+ errors.push(
1098
+ createIssue(
1099
+ ValidationCodes.INVALID_VALUE,
1100
+ `${path}.color.direction`,
1101
+ `Invalid gradient direction '${clip.color.direction}'. Expected: "vertical", "horizontal", or a number (angle in degrees)`,
1102
+ clip.color.direction
1103
+ )
1104
+ );
1105
+ }
1106
+ }
1107
+ } else {
1108
+ errors.push(
1109
+ createIssue(
1110
+ ValidationCodes.INVALID_VALUE,
1111
+ `${path}.color`,
1112
+ "Color must be a string (flat color) or an object (gradient spec)",
1113
+ clip.color
1114
+ )
1115
+ );
1116
+ }
1117
+ }
1118
+
1119
+ if (clip.type === "effect") {
1120
+ validateEffectClip(clip, path, errors);
1121
+ }
1122
+
1123
+ // Visual clip transition validation (video, image, color)
1124
+ const visualTypes = ["video", "image", "color"];
1125
+ if (visualTypes.includes(clip.type) && clip.transition) {
656
1126
  if (typeof clip.transition.duration !== "number") {
657
1127
  errors.push(
658
1128
  createIssue(
@@ -687,65 +1157,60 @@ function validateClip(clip, index, options = {}) {
687
1157
  }
688
1158
 
689
1159
  /**
690
- * Validate timeline gaps (visual continuity)
1160
+ * Validate timeline gaps (visual continuity).
1161
+ * Uses detectVisualGaps() from gaps.js as the single source of truth
1162
+ * for gap detection logic.
691
1163
  */
692
- function validateTimelineGaps(clips, options = {}) {
693
- const { fillGaps = "none" } = options;
1164
+ function validateTimelineGaps(clips) {
694
1165
  const errors = [];
695
1166
 
696
- // Skip gap checking if fillGaps is enabled
697
- if (fillGaps !== "none") {
1167
+ // Build clip objects with original indices for error messages
1168
+ const indexed = clips.map((c, i) => ({ ...c, _origIndex: i }));
1169
+ const gaps = detectVisualGaps(indexed);
1170
+
1171
+ if (gaps.length === 0) {
698
1172
  return { errors, warnings: [] };
699
1173
  }
700
1174
 
701
- // Get visual clips (video and image)
1175
+ // Build a sorted visual clip list so we can reference neighbours in messages
702
1176
  const visual = clips
703
1177
  .map((c, i) => ({ clip: c, index: i }))
704
- .filter(({ clip }) => clip.type === "video" || clip.type === "image")
1178
+ .filter(({ clip }) => clip.type === "video" || clip.type === "image" || clip.type === "color")
705
1179
  .filter(
706
1180
  ({ clip }) =>
707
1181
  typeof clip.position === "number" && typeof clip.end === "number"
708
1182
  )
709
1183
  .sort((a, b) => a.clip.position - b.clip.position);
710
1184
 
711
- if (visual.length === 0) {
712
- return { errors, warnings: [] };
713
- }
714
-
715
- const eps = 1e-3;
716
-
717
- // Check for leading gap
718
- if (visual[0].clip.position > eps) {
719
- errors.push(
720
- createIssue(
721
- ValidationCodes.TIMELINE_GAP,
722
- "timeline",
723
- `Gap at start of timeline [0, ${visual[0].clip.position.toFixed(
724
- 3
725
- )}s] - no video/image content. Use fillGaps: 'black' to auto-fill.`,
726
- { start: 0, end: visual[0].clip.position }
727
- )
728
- );
729
- }
1185
+ for (const gap of gaps) {
1186
+ const isLeading = gap.start === 0 || (visual.length > 0 && gap.end <= visual[0].clip.position + 1e-3);
730
1187
 
731
- // Check for gaps between clips
732
- for (let i = 1; i < visual.length; i++) {
733
- const prev = visual[i - 1].clip;
734
- const curr = visual[i].clip;
735
- const gapStart = prev.end;
736
- const gapEnd = curr.position;
1188
+ if (isLeading && gap.start < 1e-3) {
1189
+ errors.push(
1190
+ createIssue(
1191
+ ValidationCodes.TIMELINE_GAP,
1192
+ "timeline",
1193
+ `Gap at start of visual timeline [0, ${gap.end.toFixed(
1194
+ 3
1195
+ )}s]. If intentional, fill it with a { type: "color" } clip. Otherwise, start your first clip at position 0.`,
1196
+ { start: gap.start, end: gap.end }
1197
+ )
1198
+ );
1199
+ } else {
1200
+ // Find the surrounding clip indices for a helpful message
1201
+ const before = visual.filter(v => v.clip.end <= gap.start + 1e-3);
1202
+ const after = visual.filter(v => v.clip.position >= gap.end - 1e-3);
1203
+ const prevIdx = before.length > 0 ? before[before.length - 1].index : "?";
1204
+ const nextIdx = after.length > 0 ? after[0].index : "?";
737
1205
 
738
- if (gapEnd - gapStart > eps) {
739
1206
  errors.push(
740
1207
  createIssue(
741
1208
  ValidationCodes.TIMELINE_GAP,
742
1209
  "timeline",
743
- `Gap in timeline [${gapStart.toFixed(3)}s, ${gapEnd.toFixed(
1210
+ `Gap in visual timeline [${gap.start.toFixed(3)}s, ${gap.end.toFixed(
744
1211
  3
745
- )}s] between clips[${visual[i - 1].index}] and clips[${
746
- visual[i].index
747
- }]. Use fillGaps: 'black' to auto-fill.`,
748
- { start: gapStart, end: gapEnd }
1212
+ )}s] between clips[${prevIdx}] and clips[${nextIdx}]. If intentional, fill it with a { type: "color" } clip. Otherwise, adjust clip positions to remove the gap.`,
1213
+ { start: gap.start, end: gap.end }
749
1214
  )
750
1215
  );
751
1216
  }
@@ -760,7 +1225,6 @@ function validateTimelineGaps(clips, options = {}) {
760
1225
  * @param {Array} clips - Array of clip objects to validate
761
1226
  * @param {Object} options - Validation options
762
1227
  * @param {boolean} options.skipFileChecks - Skip file existence checks (useful for AI validation)
763
- * @param {string} options.fillGaps - Gap handling mode ('none' | 'black')
764
1228
  * @returns {Object} Validation result { valid, errors, warnings }
765
1229
  */
766
1230
  function validateConfig(clips, options = {}) {
@@ -845,4 +1309,6 @@ module.exports = {
845
1309
  validateConfig,
846
1310
  formatValidationResult,
847
1311
  ValidationCodes,
1312
+ isValidFFmpegColor,
1313
+ FFMPEG_NAMED_COLORS,
848
1314
  };
@@ -11,7 +11,9 @@ function buildAudioForVideoClips(project, videoClips, transitionOffsets) {
11
11
 
12
12
  videoClips.forEach((clip) => {
13
13
  if (!clip.hasAudio) return;
14
- const inputIndex = project.videoOrAudioClips.indexOf(clip);
14
+ const inputIndex = project._inputIndexMap
15
+ ? project._inputIndexMap.get(clip)
16
+ : project.videoOrAudioClips.indexOf(clip);
15
17
  const requestedDuration = Math.max(
16
18
  0,
17
19
  (clip.end || 0) - (clip.position || 0)
@@ -29,7 +29,9 @@ function buildBackgroundMusicMix(
29
29
  let filter = "";
30
30
  const bgLabels = [];
31
31
  backgroundClips.forEach((clip, i) => {
32
- const inputIndex = project.videoOrAudioClips.indexOf(clip);
32
+ const inputIndex = project._inputIndexMap
33
+ ? project._inputIndexMap.get(clip)
34
+ : project.videoOrAudioClips.indexOf(clip);
33
35
  const effectivePosition =
34
36
  typeof clip.position === "number" ? clip.position : 0;
35
37
  const effectiveEnd =