simple-ffmpegjs 0.3.2 → 0.3.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -298,6 +298,98 @@ Load clip descriptors into the project. Validates the timeline and reads media m
298
298
  await project.load(clips: Clip[]): Promise<void[]>
299
299
  ```
300
300
 
301
+ #### `SIMPLEFFMPEG.getDuration(clips)`
302
+
303
+ Calculate the total visual timeline duration from a clips array. Handles `duration` and auto-sequencing shorthand, and subtracts transition overlaps. Pure function — no file I/O.
304
+
305
+ ```ts
306
+ const clips = [
307
+ { type: "video", url: "./a.mp4", duration: 5 },
308
+ {
309
+ type: "video",
310
+ url: "./b.mp4",
311
+ duration: 10,
312
+ transition: { type: "fade", duration: 0.5 },
313
+ },
314
+ ];
315
+ SIMPLEFFMPEG.getDuration(clips); // 14.5
316
+ ```
317
+
318
+ Useful for computing text overlay timings or background music end times before calling `load()`.
319
+
320
+ **Duration and Auto-Sequencing:**
321
+
322
+ For video, image, and audio clips, you can use shorthand to avoid specifying explicit `position` and `end` values:
323
+
324
+ - **`duration`** — Use instead of `end`. The library computes `end = position + duration`. You cannot specify both `duration` and `end` on the same clip.
325
+ - **Omit `position`** — The clip is placed immediately after the previous clip on its track. Video and image clips share the visual track; audio clips have their own track. The first clip defaults to `position: 0`.
326
+
327
+ These can be combined:
328
+
329
+ ```ts
330
+ // Before: manual position/end for every clip
331
+ await project.load([
332
+ { type: "video", url: "./a.mp4", position: 0, end: 5 },
333
+ { type: "video", url: "./b.mp4", position: 5, end: 10 },
334
+ { type: "video", url: "./c.mp4", position: 10, end: 18, cutFrom: 3 },
335
+ ]);
336
+
337
+ // After: auto-sequencing + duration
338
+ await project.load([
339
+ { type: "video", url: "./a.mp4", duration: 5 },
340
+ { type: "video", url: "./b.mp4", duration: 5 },
341
+ { type: "video", url: "./c.mp4", duration: 8, cutFrom: 3 },
342
+ ]);
343
+ ```
344
+
345
+ You can mix explicit and implicit positioning freely. Clips with explicit `position` are placed there; subsequent auto-sequenced clips follow from the last clip's end:
346
+
347
+ ```ts
348
+ await project.load([
349
+ { type: "video", url: "./a.mp4", duration: 5 }, // position: 0, end: 5
350
+ { type: "video", url: "./b.mp4", position: 10, end: 15 }, // explicit gap
351
+ { type: "video", url: "./c.mp4", duration: 5 }, // position: 15, end: 20
352
+ ]);
353
+ ```
354
+
355
+ Text clips always require an explicit `position` (they're overlays on specific moments). Background music and subtitle clips already have optional `position`/`end` with their own defaults.
356
+
357
+ #### `SIMPLEFFMPEG.probe(filePath)`
358
+
359
+ Probe a media file and return comprehensive metadata using ffprobe. Works with video, audio, and image files.
360
+
361
+ ```ts
362
+ const info = await SIMPLEFFMPEG.probe("./video.mp4");
363
+ // {
364
+ // duration: 30.5, // seconds
365
+ // width: 1920, // pixels
366
+ // height: 1080, // pixels
367
+ // hasVideo: true,
368
+ // hasAudio: true,
369
+ // rotation: 0, // iPhone/mobile rotation
370
+ // videoCodec: "h264",
371
+ // audioCodec: "aac",
372
+ // format: "mov,mp4,m4a,3gp,3g2,mj2",
373
+ // fps: 30,
374
+ // size: 15728640, // bytes
375
+ // bitrate: 4125000, // bits/sec
376
+ // sampleRate: 48000, // Hz
377
+ // channels: 2 // stereo
378
+ // }
379
+ ```
380
+
381
+ Fields that don't apply to the file type are `null` (e.g. `width`/`height`/`videoCodec`/`fps` for audio-only files, `audioCodec`/`sampleRate`/`channels` for video-only files).
382
+
383
+ Throws `MediaNotFoundError` if the file cannot be found or probed.
384
+
385
+ ```ts
386
+ // Audio file
387
+ const audio = await SIMPLEFFMPEG.probe("./music.wav");
388
+ console.log(audio.hasVideo); // false
389
+ console.log(audio.duration); // 180.5
390
+ console.log(audio.sampleRate); // 44100
391
+ ```
392
+
301
393
  #### `project.export(options)`
302
394
 
303
395
  Build and execute the FFmpeg command to render the final video.
@@ -353,8 +445,9 @@ await project.preview(options?: ExportOptions): Promise<{
353
445
  {
354
446
  type: "video";
355
447
  url: string; // File path
356
- position: number; // Timeline start (seconds)
357
- end: number; // Timeline end (seconds)
448
+ position?: number; // Timeline start (seconds). Omit to auto-sequence after previous clip.
449
+ end?: number; // Timeline end (seconds). Use end OR duration, not both.
450
+ duration?: number; // Duration in seconds (alternative to end). end = position + duration.
358
451
  cutFrom?: number; // Source offset (default: 0)
359
452
  volume?: number; // Audio volume (default: 1)
360
453
  transition?: {
@@ -372,8 +465,9 @@ All [xfade transitions](https://trac.ffmpeg.org/wiki/Xfade) are supported.
372
465
  {
373
466
  type: "audio";
374
467
  url: string;
375
- position: number;
376
- end: number;
468
+ position?: number; // Omit to auto-sequence after previous audio clip
469
+ end?: number; // Use end OR duration, not both
470
+ duration?: number; // Duration in seconds (alternative to end)
377
471
  cutFrom?: number;
378
472
  volume?: number;
379
473
  }
@@ -412,8 +506,9 @@ await project.load([
412
506
  {
413
507
  type: "image";
414
508
  url: string;
415
- position: number;
416
- end: number;
509
+ position?: number; // Omit to auto-sequence after previous video/image clip
510
+ end?: number; // Use end OR duration, not both
511
+ duration?: number; // Duration in seconds (alternative to end)
417
512
  kenBurns?: "zoom-in" | "zoom-out" | "pan-left" | "pan-right" | "pan-up" | "pan-down";
418
513
  }
419
514
  ```
@@ -424,7 +519,8 @@ await project.load([
424
519
  {
425
520
  type: "text";
426
521
  position: number;
427
- end: number;
522
+ end?: number; // Use end OR duration, not both
523
+ duration?: number; // Duration in seconds (alternative to end)
428
524
 
429
525
  // Content
430
526
  text?: string;
@@ -601,6 +697,7 @@ The `onProgress` callback receives:
601
697
  ```ts
602
698
  {
603
699
  percent?: number; // 0-100
700
+ phase?: string; // "rendering" or "batching"
604
701
  timeProcessed?: number; // Seconds processed
605
702
  frame?: number; // Current frame
606
703
  fps?: number; // Processing speed
@@ -608,6 +705,23 @@ The `onProgress` callback receives:
608
705
  }
609
706
  ```
610
707
 
708
+ The `phase` field indicates what the export is doing:
709
+
710
+ - `"rendering"` — main video export (includes `percent`, `frame`, etc.)
711
+ - `"batching"` — text overlay passes are running (fired once when batching starts)
712
+
713
+ Use `phase` to update your UI when the export hits 100% but still has work to do:
714
+
715
+ ```ts
716
+ onProgress: ({ percent, phase }) => {
717
+ if (phase === "batching") {
718
+ console.log("Applying text overlays...");
719
+ } else {
720
+ console.log(`${percent}%`);
721
+ }
722
+ }
723
+ ```
724
+
611
725
  ### Error Handling
612
726
 
613
727
  The library provides custom error classes for structured error handling:
@@ -700,31 +814,15 @@ await project.load([
700
814
 
701
815
  ```ts
702
816
  await project.load([
703
- {
704
- type: "image",
705
- url: "./photo1.jpg",
706
- position: 0,
707
- end: 3,
708
- kenBurns: "zoom-in",
709
- },
710
- {
711
- type: "image",
712
- url: "./photo2.jpg",
713
- position: 3,
714
- end: 6,
715
- kenBurns: "pan-right",
716
- },
717
- {
718
- type: "image",
719
- url: "./photo3.jpg",
720
- position: 6,
721
- end: 9,
722
- kenBurns: "zoom-out",
723
- },
817
+ { type: "image", url: "./photo1.jpg", duration: 3, kenBurns: "zoom-in" },
818
+ { type: "image", url: "./photo2.jpg", duration: 3, kenBurns: "pan-right" },
819
+ { type: "image", url: "./photo3.jpg", duration: 3, kenBurns: "zoom-out" },
724
820
  { type: "music", url: "./music.mp3", volume: 0.3 },
725
821
  ]);
726
822
  ```
727
823
 
824
+ When `position` is omitted, clips are placed sequentially — each one starts where the previous one ended. `duration` is an alternative to `end`: the library computes `end = position + duration`. The explicit form (`position: 0, end: 3`) still works identically.
825
+
728
826
  > **Note:** Ken Burns effects work best with images at least as large as your output resolution. Smaller images are automatically upscaled (with a validation warning). Use `strictKenBurns: true` in validation options to enforce size requirements instead.
729
827
 
730
828
  ### Text & Animations
@@ -952,16 +1050,19 @@ async function generateListingVideo(listing, outputPath) {
952
1050
  const photos = listing.photos; // ['kitchen.jpg', 'living-room.jpg', ...]
953
1051
  const slideDuration = 4;
954
1052
 
955
- // Build an image slideshow from listing photos
1053
+ // Build an image slideshow from listing photos (auto-sequenced with crossfades)
1054
+ const transitionDuration = 0.5;
956
1055
  const photoClips = photos.map((photo, i) => ({
957
1056
  type: "image",
958
1057
  url: photo,
959
- position: i * slideDuration,
960
- end: (i + 1) * slideDuration,
1058
+ duration: slideDuration,
961
1059
  kenBurns: i % 2 === 0 ? "zoom-in" : "pan-right",
1060
+ ...(i > 0 && {
1061
+ transition: { type: "fade", duration: transitionDuration },
1062
+ }),
962
1063
  }));
963
1064
 
964
- const totalDuration = photos.length * slideDuration;
1065
+ const totalDuration = SIMPLEFFMPEG.getDuration(photoClips);
965
1066
 
966
1067
  const clips = [
967
1068
  ...photoClips,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "simple-ffmpegjs",
3
- "version": "0.3.2",
3
+ "version": "0.3.3",
4
4
  "description": "Declarative video composition for Node.js — define clips, transitions, text, and audio as simple objects, and let FFmpeg handle the rest.",
5
5
  "author": "Brayden Blackwell <braydenblackwell21@gmail.com> (https://github.com/Fats403)",
6
6
  "license": "MIT",
@@ -57,101 +57,138 @@ function runFFprobe(args, timeoutMs = DEFAULT_FFPROBE_TIMEOUT_MS) {
57
57
  }
58
58
 
59
59
  /**
60
- * Get video metadata using ffprobe
61
- * @param {string} url - Path to the video file
62
- * @returns {Promise<{iphoneRotation: number, hasAudio: boolean, width: number|null, height: number|null, durationSec: number|null}>}
63
- * @throws {MediaNotFoundError} If the file cannot be probed
60
+ * Parse a fraction string like "30000/1001" or "30/1" into a number.
61
+ * Returns null if the input is not a valid fraction.
62
+ * @param {string} fraction
63
+ * @returns {number|null}
64
64
  */
65
- function getVideoMetadata(url) {
66
- return runFFprobe([
67
- "-v",
68
- "error",
69
- "-show_streams",
70
- "-show_format",
71
- "-of",
72
- "json",
73
- url,
74
- ])
75
- .then((stdout) => {
76
- let metadata;
77
- try {
78
- metadata = JSON.parse(stdout);
79
- } catch (parseError) {
80
- throw new Error(
81
- `Invalid JSON response from ffprobe: ${parseError.message}`
82
- );
83
- }
84
-
85
- // Validate metadata structure
86
- if (!metadata || !Array.isArray(metadata.streams)) {
87
- throw new Error(
88
- "Invalid metadata structure: missing or invalid 'streams' array"
89
- );
90
- }
91
-
92
- const videoStream = metadata.streams.find(
93
- (s) => s.codec_type === "video"
94
- );
95
- const hasAudio = metadata.streams.some((s) => s.codec_type === "audio");
96
- const iphoneRotation = videoStream?.side_data_list?.[0]?.rotation
97
- ? videoStream.side_data_list[0].rotation
98
- : 0;
99
- const formatDuration = metadata.format?.duration
100
- ? parseFloat(metadata.format.duration)
101
- : null;
102
- const streamDuration = videoStream?.duration
103
- ? parseFloat(videoStream.duration)
104
- : null;
105
- const durationSec = Number.isFinite(formatDuration)
106
- ? formatDuration
107
- : Number.isFinite(streamDuration)
108
- ? streamDuration
109
- : null;
110
- return {
111
- iphoneRotation,
112
- hasAudio,
113
- width: videoStream?.width,
114
- height: videoStream?.height,
115
- durationSec,
116
- };
117
- })
118
- .catch((error) => {
119
- throw new MediaNotFoundError(
120
- `Failed to get video metadata for "${url}": ${error.message}`,
121
- { path: url }
122
- );
123
- });
65
/**
 * Parse a fraction string like "30000/1001" or "30/1" into a number.
 * Returns null if the input is not a valid fraction.
 * @param {string} fraction
 * @returns {number|null}
 */
function parseFraction(fraction) {
  // Reject non-strings and the empty string up front.
  if (typeof fraction !== "string" || fraction === "") return null;

  const pieces = fraction.split("/");
  if (pieces.length !== 2) return null;

  const numerator = parseFloat(pieces[0]);
  const denominator = parseFloat(pieces[1]);

  // Both halves must parse to finite numbers, and dividing by zero is invalid.
  if (!Number.isFinite(numerator) || !Number.isFinite(denominator)) return null;
  if (denominator === 0) return null;

  const value = numerator / denominator;
  return Number.isFinite(value) ? value : null;
}
125
75
 
126
76
  /**
127
- * Get media duration using ffprobe
128
- * @param {string} url - Path to the media file
129
- * @returns {Promise<number|null>} Duration in seconds, or null if unavailable
77
+ * Probe a media file and return comprehensive metadata.
78
+ *
79
+ * Returns a flat, user-friendly object with duration, dimensions, codecs,
80
+ * format, bitrate, audio details, and rotation info. All fields that are
81
+ * not applicable (e.g. width/height for audio-only files) are set to null.
82
+ *
83
+ * @param {string} filePath - Path to the media file
84
+ * @returns {Promise<{
85
+ * duration: number|null,
86
+ * width: number|null,
87
+ * height: number|null,
88
+ * hasVideo: boolean,
89
+ * hasAudio: boolean,
90
+ * rotation: number,
91
+ * videoCodec: string|null,
92
+ * audioCodec: string|null,
93
+ * format: string|null,
94
+ * fps: number|null,
95
+ * size: number|null,
96
+ * bitrate: number|null,
97
+ * sampleRate: number|null,
98
+ * channels: number|null
99
+ * }>}
130
100
  * @throws {MediaNotFoundError} If the file cannot be probed
131
101
  */
132
- function getMediaDuration(url) {
133
- return runFFprobe(["-v", "error", "-show_format", "-of", "json", url])
134
- .then((stdout) => {
135
- let metadata;
136
- try {
137
- metadata = JSON.parse(stdout);
138
- } catch (parseError) {
139
- throw new Error(
140
- `Invalid JSON response from ffprobe: ${parseError.message}`
141
- );
142
- }
143
-
144
- const formatDuration = metadata?.format?.duration
145
- ? parseFloat(metadata.format.duration)
146
- : null;
147
- return Number.isFinite(formatDuration) ? formatDuration : null;
148
- })
149
- .catch((error) => {
150
- throw new MediaNotFoundError(
151
- `Failed to get media duration for "${url}": ${error.message}`,
152
- { path: url }
153
- );
154
- });
102
/**
 * Probe a media file and return comprehensive metadata.
 *
 * Returns a flat, user-friendly object with duration, dimensions, codecs,
 * format, bitrate, audio details, and rotation info. All fields that are
 * not applicable (e.g. width/height for audio-only files) are set to null.
 *
 * @param {string} filePath - Path to the media file
 * @returns {Promise<{
 *   duration: number|null,
 *   width: number|null,
 *   height: number|null,
 *   hasVideo: boolean,
 *   hasAudio: boolean,
 *   rotation: number,
 *   videoCodec: string|null,
 *   audioCodec: string|null,
 *   format: string|null,
 *   fps: number|null,
 *   size: number|null,
 *   bitrate: number|null,
 *   sampleRate: number|null,
 *   channels: number|null
 * }>}
 * @throws {MediaNotFoundError} If the file cannot be probed
 */
async function probeMedia(filePath) {
  let stdout;
  try {
    stdout = await runFFprobe([
      "-v",
      "error",
      "-show_streams",
      "-show_format",
      "-of",
      "json",
      filePath,
    ]);
  } catch (error) {
    throw new MediaNotFoundError(
      `Failed to probe "${filePath}": ${error.message}`,
      { path: filePath }
    );
  }

  let metadata;
  try {
    metadata = JSON.parse(stdout);
  } catch (parseError) {
    throw new MediaNotFoundError(
      `Invalid JSON response from ffprobe for "${filePath}": ${parseError.message}`,
      { path: filePath }
    );
  }

  if (!metadata || !Array.isArray(metadata.streams)) {
    throw new MediaNotFoundError(
      `Invalid metadata structure for "${filePath}": missing or invalid 'streams' array`,
      { path: filePath }
    );
  }

  const videoStream = metadata.streams.find((s) => s.codec_type === "video");
  const audioStream = metadata.streams.find((s) => s.codec_type === "audio");
  const format = metadata.format || {};

  // ── Duration ────────────────────────────────────────────────────────────
  // Prefer the container duration, then the video stream, then the audio
  // stream. The audio-stream fallback is new: some audio-only containers
  // report duration only on the stream, not on the format — previously
  // those files would return duration: null.
  const durationCandidates = [
    format.duration ? parseFloat(format.duration) : null,
    videoStream?.duration ? parseFloat(videoStream.duration) : null,
    audioStream?.duration ? parseFloat(audioStream.duration) : null,
  ];
  const duration = durationCandidates.find(Number.isFinite) ?? null;

  // ── FPS ─────────────────────────────────────────────────────────────────
  // Prefer avg_frame_rate, fall back to r_frame_rate
  const fps =
    parseFraction(videoStream?.avg_frame_rate) ??
    parseFraction(videoStream?.r_frame_rate) ??
    null;

  // ── Rotation ────────────────────────────────────────────────────────────
  // Scan side_data_list for the entry that carries rotation info — the
  // Display Matrix side data is not guaranteed to be the first entry.
  // (Previously only index 0 was inspected.)
  const rotationEntry = videoStream?.side_data_list?.find(
    (d) => d && d.rotation != null
  );
  const rotation = rotationEntry ? rotationEntry.rotation : 0;

  // ── Size & bitrate ─────────────────────────────────────────────────────
  const size = format.size ? parseInt(format.size, 10) : null;
  const bitrate = format.bit_rate ? parseInt(format.bit_rate, 10) : null;

  // ── Audio details ──────────────────────────────────────────────────────
  const sampleRate = audioStream?.sample_rate
    ? parseInt(audioStream.sample_rate, 10)
    : null;
  const channels =
    typeof audioStream?.channels === "number" ? audioStream.channels : null;

  return {
    duration,
    width: videoStream?.width ?? null,
    height: videoStream?.height ?? null,
    hasVideo: !!videoStream,
    hasAudio: !!audioStream,
    rotation,
    videoCodec: videoStream?.codec_name ?? null,
    audioCodec: audioStream?.codec_name ?? null,
    format: format.format_name ?? null,
    fps: Number.isFinite(fps) ? Math.round(fps * 100) / 100 : null,
    size: Number.isFinite(size) ? size : null,
    bitrate: Number.isFinite(bitrate) ? bitrate : null,
    sampleRate: Number.isFinite(sampleRate) ? sampleRate : null,
    channels,
  };
}
156
193
 
157
- module.exports = { getVideoMetadata, getMediaDuration };
194
+ module.exports = { probeMedia };
@@ -0,0 +1,96 @@
1
+ /**
2
+ * Clip resolution — transforms shorthand clip properties into canonical form.
3
+ *
4
+ * This runs BEFORE validation, so the rest of the pipeline always sees
5
+ * standard { position, end } clips. Two features are handled here:
6
+ *
7
+ * 1. **duration → end**: If a clip has `duration` instead of `end`,
8
+ * compute `end = position + duration`.
9
+ *
10
+ * 2. **Auto-sequential positioning**: If a video/image/audio clip omits
11
+ * `position`, it is placed immediately after the previous clip on the
12
+ * same track (visual or audio). The first clip defaults to position 0.
13
+ *
14
+ * Clips are shallow-cloned — the caller's original objects are not mutated.
15
+ */
16
+
17
+ /**
18
+ * Types that auto-sequence on the visual track (video + image share a timeline).
19
+ */
20
+ const VISUAL_TYPES = ["video", "image"];
21
+
22
+ /**
23
+ * Types that auto-sequence on the audio track.
24
+ */
25
+ const AUDIO_TYPES = ["audio"];
26
+
27
+ /**
28
+ * All types eligible for auto-sequencing (position can be omitted).
29
+ */
30
+ const AUTO_SEQUENCE_TYPES = [...VISUAL_TYPES, ...AUDIO_TYPES];
31
+
32
+ /**
33
+ * Resolve shorthand clip properties into canonical { position, end } form.
34
+ *
35
+ * @param {Array} clips - Array of clip objects (not mutated)
36
+ * @returns {{ clips: Array, errors: Array }} Resolved clips and any resolution errors
37
+ */
38
+ function resolveClips(clips) {
39
+ if (!Array.isArray(clips)) {
40
+ return { clips, errors: [] };
41
+ }
42
+
43
+ const errors = [];
44
+ let lastVisualEnd = 0;
45
+ let lastAudioEnd = 0;
46
+
47
+ const resolved = clips.map((clip, index) => {
48
+ const c = { ...clip };
49
+ const path = `clips[${index}]`;
50
+
51
+ // ── Conflict check: duration + end ──────────────────────────────────
52
+ if (c.duration != null && c.end != null) {
53
+ errors.push({
54
+ code: "INVALID_VALUE",
55
+ path: `${path}`,
56
+ message:
57
+ "Cannot specify both 'duration' and 'end'. Use one or the other.",
58
+ received: { duration: c.duration, end: c.end },
59
+ });
60
+ // Don't resolve further — let validation report the canonical errors
61
+ return c;
62
+ }
63
+
64
+ // ── Auto-sequential positioning ─────────────────────────────────────
65
+ const isVisual = VISUAL_TYPES.includes(c.type);
66
+ const isAudio = AUDIO_TYPES.includes(c.type);
67
+ const canAutoSequence = AUTO_SEQUENCE_TYPES.includes(c.type);
68
+
69
+ if (canAutoSequence && c.position == null) {
70
+ c.position = isVisual ? lastVisualEnd : lastAudioEnd;
71
+ }
72
+
73
+ // ── Duration → end ──────────────────────────────────────────────────
74
+ if (c.duration != null && c.end == null) {
75
+ if (typeof c.position === "number" && typeof c.duration === "number") {
76
+ c.end = c.position + c.duration;
77
+ }
78
+ // Remove duration so the rest of the pipeline sees canonical { position, end }
79
+ delete c.duration;
80
+ }
81
+
82
+ // ── Track the end of the last clip on each track ────────────────────
83
+ if (isVisual && typeof c.end === "number") {
84
+ lastVisualEnd = c.end;
85
+ }
86
+ if (isAudio && typeof c.end === "number") {
87
+ lastAudioEnd = c.end;
88
+ }
89
+
90
+ return c;
91
+ });
92
+
93
+ return { clips: resolved, errors };
94
+ }
95
+
96
+ module.exports = { resolveClips };
@@ -79,6 +79,49 @@ function validateClip(clip, index, options = {}) {
79
79
  return { errors, warnings }; // Can't validate further with invalid type
80
80
  }
81
81
 
82
+ // Validate duration field if present (applies to all clip types)
83
+ if (clip.duration != null) {
84
+ if (typeof clip.duration !== "number") {
85
+ errors.push(
86
+ createIssue(
87
+ ValidationCodes.INVALID_VALUE,
88
+ `${path}.duration`,
89
+ "Duration must be a number",
90
+ clip.duration
91
+ )
92
+ );
93
+ } else if (!Number.isFinite(clip.duration)) {
94
+ errors.push(
95
+ createIssue(
96
+ ValidationCodes.INVALID_VALUE,
97
+ `${path}.duration`,
98
+ "Duration must be a finite number (not NaN or Infinity)",
99
+ clip.duration
100
+ )
101
+ );
102
+ } else if (clip.duration <= 0) {
103
+ errors.push(
104
+ createIssue(
105
+ ValidationCodes.INVALID_RANGE,
106
+ `${path}.duration`,
107
+ "Duration must be greater than 0",
108
+ clip.duration
109
+ )
110
+ );
111
+ }
112
+ // Conflict check: duration + end both set
113
+ if (clip.end != null) {
114
+ errors.push(
115
+ createIssue(
116
+ ValidationCodes.INVALID_VALUE,
117
+ `${path}`,
118
+ "Cannot specify both 'duration' and 'end'. Use one or the other.",
119
+ { duration: clip.duration, end: clip.end }
120
+ )
121
+ );
122
+ }
123
+ }
124
+
82
125
  // Types that require position/end on timeline
83
126
  const requiresTimeline = ["video", "audio", "text", "image"].includes(
84
127
  clip.type
package/src/lib/utils.js CHANGED
@@ -126,6 +126,7 @@ function runFFmpeg({ command, totalDuration = 0, onProgress, signal }) {
126
126
  if (onProgress && typeof onProgress === "function") {
127
127
  const progress = parseFFmpegProgress(chunk, totalDuration);
128
128
  if (Object.keys(progress).length > 0) {
129
+ progress.phase = "rendering";
129
130
  onProgress(progress);
130
131
  }
131
132
  }
package/src/loaders.js CHANGED
@@ -1,15 +1,15 @@
1
1
  const fs = require("fs");
2
2
  const path = require("path");
3
- const { getVideoMetadata, getMediaDuration } = require("./core/media_info");
3
+ const { probeMedia } = require("./core/media_info");
4
4
  const { ValidationError, MediaNotFoundError } = require("./core/errors");
5
5
  const C = require("./core/constants");
6
6
 
7
7
  async function loadVideo(project, clipObj) {
8
- const metadata = await getVideoMetadata(clipObj.url);
9
- if (typeof clipObj.cutFrom === "number" && metadata.durationSec != null) {
10
- if (clipObj.cutFrom >= metadata.durationSec) {
8
+ const metadata = await probeMedia(clipObj.url);
9
+ if (typeof clipObj.cutFrom === "number" && metadata.duration != null) {
10
+ if (clipObj.cutFrom >= metadata.duration) {
11
11
  throw new ValidationError(
12
- `Video clip cutFrom (${clipObj.cutFrom}s) must be < source duration (${metadata.durationSec}s)`,
12
+ `Video clip cutFrom (${clipObj.cutFrom}s) must be < source duration (${metadata.duration}s)`,
13
13
  {
14
14
  errors: [
15
15
  {
@@ -26,10 +26,10 @@ async function loadVideo(project, clipObj) {
26
26
  typeof clipObj.position === "number" &&
27
27
  typeof clipObj.end === "number" &&
28
28
  typeof clipObj.cutFrom === "number" &&
29
- metadata.durationSec != null
29
+ metadata.duration != null
30
30
  ) {
31
31
  const requestedDuration = Math.max(0, clipObj.end - clipObj.position);
32
- const maxAvailable = Math.max(0, metadata.durationSec - clipObj.cutFrom);
32
+ const maxAvailable = Math.max(0, metadata.duration - clipObj.cutFrom);
33
33
  if (requestedDuration > maxAvailable) {
34
34
  const clampedEnd = clipObj.position + maxAvailable;
35
35
  console.warn(
@@ -42,14 +42,15 @@ async function loadVideo(project, clipObj) {
42
42
  }
43
43
  project.videoOrAudioClips.push({
44
44
  ...clipObj,
45
- iphoneRotation: metadata.iphoneRotation,
45
+ iphoneRotation: metadata.rotation,
46
46
  hasAudio: metadata.hasAudio,
47
- mediaDuration: metadata.durationSec,
47
+ mediaDuration: metadata.duration,
48
48
  });
49
49
  }
50
50
 
51
51
  async function loadAudio(project, clipObj) {
52
- const durationSec = await getMediaDuration(clipObj.url);
52
+ const metadata = await probeMedia(clipObj.url);
53
+ const durationSec = metadata.duration;
53
54
  if (typeof clipObj.cutFrom === "number" && durationSec != null) {
54
55
  if (clipObj.cutFrom >= durationSec) {
55
56
  throw new ValidationError(
@@ -93,7 +94,8 @@ function loadImage(project, clipObj) {
93
94
  }
94
95
 
95
96
  async function loadBackgroundAudio(project, clipObj) {
96
- const durationSec = await getMediaDuration(clipObj.url);
97
+ const metadata = await probeMedia(clipObj.url);
98
+ const durationSec = metadata.duration;
97
99
  const clip = {
98
100
  ...clipObj,
99
101
  volume:
@@ -107,6 +107,12 @@ function formatSchema(modules, options = {}) {
107
107
  lines.push(
108
108
  "- All times are in **seconds**. `position` = when the clip starts, `end` = when it ends."
109
109
  );
110
+ lines.push(
111
+ "- **`duration`** can be used instead of `end`: the library computes `end = position + duration`. Cannot use both."
112
+ );
113
+ lines.push(
114
+ "- **Auto-sequencing:** For video, image, and audio clips, `position` can be omitted. The clip will be placed immediately after the previous clip on its track. The first clip defaults to position 0."
115
+ );
110
116
  lines.push(
111
117
  "- Video/image clips form the visual timeline. Audio, text, and music are layered on top."
112
118
  );
@@ -6,8 +6,9 @@ module.exports = {
6
6
  schema: `{
7
7
  type: "audio"; // Required: clip type identifier
8
8
  url: string; // Required: path to audio file
9
- position: number; // Required: start time on timeline (seconds)
10
- end: number; // Required: end time on timeline (seconds)
9
+ position?: number; // Start time on timeline (seconds). Omit to auto-sequence after previous audio clip.
10
+ end?: number; // End time on timeline (seconds). Use end OR duration, not both.
11
+ duration?: number; // Duration in seconds (alternative to end). end = position + duration.
11
12
  cutFrom?: number; // Start playback from this point in the source (default: 0)
12
13
  volume?: number; // Volume multiplier (default: 1, 0 = mute, >1 = amplify)
13
14
  }`,
@@ -22,6 +23,8 @@ module.exports = {
22
23
  },
23
24
  ],
24
25
  notes: [
26
+ "If position is omitted, the clip is placed immediately after the previous audio clip (auto-sequencing). The first clip defaults to position 0.",
27
+ "Use duration instead of end to specify how long the clip plays: end = position + duration. Cannot use both.",
25
28
  "Audio clips are mixed (layered) with video audio and background music — they don't replace other audio.",
26
29
  "Use cutFrom to start playback partway through the source file.",
27
30
  ],
@@ -6,8 +6,9 @@ module.exports = {
6
6
  schema: `{
7
7
  type: "image"; // Required: clip type identifier
8
8
  url: string; // Required: path to image file (jpg, png, etc.)
9
- position: number; // Required: start time on timeline (seconds)
10
- end: number; // Required: end time on timeline (seconds)
9
+ position?: number; // Start time on timeline (seconds). Omit to auto-sequence after previous video/image clip.
10
+ end?: number; // End time on timeline (seconds). Use end OR duration, not both.
11
+ duration?: number; // Duration in seconds (alternative to end). end = position + duration.
11
12
  kenBurns?: KenBurnsEffect; // Optional: apply pan/zoom motion to the image
12
13
  }`,
13
14
  enums: {
@@ -23,14 +24,20 @@ module.exports = {
23
24
  examples: [
24
25
  {
25
26
  label: "Static image for 5 seconds",
26
- code: `{ type: "image", url: "photo.jpg", position: 0, end: 5 }`,
27
+ code: `{ type: "image", url: "photo.jpg", duration: 5 }`,
27
28
  },
28
29
  {
29
- label: "Image with slow zoom-in effect",
30
- code: `{ type: "image", url: "landscape.png", position: 5, end: 12, kenBurns: "zoom-in" }`,
30
+ label: "Image slideshow with Ken Burns effects (auto-sequenced)",
31
+ code: `[
32
+ { type: "image", url: "photo1.jpg", duration: 3, kenBurns: "zoom-in" },
33
+ { type: "image", url: "photo2.jpg", duration: 3, kenBurns: "pan-right" },
34
+ { type: "image", url: "photo3.jpg", duration: 3, kenBurns: "zoom-out" }
35
+ ]`,
31
36
  },
32
37
  ],
33
38
  notes: [
39
+ "If position is omitted, the clip is placed immediately after the previous video/image clip (auto-sequencing). The first clip defaults to position 0.",
40
+ "Use duration instead of end to specify how long the image displays: end = position + duration. Cannot use both.",
34
41
  "Images are scaled to fill the project canvas. For Ken Burns, use images at least as large as the output resolution for best quality.",
35
42
  "Image clips can be placed on the same timeline as video clips and can use transitions between them.",
36
43
  ],
@@ -7,7 +7,8 @@ module.exports = {
7
7
  type: "music"; // Required: clip type ("music" or "backgroundAudio")
8
8
  url: string; // Required: path to audio file
9
9
  position?: number; // Start time on timeline (default: 0)
10
- end?: number; // End time on timeline (default: end of video)
10
+ end?: number; // End time on timeline (default: end of video). Use end OR duration, not both.
11
+ duration?: number; // Duration in seconds (alternative to end). end = position + duration.
11
12
  cutFrom?: number; // Start playback from this point in the source (default: 0)
12
13
  volume?: number; // Volume multiplier (default: 0.2 — quieter than main audio)
13
14
  loop?: boolean; // Loop the track to fill the entire video duration (default: false)
@@ -7,7 +7,8 @@ module.exports = {
7
7
  type: "subtitle"; // Required: clip type identifier
8
8
  url: string; // Required: path to subtitle file (.srt, .vtt, .ass, .ssa)
9
9
  position?: number; // Timeline offset — shifts all subtitle timestamps forward (default: 0)
10
- end?: number; // Optional end time to cut off subtitles
10
+ end?: number; // Optional end time to cut off subtitles. Use end OR duration, not both.
11
+ duration?: number; // Duration in seconds (alternative to end). end = position + duration.
11
12
 
12
13
  // Styling (applies to SRT/VTT imports — ASS/SSA files use their own embedded styles)
13
14
  fontFamily?: string; // Font family (default: "Sans")
@@ -6,7 +6,8 @@ module.exports = {
6
6
  schema: `{
7
7
  type: "text"; // Required: clip type identifier
8
8
  position: number; // Required: start time on timeline (seconds)
9
- end: number; // Required: end time on timeline (seconds)
9
+ end?: number; // End time on timeline (seconds). Use end OR duration, not both.
10
+ duration?: number; // Duration in seconds (alternative to end). end = position + duration.
10
11
 
11
12
  // Content
12
13
  text?: string; // Text content (required for "static" mode)
@@ -111,6 +112,7 @@ module.exports = {
111
112
  },
112
113
  ],
113
114
  notes: [
115
+ "Use duration instead of end to specify how long the text appears: end = position + duration. Cannot use both.",
114
116
  "If no position is specified (xPercent/yPercent/x/y), text defaults to center of the screen.",
115
117
  "For karaoke mode, provide the words array with per-word start/end times.",
116
118
  "For word-replace and word-sequential, you can use either words[] or wordTimestamps[] (paired with a space-separated text string).",
@@ -6,8 +6,9 @@ module.exports = {
6
6
  schema: `{
7
7
  type: "video"; // Required: clip type identifier
8
8
  url: string; // Required: path to video file
9
- position: number; // Required: start time on timeline (seconds)
10
- end: number; // Required: end time on timeline (seconds)
9
+ position?: number; // Start time on timeline (seconds). Omit to auto-sequence after previous clip.
10
+ end?: number; // End time on timeline (seconds). Use end OR duration, not both.
11
+ duration?: number; // Duration in seconds (alternative to end). end = position + duration.
11
12
  cutFrom?: number; // Trim: start playback from this point in the source (default: 0)
12
13
  volume?: number; // Audio volume multiplier (default: 1, 0 = mute, >1 = amplify)
13
14
  transition?: { // Crossfade transition INTO this clip from the previous one
@@ -70,11 +71,20 @@ module.exports = {
70
71
  ]`,
71
72
  },
72
73
  {
73
- label: "Trim source video (use 10s-20s of the file)",
74
- code: `{ type: "video", url: "long-clip.mp4", position: 0, end: 10, cutFrom: 10 }`,
74
+ label: "Auto-sequenced clips using duration",
75
+ code: `[
76
+ { type: "video", url: "intro.mp4", duration: 5 },
77
+ { type: "video", url: "main.mp4", duration: 10 }
78
+ ]`,
79
+ },
80
+ {
81
+ label: "Trim source video (use 10s starting at the 10s mark)",
82
+ code: `{ type: "video", url: "long-clip.mp4", cutFrom: 10, duration: 10 }`,
75
83
  },
76
84
  ],
77
85
  notes: [
86
+ "If position is omitted, the clip is placed immediately after the previous video/image clip (auto-sequencing). The first clip defaults to position 0.",
87
+ "Use duration instead of end to specify how long the clip appears: end = position + duration. Cannot use both.",
78
88
  "Transitions overlap clips: a 0.5s fade means clip B's position should start 0.5s before clip A's end.",
79
89
  "The first clip in the timeline cannot have a transition (there's nothing to transition from).",
80
90
  "The total video duration is shortened by the sum of all transition durations.",
@@ -41,6 +41,8 @@ const {
41
41
  buildASSFilter,
42
42
  } = require("./ffmpeg/subtitle_builder");
43
43
  const { getSchema, getSchemaModules } = require("./schema");
44
+ const { resolveClips } = require("./core/resolve");
45
+ const { probeMedia } = require("./core/media_info");
44
46
 
45
47
  class SIMPLEFFMPEG {
46
48
  /**
@@ -215,12 +217,20 @@ class SIMPLEFFMPEG {
215
217
  this._isLoading = true;
216
218
 
217
219
  try {
218
- const result = validateConfig(clipObjs, {
220
+ // Resolve shorthand: duration → end, auto-sequential positioning
221
+ const resolved = resolveClips(clipObjs);
222
+
223
+ // Merge resolution errors into validation
224
+ const result = validateConfig(resolved.clips, {
219
225
  fillGaps: this.options.fillGaps,
220
226
  width: this.options.width,
221
227
  height: this.options.height,
222
228
  });
223
229
 
230
+ // Prepend resolution errors (e.g. duration+end conflict)
231
+ result.errors.unshift(...resolved.errors);
232
+ result.valid = result.valid && resolved.errors.length === 0;
233
+
224
234
  if (!result.valid) {
225
235
  throw new ValidationError(formatValidationResult(result), {
226
236
  errors: result.errors,
@@ -236,8 +246,11 @@ class SIMPLEFFMPEG {
236
246
  result.warnings.forEach((w) => console.warn(`${w.path}: ${w.message}`));
237
247
  }
238
248
 
249
+ // Use resolved clips (with position/end computed) for loading
250
+ const resolvedClips = resolved.clips;
251
+
239
252
  await Promise.all(
240
- clipObjs.map((clipObj) => {
253
+ resolvedClips.map((clipObj) => {
241
254
  if (clipObj.type === "video" || clipObj.type === "audio") {
242
255
  clipObj.volume = clipObj.volume || 1;
243
256
  clipObj.cutFrom = clipObj.cutFrom || 0;
@@ -991,6 +1004,9 @@ class SIMPLEFFMPEG {
991
1004
  // Handle multi-pass text overlays if needed
992
1005
  let passes = 0;
993
1006
  if (needTextPasses) {
1007
+ if (onProgress && typeof onProgress === "function") {
1008
+ onProgress({ phase: "batching" });
1009
+ }
994
1010
  const {
995
1011
  finalPath,
996
1012
  tempOutputs,
@@ -1099,7 +1115,100 @@ class SIMPLEFFMPEG {
1099
1115
  * }
1100
1116
  */
1101
1117
  static validate(clips, options = {}) {
1102
- return validateConfig(clips, options);
1118
+ // Resolve shorthand (duration, auto-sequencing) before validation
1119
+ const resolved = resolveClips(clips);
1120
+ const result = validateConfig(resolved.clips, options);
1121
+
1122
+ // Merge resolution errors
1123
+ result.errors.unshift(...resolved.errors);
1124
+ result.valid = result.valid && resolved.errors.length === 0;
1125
+
1126
+ return result;
1127
+ }
1128
+
1129
+ /**
1130
+ * Calculate the total duration of a clips configuration.
1131
+ * Resolves shorthand (duration, auto-sequencing) before computing.
1132
+ * Returns the visual timeline duration: sum of video/image clip durations
1133
+ * minus transition overlaps.
1134
+ *
1135
+ * This is a pure function — same clips always produce the same result.
1136
+ * No file I/O is performed.
1137
+ *
1138
+ * @param {Array} clips - Array of clip objects
1139
+ * @returns {number} Total duration in seconds
1140
+ *
1141
+ * @example
1142
+ * const duration = SIMPLEFFMPEG.getDuration([
1143
+ * { type: "video", url: "./a.mp4", duration: 5 },
1144
+ * { type: "video", url: "./b.mp4", duration: 10, transition: { type: "fade", duration: 0.5 } },
1145
+ * ]);
1146
+ * // duration === 14.5 (15 - 0.5 transition overlap)
1147
+ */
1148
+ static getDuration(clips) {
1149
+ if (!Array.isArray(clips) || clips.length === 0) return 0;
1150
+
1151
+ // Resolve shorthand (duration → end, auto-sequencing)
1152
+ const { clips: resolved } = resolveClips(clips);
1153
+
1154
+ // Filter to visual clips (video + image)
1155
+ const visual = resolved.filter(
1156
+ (c) => c.type === "video" || c.type === "image"
1157
+ );
1158
+
1159
+ if (visual.length === 0) return 0;
1160
+
1161
+ const baseSum = visual.reduce(
1162
+ (acc, c) => acc + Math.max(0, (c.end || 0) - (c.position || 0)),
1163
+ 0
1164
+ );
1165
+
1166
+ const transitionsOverlap = visual.reduce((acc, c) => {
1167
+ const d =
1168
+ c.transition && typeof c.transition.duration === "number"
1169
+ ? c.transition.duration
1170
+ : 0;
1171
+ return acc + d;
1172
+ }, 0);
1173
+
1174
+ return Math.max(0, baseSum - transitionsOverlap);
1175
+ }
1176
+
1177
+ /**
1178
+ * Probe a media file and return comprehensive metadata.
1179
+ *
1180
+ * Uses ffprobe to extract duration, dimensions, codecs, format,
1181
+ * bitrate, audio details, and rotation info from any media file
1182
+ * (video, audio, or image).
1183
+ *
1184
+ * @param {string} filePath - Path to the media file
1185
+ * @returns {Promise<Object>} Media info object with:
1186
+ * - duration (number|null) — total duration in seconds
1187
+ * - width (number|null) — video width in pixels
1188
+ * - height (number|null) — video height in pixels
1189
+ * - hasVideo (boolean) — true if file contains a video stream
1190
+ * - hasAudio (boolean) — true if file contains an audio stream
1191
+ * - rotation (number) — iPhone/mobile rotation value (0 if none)
1192
+ * - videoCodec (string|null) — e.g. "h264", "hevc", "vp9"
1193
+ * - audioCodec (string|null) — e.g. "aac", "mp3"
1194
+ * - format (string|null) — container format, e.g. "mov,mp4,m4a,3gp,3g2,mj2"
1195
+ * - fps (number|null) — frames per second
1196
+ * - size (number|null) — file size in bytes
1197
+ * - bitrate (number|null) — overall bitrate in bits/sec
1198
+ * - sampleRate (number|null) — audio sample rate, e.g. 48000
1199
+ * - channels (number|null) — audio channels (1=mono, 2=stereo)
1200
+ * @throws {MediaNotFoundError} If the file cannot be found or probed
1201
+ *
1202
+ * @example
1203
+ * const info = await SIMPLEFFMPEG.probe("./video.mp4");
1204
+ * console.log(info.duration); // 30.5
1205
+ * console.log(info.width); // 1920
1206
+ * console.log(info.height); // 1080
1207
+ * console.log(info.videoCodec); // "h264"
1208
+ * console.log(info.hasAudio); // true
1209
+ */
1210
+ static async probe(filePath) {
1211
+ return probeMedia(filePath);
1103
1212
  }
1104
1213
 
1105
1214
  /**
package/types/index.d.mts CHANGED
@@ -50,8 +50,12 @@ declare namespace SIMPLEFFMPEG {
50
50
  interface BaseClip {
51
51
  type: ClipType;
52
52
  url?: string;
53
- position: number;
54
- end: number;
53
+ /** Start time on timeline in seconds. For video/image/audio: omit to auto-sequence after the previous clip. */
54
+ position?: number;
55
+ /** End time on timeline in seconds. Mutually exclusive with duration. */
56
+ end?: number;
57
+ /** Duration in seconds (alternative to end). Computes end = position + duration. Mutually exclusive with end. */
58
+ duration?: number;
55
59
  }
56
60
 
57
61
  interface VideoClip extends BaseClip {
@@ -307,6 +311,8 @@ declare namespace SIMPLEFFMPEG {
307
311
  bitrate?: number;
308
312
  /** Current output size in bytes */
309
313
  size?: number;
314
+ /** Export phase: "rendering" during main export, "batching" during text overlay passes */
315
+ phase?: "rendering" | "batching";
310
316
  }
311
317
 
312
318
  /** Metadata to embed in output file */
@@ -530,6 +536,42 @@ declare namespace SIMPLEFFMPEG {
530
536
  /** Total expected duration in seconds */
531
537
  totalDuration: number;
532
538
  }
539
+
540
+ // ─────────────────────────────────────────────────────────────────────────────
541
+ // Media Info (probe)
542
+ // ─────────────────────────────────────────────────────────────────────────────
543
+
544
+ /** Result from SIMPLEFFMPEG.probe() — comprehensive media file metadata */
545
+ interface MediaInfo {
546
+ /** Total duration in seconds */
547
+ duration: number | null;
548
+ /** Video width in pixels (null for audio-only files) */
549
+ width: number | null;
550
+ /** Video height in pixels (null for audio-only files) */
551
+ height: number | null;
552
+ /** Whether the file contains a video stream */
553
+ hasVideo: boolean;
554
+ /** Whether the file contains an audio stream */
555
+ hasAudio: boolean;
556
+ /** iPhone/mobile rotation value in degrees (0 if none) */
557
+ rotation: number;
558
+ /** Video codec name, e.g. "h264", "hevc", "vp9" (null if no video) */
559
+ videoCodec: string | null;
560
+ /** Audio codec name, e.g. "aac", "mp3", "pcm_s16le" (null if no audio) */
561
+ audioCodec: string | null;
562
+ /** Container format name, e.g. "mov,mp4,m4a,3gp,3g2,mj2" */
563
+ format: string | null;
564
+ /** Frames per second (null for non-video files) */
565
+ fps: number | null;
566
+ /** File size in bytes */
567
+ size: number | null;
568
+ /** Overall bitrate in bits per second */
569
+ bitrate: number | null;
570
+ /** Audio sample rate in Hz, e.g. 48000, 44100 (null if no audio) */
571
+ sampleRate: number | null;
572
+ /** Number of audio channels (1=mono, 2=stereo) (null if no audio) */
573
+ channels: number | null;
574
+ }
533
575
  }
534
576
 
535
577
  declare class SIMPLEFFMPEG {
@@ -587,6 +629,47 @@ declare class SIMPLEFFMPEG {
587
629
  options?: SIMPLEFFMPEG.ValidateOptions
588
630
  ): SIMPLEFFMPEG.ValidationResult;
589
631
 
632
+ /**
633
+ * Calculate the total duration of a clips configuration.
634
+ * Resolves shorthand (duration, auto-sequencing) before computing.
635
+ * Returns the visual timeline duration: sum of video/image clip durations
636
+ * minus transition overlaps.
637
+ *
638
+ * Pure function — same clips always produce the same result. No file I/O.
639
+ *
640
+ * @param clips - Array of clip objects
641
+ * @returns Total duration in seconds
642
+ *
643
+ * @example
644
+ * const duration = SIMPLEFFMPEG.getDuration([
645
+ * { type: "video", url: "./a.mp4", duration: 5 },
646
+ * { type: "video", url: "./b.mp4", duration: 10,
647
+ * transition: { type: "fade", duration: 0.5 } },
648
+ * ]);
649
+ * // duration === 14.5
650
+ */
651
+ static getDuration(clips: SIMPLEFFMPEG.Clip[]): number;
652
+
653
+ /**
654
+ * Probe a media file and return comprehensive metadata.
655
+ *
656
+ * Uses ffprobe to extract duration, dimensions, codecs, format,
657
+ * bitrate, audio details, and rotation info from any media file.
658
+ *
659
+ * @param filePath - Path to the media file
660
+ * @returns Media info object
661
+ * @throws {SIMPLEFFMPEG.MediaNotFoundError} If the file cannot be found or probed
662
+ *
663
+ * @example
664
+ * const info = await SIMPLEFFMPEG.probe("./video.mp4");
665
+ * console.log(info.duration); // 30.5
666
+ * console.log(info.width); // 1920
667
+ * console.log(info.height); // 1080
668
+ * console.log(info.videoCodec); // "h264"
669
+ * console.log(info.hasAudio); // true
670
+ */
671
+ static probe(filePath: string): Promise<SIMPLEFFMPEG.MediaInfo>;
672
+
590
673
  /**
591
674
  * Format validation result as human-readable string
592
675
  */
package/types/index.d.ts CHANGED
@@ -50,8 +50,12 @@ declare namespace SIMPLEFFMPEG {
50
50
  interface BaseClip {
51
51
  type: ClipType;
52
52
  url?: string;
53
- position: number;
54
- end: number;
53
+ /** Start time on timeline in seconds. For video/image/audio: omit to auto-sequence after the previous clip. */
54
+ position?: number;
55
+ /** End time on timeline in seconds. Mutually exclusive with duration. */
56
+ end?: number;
57
+ /** Duration in seconds (alternative to end). Computes end = position + duration. Mutually exclusive with end. */
58
+ duration?: number;
55
59
  }
56
60
 
57
61
  interface VideoClip extends BaseClip {
@@ -307,6 +311,8 @@ declare namespace SIMPLEFFMPEG {
307
311
  bitrate?: number;
308
312
  /** Current output size in bytes */
309
313
  size?: number;
314
+ /** Export phase: "rendering" during main export, "batching" during text overlay passes */
315
+ phase?: "rendering" | "batching";
310
316
  }
311
317
 
312
318
  /** Metadata to embed in output file */
@@ -627,6 +633,42 @@ declare namespace SIMPLEFFMPEG {
627
633
  /** Total expected duration in seconds */
628
634
  totalDuration: number;
629
635
  }
636
+
637
+ // ─────────────────────────────────────────────────────────────────────────────
638
+ // Media Info (probe)
639
+ // ─────────────────────────────────────────────────────────────────────────────
640
+
641
+ /** Result from SIMPLEFFMPEG.probe() — comprehensive media file metadata */
642
+ interface MediaInfo {
643
+ /** Total duration in seconds */
644
+ duration: number | null;
645
+ /** Video width in pixels (null for audio-only files) */
646
+ width: number | null;
647
+ /** Video height in pixels (null for audio-only files) */
648
+ height: number | null;
649
+ /** Whether the file contains a video stream */
650
+ hasVideo: boolean;
651
+ /** Whether the file contains an audio stream */
652
+ hasAudio: boolean;
653
+ /** iPhone/mobile rotation value in degrees (0 if none) */
654
+ rotation: number;
655
+ /** Video codec name, e.g. "h264", "hevc", "vp9" (null if no video) */
656
+ videoCodec: string | null;
657
+ /** Audio codec name, e.g. "aac", "mp3", "pcm_s16le" (null if no audio) */
658
+ audioCodec: string | null;
659
+ /** Container format name, e.g. "mov,mp4,m4a,3gp,3g2,mj2" */
660
+ format: string | null;
661
+ /** Frames per second (null for non-video files) */
662
+ fps: number | null;
663
+ /** File size in bytes */
664
+ size: number | null;
665
+ /** Overall bitrate in bits per second */
666
+ bitrate: number | null;
667
+ /** Audio sample rate in Hz, e.g. 48000, 44100 (null if no audio) */
668
+ sampleRate: number | null;
669
+ /** Number of audio channels (1=mono, 2=stereo) (null if no audio) */
670
+ channels: number | null;
671
+ }
630
672
  }
631
673
 
632
674
  declare class SIMPLEFFMPEG {
@@ -686,6 +728,47 @@ declare class SIMPLEFFMPEG {
686
728
  options?: SIMPLEFFMPEG.ValidateOptions
687
729
  ): SIMPLEFFMPEG.ValidationResult;
688
730
 
731
+ /**
732
+ * Calculate the total duration of a clips configuration.
733
+ * Resolves shorthand (duration, auto-sequencing) before computing.
734
+ * Returns the visual timeline duration: sum of video/image clip durations
735
+ * minus transition overlaps.
736
+ *
737
+ * Pure function — same clips always produce the same result. No file I/O.
738
+ *
739
+ * @param clips - Array of clip objects
740
+ * @returns Total duration in seconds
741
+ *
742
+ * @example
743
+ * const duration = SIMPLEFFMPEG.getDuration([
744
+ * { type: "video", url: "./a.mp4", duration: 5 },
745
+ * { type: "video", url: "./b.mp4", duration: 10,
746
+ * transition: { type: "fade", duration: 0.5 } },
747
+ * ]);
748
+ * // duration === 14.5
749
+ */
750
+ static getDuration(clips: SIMPLEFFMPEG.Clip[]): number;
751
+
752
+ /**
753
+ * Probe a media file and return comprehensive metadata.
754
+ *
755
+ * Uses ffprobe to extract duration, dimensions, codecs, format,
756
+ * bitrate, audio details, and rotation info from any media file.
757
+ *
758
+ * @param filePath - Path to the media file
759
+ * @returns Media info object
760
+ * @throws {SIMPLEFFMPEG.MediaNotFoundError} If the file cannot be found or probed
761
+ *
762
+ * @example
763
+ * const info = await SIMPLEFFMPEG.probe("./video.mp4");
764
+ * console.log(info.duration); // 30.5
765
+ * console.log(info.width); // 1920
766
+ * console.log(info.height); // 1080
767
+ * console.log(info.videoCodec); // "h264"
768
+ * console.log(info.hasAudio); // true
769
+ */
770
+ static probe(filePath: string): Promise<SIMPLEFFMPEG.MediaInfo>;
771
+
689
772
  /**
690
773
  * Format validation result as human-readable string
691
774
  */