simple-ffmpegjs 0.3.2 → 0.3.4

package/README.md CHANGED
@@ -28,6 +28,7 @@
  - [Platform Presets](#platform-presets)
  - [Watermarks](#watermarks)
  - [Progress Information](#progress-information)
+ - [Logging](#logging)
  - [Error Handling](#error-handling)
  - [Cancellation](#cancellation)
  - [Gap Handling](#gap-handling)
@@ -298,6 +299,139 @@ Load clip descriptors into the project. Validates the timeline and reads media m
  await project.load(clips: Clip[]): Promise<void[]>
  ```

+ #### `SIMPLEFFMPEG.getDuration(clips)`
+
+ Calculate the total visual timeline duration from a clips array. Handles `duration` and auto-sequencing shorthand, and subtracts transition overlaps. Pure function — no file I/O.
+
+ ```ts
+ const clips = [
+   { type: "video", url: "./a.mp4", duration: 5 },
+   {
+     type: "video",
+     url: "./b.mp4",
+     duration: 10,
+     transition: { type: "fade", duration: 0.5 },
+   },
+ ];
+ SIMPLEFFMPEG.getDuration(clips); // 14.5
+ ```
+
+ Useful for computing text overlay timings or background music end times before calling `load()`.
+
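+ For example, a minimal sketch that pins a closing title card to the last three seconds of the computed timeline (the clip values are illustrative):
+
+ ```ts
+ const total = SIMPLEFFMPEG.getDuration(clips); // 14.5 for the clips above
+ await project.load([
+   ...clips,
+   { type: "text", text: "The End", position: total - 3, end: total },
+ ]);
+ ```
+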
+ **Duration and Auto-Sequencing:**
+
+ For video, image, and audio clips, you can use shorthand to avoid specifying explicit `position` and `end` values:
+
+ - **`duration`** — Use instead of `end`. The library computes `end = position + duration`. You cannot specify both `duration` and `end` on the same clip.
+ - **Omit `position`** — The clip is placed immediately after the previous clip on its track. Video and image clips share the visual track; audio clips have their own track. The first clip defaults to `position: 0`.
+
+ These can be combined:
+
+ ```ts
+ // Before: manual position/end for every clip
+ await project.load([
+   { type: "video", url: "./a.mp4", position: 0, end: 5 },
+   { type: "video", url: "./b.mp4", position: 5, end: 10 },
+   { type: "video", url: "./c.mp4", position: 10, end: 18, cutFrom: 3 },
+ ]);
+
+ // After: auto-sequencing + duration
+ await project.load([
+   { type: "video", url: "./a.mp4", duration: 5 },
+   { type: "video", url: "./b.mp4", duration: 5 },
+   { type: "video", url: "./c.mp4", duration: 8, cutFrom: 3 },
+ ]);
+ ```
+
+ You can mix explicit and implicit positioning freely. Clips with explicit `position` are placed there; subsequent auto-sequenced clips follow from the last clip's end:
+
+ ```ts
+ await project.load([
+   { type: "video", url: "./a.mp4", duration: 5 }, // position: 0, end: 5
+   { type: "video", url: "./b.mp4", position: 10, end: 15 }, // explicit gap
+   { type: "video", url: "./c.mp4", duration: 5 }, // position: 15, end: 20
+ ]);
+ ```
+
+ Text clips always require an explicit `position` (they're overlays on specific moments). Background music and subtitle clips already have optional `position`/`end` with their own defaults.
+
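+ For example, a text overlay sketch combining the required explicit `position` with the `duration` shorthand:
+
+ ```ts
+ await project.load([
+   { type: "video", url: "./a.mp4", duration: 10 },
+   { type: "text", text: "Hello", position: 2, duration: 3 }, // visible 2s–5s
+ ]);
+ ```
+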
+ #### `SIMPLEFFMPEG.probe(filePath)`
+
+ Probe a media file and return comprehensive metadata using ffprobe. Works with video, audio, and image files.
+
+ ```ts
+ const info = await SIMPLEFFMPEG.probe("./video.mp4");
+ // {
+ //   duration: 30.5,                    // seconds
+ //   width: 1920,                       // pixels
+ //   height: 1080,                      // pixels
+ //   hasVideo: true,
+ //   hasAudio: true,
+ //   rotation: 0,                       // iPhone/mobile rotation
+ //   videoCodec: "h264",
+ //   audioCodec: "aac",
+ //   format: "mov,mp4,m4a,3gp,3g2,mj2",
+ //   fps: 30,
+ //   size: 15728640,                    // bytes
+ //   bitrate: 4125000,                  // bits/sec
+ //   sampleRate: 48000,                 // Hz
+ //   channels: 2                        // stereo
+ // }
+ ```
+
+ Fields that don't apply to the file type are `null` (e.g. `width`/`height`/`videoCodec`/`fps` for audio-only files, `audioCodec`/`sampleRate`/`channels` for video-only files).
+
+ Throws `MediaNotFoundError` if the file cannot be found or probed.
+
+ ```ts
+ // Audio file
+ const audio = await SIMPLEFFMPEG.probe("./music.wav");
+ console.log(audio.hasVideo); // false
+ console.log(audio.duration); // 180.5
+ console.log(audio.sampleRate); // 44100
+ ```
+
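+ A common pattern is letting the source file set the clip length; a minimal sketch:
+
+ ```ts
+ const meta = await SIMPLEFFMPEG.probe("./a.mp4");
+ // duration can be null for some containers, so fall back explicitly
+ await project.load([
+   { type: "video", url: "./a.mp4", duration: meta.duration ?? 5 },
+ ]);
+ ```
+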
+ #### `SIMPLEFFMPEG.snapshot(filePath, options)`
+
+ Capture a single frame from a video file and save it as an image. This is a static method — no project instance needed.
+
+ The output format is determined by the `outputPath` file extension. FFmpeg handles format detection internally, so `.jpg` produces JPEG, `.png` produces PNG, `.webp` produces WebP, etc.
+
+ ```ts
+ await SIMPLEFFMPEG.snapshot("./video.mp4", {
+   outputPath: "./frame.png",
+   time: 5,
+ });
+ ```
+
+ **Snapshot Options:**
+
+ | Option       | Type     | Default | Description                                                                |
+ | ------------ | -------- | ------- | -------------------------------------------------------------------------- |
+ | `outputPath` | `string` | -       | **Required.** Output image path (extension determines format)              |
+ | `time`       | `number` | `0`     | Time in seconds to capture the frame at                                    |
+ | `width`      | `number` | -       | Output width in pixels (maintains aspect ratio if height omitted)          |
+ | `height`     | `number` | -       | Output height in pixels (maintains aspect ratio if width omitted)          |
+ | `quality`    | `number` | `2`     | JPEG quality 1-31, lower is better (only applies to `.jpg`/`.jpeg` output) |
+
+ **Supported formats:** `.jpg` / `.jpeg`, `.png`, `.webp`, `.bmp`, `.tiff`
+
+ ```ts
+ // Save as JPEG with quality control and resize
+ await SIMPLEFFMPEG.snapshot("./video.mp4", {
+   outputPath: "./thumb.jpg",
+   time: 10,
+   width: 640,
+   quality: 4,
+ });
+
+ // Save as WebP
+ await SIMPLEFFMPEG.snapshot("./video.mp4", {
+   outputPath: "./preview.webp",
+   time: 0,
+ });
+ ```
+
  #### `project.export(options)`

  Build and execute the FFmpeg command to render the final video.
@@ -329,6 +463,7 @@ await project.export(options?: ExportOptions): Promise<string>
  | `verbose` | `boolean` | `false` | Enable verbose logging |
  | `saveCommand` | `string` | - | Save FFmpeg command to file |
  | `onProgress` | `function` | - | Progress callback |
+ | `onLog` | `function` | - | FFmpeg log callback (see [Logging](#logging) section) |
  | `signal` | `AbortSignal` | - | Cancellation signal |
  | `watermark` | `object` | - | Add watermark overlay (see Watermarks section) |
  | `compensateTransitions` | `boolean` | `true` | Auto-adjust text timings for transition overlap (see below) |
@@ -353,8 +488,9 @@ await project.preview(options?: ExportOptions): Promise<{
  {
    type: "video";
    url: string; // File path
-   position: number; // Timeline start (seconds)
-   end: number; // Timeline end (seconds)
+   position?: number; // Timeline start (seconds). Omit to auto-sequence after previous clip.
+   end?: number; // Timeline end (seconds). Use end OR duration, not both.
+   duration?: number; // Duration in seconds (alternative to end). end = position + duration.
    cutFrom?: number; // Source offset (default: 0)
    volume?: number; // Audio volume (default: 1)
    transition?: {
@@ -372,8 +508,9 @@ All [xfade transitions](https://trac.ffmpeg.org/wiki/Xfade) are supported.
  {
    type: "audio";
    url: string;
-   position: number;
-   end: number;
+   position?: number; // Omit to auto-sequence after previous audio clip
+   end?: number; // Use end OR duration, not both
+   duration?: number; // Duration in seconds (alternative to end)
    cutFrom?: number;
    volume?: number;
  }
@@ -412,8 +549,9 @@ await project.load([
  {
    type: "image";
    url: string;
-   position: number;
-   end: number;
+   position?: number; // Omit to auto-sequence after previous video/image clip
+   end?: number; // Use end OR duration, not both
+   duration?: number; // Duration in seconds (alternative to end)
    kenBurns?: "zoom-in" | "zoom-out" | "pan-left" | "pan-right" | "pan-up" | "pan-down";
  }
  ```
@@ -424,7 +562,8 @@ await project.load([
  {
    type: "text";
    position: number;
-   end: number;
+   end?: number; // Use end OR duration, not both
+   duration?: number; // Duration in seconds (alternative to end)

    // Content
    text?: string;
@@ -601,6 +740,7 @@ The `onProgress` callback receives:
  ```ts
  {
    percent?: number; // 0-100
+   phase?: string; // "rendering" or "batching"
    timeProcessed?: number; // Seconds processed
    frame?: number; // Current frame
    fps?: number; // Processing speed
@@ -608,6 +748,38 @@ The `onProgress` callback receives:
  }
  ```

+ The `phase` field indicates what the export is doing:
+
+ - `"rendering"` — main video export (includes `percent`, `frame`, etc.)
+ - `"batching"` — text overlay passes are running (fired once when batching starts)
+
+ Use `phase` to update your UI when the export hits 100% but still has work to do:
+
+ ```ts
+ onProgress: ({ percent, phase }) => {
+   if (phase === "batching") {
+     console.log("Applying text overlays...");
+   } else {
+     console.log(`${percent}%`);
+   }
+ }
+ ```
+
+ ### Logging
+
+ Use the `onLog` callback to receive real-time FFmpeg output. Each log entry includes a `level` (`"stderr"` or `"stdout"`) and the raw `message` string. This is useful for debugging, monitoring, or piping FFmpeg output to your own logging system.
+
+ ```ts
+ await project.export({
+   outputPath: "./output.mp4",
+   onLog: ({ level, message }) => {
+     console.log(`[ffmpeg:${level}] ${message}`);
+   },
+ });
+ ```
+
+ The callback fires for every data chunk FFmpeg writes, including encoding stats, warnings, and codec information. It works alongside `onProgress` — both can be used simultaneously.
+
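+ For instance, a minimal sketch that keeps progress on the console while archiving the raw FFmpeg output to a file:
+
+ ```ts
+ import { createWriteStream } from "node:fs";
+
+ const log = createWriteStream("./ffmpeg.log");
+ await project.export({
+   outputPath: "./output.mp4",
+   onProgress: ({ percent }) => console.log(`${percent}%`),
+   onLog: ({ message }) => log.write(message),
+ });
+ log.end();
+ ```
+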
  ### Error Handling

  The library provides custom error classes for structured error handling:
@@ -615,7 +787,7 @@ The library provides custom error classes for structured error handling:
  | Error Class            | When Thrown                | Properties                                                                  |
  | ---------------------- | -------------------------- | --------------------------------------------------------------------------- |
  | `ValidationError`      | Invalid clip configuration | `errors[]`, `warnings[]` (structured issues with `code`, `path`, `message`) |
- | `FFmpegError`          | FFmpeg command fails       | `stderr`, `command`, `exitCode`                                             |
+ | `FFmpegError`          | FFmpeg command fails       | `stderr`, `command`, `exitCode`, `details`                                  |
  | `MediaNotFoundError`   | File not found             | `path`                                                                      |
  | `ExportCancelledError` | Export aborted             | -                                                                           |

@@ -632,8 +804,9 @@ try {
      console.warn(`[${w.code}] ${w.path}: ${w.message}`)
    );
  } else if (error.name === "FFmpegError") {
-   console.error("FFmpeg failed:", error.stderr);
-   console.error("Command was:", error.command);
+   // Structured details for bug reports (last 50 lines of stderr, command, exitCode)
+   console.error("FFmpeg failed:", error.details);
+   // { stderrTail: "...", command: "ffmpeg ...", exitCode: 1 }
  } else if (error.name === "MediaNotFoundError") {
    console.error("File not found:", error.path);
  } else if (error.name === "ExportCancelledError") {
@@ -700,31 +873,15 @@ await project.load([

  ```ts
  await project.load([
-   {
-     type: "image",
-     url: "./photo1.jpg",
-     position: 0,
-     end: 3,
-     kenBurns: "zoom-in",
-   },
-   {
-     type: "image",
-     url: "./photo2.jpg",
-     position: 3,
-     end: 6,
-     kenBurns: "pan-right",
-   },
-   {
-     type: "image",
-     url: "./photo3.jpg",
-     position: 6,
-     end: 9,
-     kenBurns: "zoom-out",
-   },
+   { type: "image", url: "./photo1.jpg", duration: 3, kenBurns: "zoom-in" },
+   { type: "image", url: "./photo2.jpg", duration: 3, kenBurns: "pan-right" },
+   { type: "image", url: "./photo3.jpg", duration: 3, kenBurns: "zoom-out" },
    { type: "music", url: "./music.mp3", volume: 0.3 },
  ]);
  ```

+ When `position` is omitted, clips are placed sequentially — each one starts where the previous one ended. `duration` is an alternative to `end`: the library computes `end = position + duration`. The explicit form (`position: 0, end: 3`) still works identically.
+
  > **Note:** Ken Burns effects work best with images at least as large as your output resolution. Smaller images are automatically upscaled (with a validation warning). Use `strictKenBurns: true` in validation options to enforce size requirements instead.

  ### Text & Animations
@@ -952,16 +1109,19 @@ async function generateListingVideo(listing, outputPath) {
  const photos = listing.photos; // ['kitchen.jpg', 'living-room.jpg', ...]
  const slideDuration = 4;

- // Build an image slideshow from listing photos
+ // Build an image slideshow from listing photos (auto-sequenced with crossfades)
+ const transitionDuration = 0.5;
  const photoClips = photos.map((photo, i) => ({
    type: "image",
    url: photo,
-   position: i * slideDuration,
-   end: (i + 1) * slideDuration,
+   duration: slideDuration,
    kenBurns: i % 2 === 0 ? "zoom-in" : "pan-right",
+   ...(i > 0 && {
+     transition: { type: "fade", duration: transitionDuration },
+   }),
  }));

- const totalDuration = photos.length * slideDuration;
+ const totalDuration = SIMPLEFFMPEG.getDuration(photoClips);

  const clips = [
    ...photoClips,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "simple-ffmpegjs",
-   "version": "0.3.2",
+   "version": "0.3.4",
    "description": "Declarative video composition for Node.js — define clips, transitions, text, and audio as simple objects, and let FFmpeg handle the rest.",
    "author": "Brayden Blackwell <braydenblackwell21@gmail.com> (https://github.com/Fats403)",
    "license": "MIT",
@@ -32,6 +32,20 @@ class FFmpegError extends SimpleffmpegError {
      this.command = command;
      this.exitCode = exitCode;
    }
+
+   /**
+    * Structured error details for easy bug reporting.
+    * Contains the last 50 lines of stderr, the command, and exit code.
+    */
+   get details() {
+     const lines = (this.stderr || "").split("\n");
+     const tail = lines.slice(-50).join("\n");
+     return {
+       stderrTail: tail,
+       command: this.command,
+       exitCode: this.exitCode,
+     };
+   }
  }

  /**
@@ -57,101 +57,138 @@ function runFFprobe(args, timeoutMs = DEFAULT_FFPROBE_TIMEOUT_MS) {
  }

  /**
- * Get video metadata using ffprobe
- * @param {string} url - Path to the video file
- * @returns {Promise<{iphoneRotation: number, hasAudio: boolean, width: number|null, height: number|null, durationSec: number|null}>}
- * @throws {MediaNotFoundError} If the file cannot be probed
+ * Parse a fraction string like "30000/1001" or "30/1" into a number.
+ * Returns null if the input is not a valid fraction.
+ * @param {string} fraction
+ * @returns {number|null}
  */
- function getVideoMetadata(url) {
-   return runFFprobe([
-     "-v",
-     "error",
-     "-show_streams",
-     "-show_format",
-     "-of",
-     "json",
-     url,
-   ])
-     .then((stdout) => {
-       let metadata;
-       try {
-         metadata = JSON.parse(stdout);
-       } catch (parseError) {
-         throw new Error(
-           `Invalid JSON response from ffprobe: ${parseError.message}`
-         );
-       }
-
-       // Validate metadata structure
-       if (!metadata || !Array.isArray(metadata.streams)) {
-         throw new Error(
-           "Invalid metadata structure: missing or invalid 'streams' array"
-         );
-       }
-
-       const videoStream = metadata.streams.find(
-         (s) => s.codec_type === "video"
-       );
-       const hasAudio = metadata.streams.some((s) => s.codec_type === "audio");
-       const iphoneRotation = videoStream?.side_data_list?.[0]?.rotation
-         ? videoStream.side_data_list[0].rotation
-         : 0;
-       const formatDuration = metadata.format?.duration
-         ? parseFloat(metadata.format.duration)
-         : null;
-       const streamDuration = videoStream?.duration
-         ? parseFloat(videoStream.duration)
-         : null;
-       const durationSec = Number.isFinite(formatDuration)
-         ? formatDuration
-         : Number.isFinite(streamDuration)
-           ? streamDuration
-           : null;
-       return {
-         iphoneRotation,
-         hasAudio,
-         width: videoStream?.width,
-         height: videoStream?.height,
-         durationSec,
-       };
-     })
-     .catch((error) => {
-       throw new MediaNotFoundError(
-         `Failed to get video metadata for "${url}": ${error.message}`,
-         { path: url }
-       );
-     });
+ function parseFraction(fraction) {
+   if (!fraction || typeof fraction !== "string") return null;
+   const parts = fraction.split("/");
+   if (parts.length !== 2) return null;
+   const num = parseFloat(parts[0]);
+   const den = parseFloat(parts[1]);
+   if (!Number.isFinite(num) || !Number.isFinite(den) || den === 0) return null;
+   const result = num / den;
+   return Number.isFinite(result) ? result : null;
  }

  /**
- * Get media duration using ffprobe
- * @param {string} url - Path to the media file
- * @returns {Promise<number|null>} Duration in seconds, or null if unavailable
+ * Probe a media file and return comprehensive metadata.
+ *
+ * Returns a flat, user-friendly object with duration, dimensions, codecs,
+ * format, bitrate, audio details, and rotation info. All fields that are
+ * not applicable (e.g. width/height for audio-only files) are set to null.
+ *
+ * @param {string} filePath - Path to the media file
+ * @returns {Promise<{
+ *   duration: number|null,
+ *   width: number|null,
+ *   height: number|null,
+ *   hasVideo: boolean,
+ *   hasAudio: boolean,
+ *   rotation: number,
+ *   videoCodec: string|null,
+ *   audioCodec: string|null,
+ *   format: string|null,
+ *   fps: number|null,
+ *   size: number|null,
+ *   bitrate: number|null,
+ *   sampleRate: number|null,
+ *   channels: number|null
+ * }>}
  * @throws {MediaNotFoundError} If the file cannot be probed
  */
- function getMediaDuration(url) {
-   return runFFprobe(["-v", "error", "-show_format", "-of", "json", url])
-     .then((stdout) => {
-       let metadata;
-       try {
-         metadata = JSON.parse(stdout);
-       } catch (parseError) {
-         throw new Error(
-           `Invalid JSON response from ffprobe: ${parseError.message}`
-         );
-       }
-
-       const formatDuration = metadata?.format?.duration
-         ? parseFloat(metadata.format.duration)
-         : null;
-       return Number.isFinite(formatDuration) ? formatDuration : null;
-     })
-     .catch((error) => {
-       throw new MediaNotFoundError(
-         `Failed to get media duration for "${url}": ${error.message}`,
-         { path: url }
-       );
-     });
+ async function probeMedia(filePath) {
+   let stdout;
+   try {
+     stdout = await runFFprobe([
+       "-v",
+       "error",
+       "-show_streams",
+       "-show_format",
+       "-of",
+       "json",
+       filePath,
+     ]);
+   } catch (error) {
+     throw new MediaNotFoundError(
+       `Failed to probe "${filePath}": ${error.message}`,
+       { path: filePath }
+     );
+   }
+
+   let metadata;
+   try {
+     metadata = JSON.parse(stdout);
+   } catch (parseError) {
+     throw new MediaNotFoundError(
+       `Invalid JSON response from ffprobe for "${filePath}": ${parseError.message}`,
+       { path: filePath }
+     );
+   }
+
+   if (!metadata || !Array.isArray(metadata.streams)) {
+     throw new MediaNotFoundError(
+       `Invalid metadata structure for "${filePath}": missing or invalid 'streams' array`,
+       { path: filePath }
+     );
+   }
+
+   const videoStream = metadata.streams.find((s) => s.codec_type === "video");
+   const audioStream = metadata.streams.find((s) => s.codec_type === "audio");
+   const format = metadata.format || {};
+
+   // ── Duration ────────────────────────────────────────────────────────────
+   const formatDuration = format.duration ? parseFloat(format.duration) : null;
+   const streamDuration = videoStream?.duration
+     ? parseFloat(videoStream.duration)
+     : null;
+   const duration = Number.isFinite(formatDuration)
+     ? formatDuration
+     : Number.isFinite(streamDuration)
+       ? streamDuration
+       : null;
+
+   // ── FPS ─────────────────────────────────────────────────────────────────
+   // Prefer avg_frame_rate, fall back to r_frame_rate
+   const fps =
+     parseFraction(videoStream?.avg_frame_rate) ??
+     parseFraction(videoStream?.r_frame_rate) ??
+     null;
+
+   // ── Rotation ────────────────────────────────────────────────────────────
+   const rotation = videoStream?.side_data_list?.[0]?.rotation
+     ? videoStream.side_data_list[0].rotation
+     : 0;
+
+   // ── Size & bitrate ─────────────────────────────────────────────────────
+   const size = format.size ? parseInt(format.size, 10) : null;
+   const bitrate = format.bit_rate ? parseInt(format.bit_rate, 10) : null;
+
+   // ── Audio details ──────────────────────────────────────────────────────
+   const sampleRate = audioStream?.sample_rate
+     ? parseInt(audioStream.sample_rate, 10)
+     : null;
+   const channels =
+     typeof audioStream?.channels === "number" ? audioStream.channels : null;
+
+   return {
+     duration,
+     width: videoStream?.width ?? null,
+     height: videoStream?.height ?? null,
+     hasVideo: !!videoStream,
+     hasAudio: !!audioStream,
+     rotation,
+     videoCodec: videoStream?.codec_name ?? null,
+     audioCodec: audioStream?.codec_name ?? null,
+     format: format.format_name ?? null,
+     fps: Number.isFinite(fps) ? Math.round(fps * 100) / 100 : null,
+     size: Number.isFinite(size) ? size : null,
+     bitrate: Number.isFinite(bitrate) ? bitrate : null,
+     sampleRate: Number.isFinite(sampleRate) ? sampleRate : null,
+     channels,
+   };
  }

- module.exports = { getVideoMetadata, getMediaDuration };
+ module.exports = { probeMedia };
@@ -0,0 +1,96 @@
+ /**
+  * Clip resolution — transforms shorthand clip properties into canonical form.
+  *
+  * This runs BEFORE validation, so the rest of the pipeline always sees
+  * standard { position, end } clips. Two features are handled here:
+  *
+  * 1. **duration → end**: If a clip has `duration` instead of `end`,
+  *    compute `end = position + duration`.
+  *
+  * 2. **Auto-sequential positioning**: If a video/image/audio clip omits
+  *    `position`, it is placed immediately after the previous clip on the
+  *    same track (visual or audio). The first clip defaults to position 0.
+  *
+  * Clips are shallow-cloned — the caller's original objects are not mutated.
+  */
+
+ /**
+  * Types that auto-sequence on the visual track (video + image share a timeline).
+  */
+ const VISUAL_TYPES = ["video", "image"];
+
+ /**
+  * Types that auto-sequence on the audio track.
+  */
+ const AUDIO_TYPES = ["audio"];
+
+ /**
+  * All types eligible for auto-sequencing (position can be omitted).
+  */
+ const AUTO_SEQUENCE_TYPES = [...VISUAL_TYPES, ...AUDIO_TYPES];
+
+ /**
+  * Resolve shorthand clip properties into canonical { position, end } form.
+  *
+  * @param {Array} clips - Array of clip objects (not mutated)
+  * @returns {{ clips: Array, errors: Array }} Resolved clips and any resolution errors
+  */
+ function resolveClips(clips) {
+   if (!Array.isArray(clips)) {
+     return { clips, errors: [] };
+   }
+
+   const errors = [];
+   let lastVisualEnd = 0;
+   let lastAudioEnd = 0;
+
+   const resolved = clips.map((clip, index) => {
+     const c = { ...clip };
+     const path = `clips[${index}]`;
+
+     // ── Conflict check: duration + end ──────────────────────────────────
+     if (c.duration != null && c.end != null) {
+       errors.push({
+         code: "INVALID_VALUE",
+         path: `${path}`,
+         message:
+           "Cannot specify both 'duration' and 'end'. Use one or the other.",
+         received: { duration: c.duration, end: c.end },
+       });
+       // Don't resolve further — let validation report the canonical errors
+       return c;
+     }
+
+     // ── Auto-sequential positioning ─────────────────────────────────────
+     const isVisual = VISUAL_TYPES.includes(c.type);
+     const isAudio = AUDIO_TYPES.includes(c.type);
+     const canAutoSequence = AUTO_SEQUENCE_TYPES.includes(c.type);
+
+     if (canAutoSequence && c.position == null) {
+       c.position = isVisual ? lastVisualEnd : lastAudioEnd;
+     }
+
+     // ── Duration → end ──────────────────────────────────────────────────
+     if (c.duration != null && c.end == null) {
+       if (typeof c.position === "number" && typeof c.duration === "number") {
+         c.end = c.position + c.duration;
+       }
+       // Remove duration so the rest of the pipeline sees canonical { position, end }
+       delete c.duration;
+     }
+
+     // ── Track the end of the last clip on each track ────────────────────
+     if (isVisual && typeof c.end === "number") {
+       lastVisualEnd = c.end;
+     }
+     if (isAudio && typeof c.end === "number") {
+       lastAudioEnd = c.end;
+     }
+
+     return c;
+   });
+
+   return { clips: resolved, errors };
+ }
+
+ module.exports = { resolveClips };
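
A quick sketch of what the resolver produces, assuming a hypothetical import path (`resolveClips` is an internal module, not part of the public API):

```ts
// Hypothetical path — the module's location inside the package may differ.
import { resolveClips } from "simple-ffmpegjs/src/resolve-clips";

const { clips, errors } = resolveClips([
  { type: "video", url: "./a.mp4", duration: 5 },
  { type: "video", url: "./b.mp4", duration: 5 },
]);
// clips[1] → { type: "video", url: "./b.mp4", position: 5, end: 10 }
// errors   → []
```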