node-av 5.1.0 → 5.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,275 +0,0 @@
- /**
-  * Time and Memory Measurement Utilities
-  *
-  * Provides utilities for measuring execution time, memory usage,
-  * and computing statistics for benchmark results.
-  */
-
- /**
-  * Result of a single benchmark run
-  */
- export interface MeasureResult {
-   /** Duration in milliseconds */
-   durationMs: number;
-   /** Peak memory usage in bytes */
-   peakMemoryBytes: number;
-   /** Memory samples taken during execution (bytes) */
-   memorySamples: number[];
-   /** Frames processed (if applicable) */
-   framesProcessed?: number;
-   /** FPS calculated from frames and duration */
-   fps?: number;
- }
-
- /**
-  * Statistics computed from multiple runs
-  */
- export interface Stats {
-   mean: number;
-   min: number;
-   max: number;
-   stdDev: number;
-   median: number;
- }
-
- /**
-  * Aggregated benchmark result from multiple iterations
-  */
- export interface AggregatedResult {
-   iterations: number;
-   durationMs: Stats;
-   peakMemoryBytes: Stats;
-   fps?: Stats;
-   framesProcessed?: number;
- }
-
- /**
-  * Memory sampler that records memory usage at regular intervals
-  * Uses delta from baseline to measure actual memory growth during operation
-  */
- export class MemorySampler {
-   private samples: number[] = [];
-   private interval: ReturnType<typeof setInterval> | null = null;
-   private peakMemory = 0;
-   private baselineMemory = 0;
-
-   /**
-    * Start sampling memory at the given interval
-    * Records baseline before starting to measure delta
-    */
-   start(intervalMs = 100): void {
-     this.samples = [];
-     this.peakMemory = 0;
-
-     // Force GC before taking baseline for more accurate measurement
-     if (global.gc) {
-       global.gc();
-     }
-
-     // Record baseline memory
-     this.baselineMemory = process.memoryUsage().rss;
-
-     this.interval = setInterval(() => {
-       const memUsage = process.memoryUsage();
-       const rss = memUsage.rss;
-       this.samples.push(rss);
-       if (rss > this.peakMemory) {
-         this.peakMemory = rss;
-       }
-     }, intervalMs);
-   }
-
-   /**
-    * Stop sampling and return results
-    * Returns peak memory as delta from baseline
-    */
-   stop(): { samples: number[]; peakMemory: number; baselineMemory: number } {
-     if (this.interval) {
-       clearInterval(this.interval);
-       this.interval = null;
-     }
-
-     // Take one final sample
-     const finalRss = process.memoryUsage().rss;
-     this.samples.push(finalRss);
-     if (finalRss > this.peakMemory) {
-       this.peakMemory = finalRss;
-     }
-
-     return {
-       samples: this.samples,
-       peakMemory: this.peakMemory,
-       baselineMemory: this.baselineMemory,
-     };
-   }
- }
-
- /**
-  * High-resolution timer for measuring execution time
-  */
- export class Timer {
-   private startTime = 0n;
-   private endTime = 0n;
-
-   /**
-    * Start the timer
-    */
-   start(): void {
-     this.startTime = process.hrtime.bigint();
-   }
-
-   /**
-    * Stop the timer
-    */
-   stop(): void {
-     this.endTime = process.hrtime.bigint();
-   }
-
-   /**
-    * Get elapsed time in milliseconds
-    */
-   getElapsedMs(): number {
-     return Number(this.endTime - this.startTime) / 1_000_000;
-   }
-
-   /**
-    * Get elapsed time in seconds
-    */
-   getElapsedSeconds(): number {
-     return this.getElapsedMs() / 1000;
-   }
- }
-
- /**
-  * Measure execution time and memory usage of an async function
-  * Memory is measured as delta from baseline for accurate comparison
-  */
- export async function measure(fn: () => Promise<{ framesProcessed?: number }>, options: { memorySampleIntervalMs?: number } = {}): Promise<MeasureResult> {
-   const { memorySampleIntervalMs = 100 } = options;
-
-   // Force garbage collection if available to get clean baseline
-   if (global.gc) {
-     global.gc();
-     // Wait a bit for GC to complete
-     await new Promise((resolve) => setTimeout(resolve, 50));
-     global.gc();
-   }
-
-   const timer = new Timer();
-   const memorySampler = new MemorySampler();
-
-   memorySampler.start(memorySampleIntervalMs);
-   timer.start();
-
-   const result = await fn();
-
-   timer.stop();
-   const memoryResult = memorySampler.stop();
-
-   const durationMs = timer.getElapsedMs();
-   const framesProcessed = result.framesProcessed;
-
-   // Calculate peak memory as delta from baseline
-   // This gives us the actual memory growth during the operation
-   const peakMemoryDelta = memoryResult.peakMemory - memoryResult.baselineMemory;
-
-   return {
-     durationMs,
-     peakMemoryBytes: Math.max(0, peakMemoryDelta), // Ensure non-negative
-     memorySamples: memoryResult.samples,
-     framesProcessed,
-     fps: framesProcessed ? (framesProcessed / durationMs) * 1000 : undefined,
-   };
- }
-
- /**
-  * Compute statistics from an array of numbers
-  */
- export function computeStats(values: number[]): Stats {
-   if (values.length === 0) {
-     return { mean: 0, min: 0, max: 0, stdDev: 0, median: 0 };
-   }
-
-   const sorted = [...values].sort((a, b) => a - b);
-   const sum = values.reduce((a, b) => a + b, 0);
-   const mean = sum / values.length;
-   const min = sorted[0];
-   const max = sorted[sorted.length - 1];
-
-   // Standard deviation
-   const squaredDiffs = values.map((v) => Math.pow(v - mean, 2));
-   const avgSquaredDiff = squaredDiffs.reduce((a, b) => a + b, 0) / values.length;
-   const stdDev = Math.sqrt(avgSquaredDiff);
-
-   // Median
-   const mid = Math.floor(sorted.length / 2);
-   const median = sorted.length % 2 !== 0 ? sorted[mid] : (sorted[mid - 1] + sorted[mid]) / 2;
-
-   return { mean, min, max, stdDev, median };
- }
-
- /**
-  * Aggregate multiple measurement results into statistics
-  */
- export function aggregateResults(results: MeasureResult[]): AggregatedResult {
-   const durations = results.map((r) => r.durationMs);
-   const peakMemories = results.map((r) => r.peakMemoryBytes);
-   const fpsValues = results.filter((r) => r.fps !== undefined).map((r) => r.fps!);
-
-   const framesProcessed = results.find((r) => r.framesProcessed)?.framesProcessed;
-
-   return {
-     iterations: results.length,
-     durationMs: computeStats(durations),
-     peakMemoryBytes: computeStats(peakMemories),
-     fps: fpsValues.length > 0 ? computeStats(fpsValues) : undefined,
-     framesProcessed,
-   };
- }
-
- /**
-  * Format bytes to human-readable string
-  */
- export function formatBytes(bytes: number): string {
-   const units = ['B', 'KB', 'MB', 'GB'];
-   let value = bytes;
-   let unitIndex = 0;
-
-   while (value >= 1024 && unitIndex < units.length - 1) {
-     value /= 1024;
-     unitIndex++;
-   }
-
-   return `${value.toFixed(1)} ${units[unitIndex]}`;
- }
-
- /**
-  * Format milliseconds to human-readable string
-  */
- export function formatDuration(ms: number): string {
-   if (ms < 1000) {
-     return `${ms.toFixed(1)}ms`;
-   } else if (ms < 60000) {
-     return `${(ms / 1000).toFixed(2)}s`;
-   } else {
-     const minutes = Math.floor(ms / 60000);
-     const seconds = ((ms % 60000) / 1000).toFixed(1);
-     return `${minutes}m ${seconds}s`;
-   }
- }
-
- /**
-  * Calculate percentage difference between two values
-  */
- export function percentDiff(baseline: number, comparison: number): number {
-   if (baseline === 0) return 0;
-   return ((comparison - baseline) / baseline) * 100;
- }
-
- /**
-  * Format percentage difference with sign
-  */
- export function formatPercentDiff(diff: number): string {
-   const sign = diff > 0 ? '+' : '';
-   return `${sign}${diff.toFixed(1)}%`;
- }
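The removed measurement module above is self-contained, so its intended use can be illustrated. Below is a minimal usage sketch, not taken from the package: it assumes the file is imported as ./measure.js (as the report generator in the next hunk does) and that Node.js is started with --expose-gc so the global.gc calls above actually run. The workload body and frame count are placeholders.

import { aggregateResults, formatBytes, formatDuration, measure } from './measure.js';
import type { MeasureResult } from './measure.js';

async function runBenchmark(iterations = 3): Promise<void> {
  const results: MeasureResult[] = [];

  for (let i = 0; i < iterations; i++) {
    // measure() times the async body and samples RSS growth every 50 ms.
    const result = await measure(async () => {
      // ... decode/encode work would go here (placeholder) ...
      return { framesProcessed: 500 };
    }, { memorySampleIntervalMs: 50 });
    results.push(result);
  }

  // Collapse the per-run results into mean/min/max/stdDev/median statistics.
  const summary = aggregateResults(results);
  console.log(`duration: ${formatDuration(summary.durationMs.mean)} (mean of ${summary.iterations} runs)`);
  console.log(`peak memory: ${formatBytes(summary.peakMemoryBytes.mean)}`);
  if (summary.fps) {
    console.log(`fps: ${summary.fps.mean.toFixed(1)}`);
  }
}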
@@ -1,405 +0,0 @@
- /* eslint-disable @stylistic/max-len */
- /**
-  * Markdown Report Generator
-  *
-  * Generates a formatted BENCHMARK.md report from benchmark results.
-  */
-
- import { execSync } from 'node:child_process';
- import { readFileSync, writeFileSync } from 'node:fs';
- import { arch, cpus, platform, release, totalmem } from 'node:os';
- import { basename, dirname, resolve } from 'node:path';
- import { fileURLToPath } from 'node:url';
-
- import { getFFmpegVersion, probeMediaFile } from './ffmpeg-cli.js';
- import { formatBytes, formatPercentDiff } from './measure.js';
-
- import type { LatencyMetrics } from '../cases/latency.js';
- import type { BenchmarkComparison } from '../runner.js';
-
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = dirname(__filename);
- const rootDir = resolve(__dirname, '../..');
-
- /**
-  * System information for the report
-  */
- export interface SystemInfo {
-   os: string;
-   osVersion: string;
-   arch: string;
-   cpu: string;
-   cpuCores: number;
-   ram: string;
-   gpu?: string;
-   ffmpegVersion: string;
-   nodeAVVersion: string;
-   nodeVersion: string;
- }
-
- /**
-  * Input file information
-  */
- export interface InputFileInfo {
-   path: string;
-   duration: number;
-   resolution?: string;
-   codec?: string;
-   fps?: number;
- }
-
- /**
-  * Complete benchmark report data
-  */
- export interface BenchmarkReport {
-   systemInfo: SystemInfo;
-   /** @deprecated Use inputFileInfos instead */
-   inputFileInfo?: InputFileInfo;
-   inputFileInfos?: InputFileInfo[];
-   transcodeResults: BenchmarkComparison[];
-   memoryResults: BenchmarkComparison[];
-   latencyMetrics?: LatencyMetrics;
-   timestamp: string;
- }
-
- /**
-  * Gather system information
-  */
- export async function getSystemInfo(): Promise<SystemInfo> {
-   const cpuInfo = cpus();
-   const cpu = cpuInfo[0]?.model ?? 'Unknown';
-
-   // Try to detect GPU
-   let gpu: string | undefined;
-   try {
-     if (platform() === 'darwin') {
-       gpu = execSync('system_profiler SPDisplaysDataType 2>/dev/null | grep "Chipset Model" | head -1 | cut -d: -f2', {
-         encoding: 'utf-8',
-       }).trim();
-     } else if (platform() === 'linux') {
-       gpu = execSync("lspci 2>/dev/null | grep -i 'vga\\|3d\\|display' | head -1 | cut -d: -f3", { encoding: 'utf-8' }).trim();
-     } else if (platform() === 'win32') {
-       gpu = execSync('wmic path win32_VideoController get name 2>nul | findstr /v "Name"', { encoding: 'utf-8' }).trim();
-     }
-   } catch {
-     // GPU detection failed, continue without it
-   }
-
-   // Get FFmpeg version
-   let ffmpegVersion = 'Unknown';
-   try {
-     ffmpegVersion = await getFFmpegVersion();
-   } catch {
-     // Continue without version
-   }
-
-   // Get node-av version from package.json
-   let nodeAVVersion = 'Unknown';
-   try {
-     const pkgPath = resolve(rootDir, 'package.json');
-     const pkgContent = readFileSync(pkgPath, 'utf-8');
-     const pkg = JSON.parse(pkgContent);
-     nodeAVVersion = pkg.version;
-   } catch {
-     // Continue without version
-   }
-
-   return {
-     os: platform(),
-     osVersion: release(),
-     arch: arch(),
-     cpu,
-     cpuCores: cpuInfo.length,
-     ram: formatBytes(totalmem()),
-     gpu: gpu ?? undefined,
-     ffmpegVersion,
-     nodeAVVersion,
-     nodeVersion: process.version,
-   };
- }
-
- /**
-  * Format system info section for markdown
-  */
- function formatSystemInfoSection(info: SystemInfo): string {
-   let section = `## System Information
-
- | Property | Value |
- |----------|-------|
- | **OS** | ${info.os} ${info.osVersion} |
- | **Architecture** | ${info.arch} |
- | **CPU** | ${info.cpu} |
- | **CPU Cores** | ${info.cpuCores} |
- | **RAM** | ${info.ram} |`;
-
-   if (info.gpu) {
-     section += `\n| **GPU** | ${info.gpu} |`;
-   }
-
-   section += `
- | **Node.js** | ${info.nodeVersion} |
- | **FFmpeg** | ${info.ffmpegVersion} |
- | **node-av** | ${info.nodeAVVersion} |
- `;
-
-   return section;
- }
-
- /**
-  * Format input file info section (single file - legacy)
-  */
- function formatInputFileSection(info: InputFileInfo | undefined): string {
-   if (!info) return '';
-
-   return `## Test Input
-
- | Property | Value |
- |----------|-------|
- | **File** | \`${info.path}\` |
- | **Duration** | ${info.duration.toFixed(1)}s |
- | **Resolution** | ${info.resolution ?? 'N/A'} |
- | **Codec** | ${info.codec ?? 'N/A'} |
- | **FPS** | ${info.fps?.toFixed(1) ?? 'N/A'} |
- `;
- }
-
- /**
-  * Format input files info section (multiple files)
-  */
- function formatInputFilesSection(infos: InputFileInfo[] | undefined): string {
-   if (!infos || infos.length === 0) return '';
-
-   let section = `## Test Inputs
-
- | File | Codec | Resolution | FPS | Duration |
- |------|-------|------------|-----|----------|
- `;
-
-   for (const info of infos) {
-     const filename = basename(info.path);
-     section += `| ${filename} | ${info.codec ?? 'N/A'} | ${info.resolution ?? 'N/A'} | ${info.fps?.toFixed(0) ?? 'N/A'} | ${info.duration.toFixed(1)}s |\n`;
-   }
-
-   return section;
- }
-
- /**
-  * Format transcode results table
-  */
- function formatTranscodeSection(results: BenchmarkComparison[]): string {
-   if (results.length === 0) return '';
-
-   // Group results by input file
-   const grouped = new Map<string, BenchmarkComparison[]>();
-   for (const result of results) {
-     const inputFile = basename(result.config.inputFile);
-     if (!grouped.has(inputFile)) {
-       grouped.set(inputFile, []);
-     }
-     grouped.get(inputFile)!.push(result);
-   }
-
-   let section = '## Transcode Speed\n';
-
-   // If only one input file, don't show grouping
-   if (grouped.size === 1) {
-     const results = [...grouped.values()][0];
-     section += formatTranscodeTable(results);
-   } else {
-     // Multiple input files - show grouped
-     for (const [inputFile, groupResults] of grouped) {
-       section += '\n### Input: ' + inputFile + '\n';
-       section += formatTranscodeTable(groupResults);
-     }
-   }
-
-   return section;
- }
-
- /**
-  * Format a single transcode results table
-  */
- function formatTranscodeTable(results: BenchmarkComparison[]): string {
-   let section = `
- | Test | FFmpeg CLI (FPS) | node-av (FPS) | FFmpeg CLI (Time) | node-av (Time) | Diff |
- |------|------------------|---------------|-------------------|----------------|------|
- `;
-
-   for (const result of results) {
-     const ffmpegFps = result.ffmpegCLI.fps?.mean.toFixed(1) ?? 'N/A';
-     const nodeAVFps = result.nodeAV.fps?.mean.toFixed(1) ?? 'N/A';
-     const ffmpegDuration = formatDuration(result.ffmpegCLI.durationMs.mean);
-     const nodeAVDuration = formatDuration(result.nodeAV.durationMs.mean);
-     const diff = result.comparison.fpsDiffPercent;
-     const diffStr = diff !== undefined ? formatPercentDiff(diff) : 'N/A';
-
-     section += `| ${result.config.name} | ${ffmpegFps} fps | ${nodeAVFps} fps | ${ffmpegDuration} | ${nodeAVDuration} | ${diffStr} |\n`;
-   }
-
-   return section;
- }
-
- /**
-  * Format memory results table
-  */
- function formatMemorySection(results: BenchmarkComparison[]): string {
-   if (results.length === 0) return '';
-
-   // Group results by input file
-   const grouped = new Map<string, BenchmarkComparison[]>();
-   for (const result of results) {
-     const inputFile = basename(result.config.inputFile);
-     if (!grouped.has(inputFile)) {
-       grouped.set(inputFile, []);
-     }
-     grouped.get(inputFile)!.push(result);
-   }
-
-   let section = '## Memory Usage\n';
-
-   // If only one input file, don't show grouping
-   if (grouped.size === 1) {
-     const results = [...grouped.values()][0];
-     section += formatMemoryTable(results);
-   } else {
-     // Multiple input files - show grouped
-     for (const [inputFile, groupResults] of grouped) {
-       section += '\n### Input: ' + inputFile + '\n';
-       section += formatMemoryTable(groupResults);
-     }
-   }
-
-   section += `
- *Note: FFmpeg CLI memory is measured via \`/usr/bin/time\` (macOS: \`-l\`, Linux: \`-v\`).
- `;
-
-   return section;
- }
-
- /**
-  * Format a single memory results table
-  */
- function formatMemoryTable(results: BenchmarkComparison[]): string {
-   let section = `
- | Test | FFmpeg CLI Peak | node-av Peak | Difference |
- |------|----------------|--------------|------------|
- `;
-
-   for (const result of results) {
-     // Note: FFmpeg CLI memory is estimated and may not be accurate
-     const ffmpegMem = result.ffmpegCLI.peakMemoryBytes.mean > 0 ? formatBytes(result.ffmpegCLI.peakMemoryBytes.mean) : 'N/A*';
-     const nodeAVMem = formatBytes(result.nodeAV.peakMemoryBytes.mean);
-     const diffStr = result.ffmpegCLI.peakMemoryBytes.mean > 0 ? formatPercentDiff(result.comparison.memoryDiffPercent) : 'N/A';
-
-     section += `| ${result.config.name} | ${ffmpegMem} | ${nodeAVMem} | ${diffStr} |\n`;
-   }
-
-   return section;
- }
-
- /**
-  * Format latency section
-  */
- function formatLatencySection(metrics: LatencyMetrics | undefined): string {
-   if (!metrics) return '';
-
-   return `## Latency
-
- | Metric | Mean | Min | Max | StdDev |
- |--------|------|-----|-----|--------|
- | Demuxer Open | ${formatMs(metrics.demuxerOpen.stats.mean)} | ${formatMs(metrics.demuxerOpen.stats.min)} | ${formatMs(metrics.demuxerOpen.stats.max)} | ${formatMs(metrics.demuxerOpen.stats.stdDev)} |
- | First Packet | ${formatMs(metrics.firstPacket.stats.mean)} | ${formatMs(metrics.firstPacket.stats.min)} | ${formatMs(metrics.firstPacket.stats.max)} | ${formatMs(metrics.firstPacket.stats.stdDev)} |
- | First Frame | ${formatMs(metrics.firstFrame.stats.mean)} | ${formatMs(metrics.firstFrame.stats.min)} | ${formatMs(metrics.firstFrame.stats.max)} | ${formatMs(metrics.firstFrame.stats.stdDev)} |
- | First Encoded Packet | ${formatMs(metrics.firstEncodedPacket.stats.mean)} | ${formatMs(metrics.firstEncodedPacket.stats.min)} | ${formatMs(metrics.firstEncodedPacket.stats.max)} | ${formatMs(metrics.firstEncodedPacket.stats.stdDev)} |
- | Pipeline Total | ${formatMs(metrics.pipelineTotal.stats.mean)} | ${formatMs(metrics.pipelineTotal.stats.min)} | ${formatMs(metrics.pipelineTotal.stats.max)} | ${formatMs(metrics.pipelineTotal.stats.stdDev)} |
-
- *Note: Each metric is measured independently. "First Encoded Packet" uses default encoder settings while "Pipeline Total" uses \`tune=zerolatency\` for low-latency output.*
- `;
- }
-
- /**
-  * Format milliseconds
-  */
- function formatMs(ms: number): string {
-   if (ms < 1) {
-     return `${(ms * 1000).toFixed(0)}µs`;
-   }
-   return `${ms.toFixed(1)}ms`;
- }
-
- /**
-  * Format duration in milliseconds
-  */
- function formatDuration(ms: number): string {
-   if (ms < 1000) {
-     return `${ms.toFixed(0)}ms`;
-   }
-   return `${(ms / 1000).toFixed(2)}s`;
- }
-
- /**
-  * Generate the complete BENCHMARK.md report
-  */
- export async function generateReport(report: BenchmarkReport): Promise<string> {
-   const systemSection = formatSystemInfoSection(report.systemInfo);
-   // Support both single file (legacy) and multiple files
-   const inputSection =
-     report.inputFileInfos && report.inputFileInfos.length > 0 ? formatInputFilesSection(report.inputFileInfos) : formatInputFileSection(report.inputFileInfo);
-   const transcodeSection = formatTranscodeSection(report.transcodeResults);
-   const memorySection = formatMemorySection(report.memoryResults);
-   const latencySection = formatLatencySection(report.latencyMetrics);
-
-   const markdown = `# node-av Benchmark Results
-
- > Generated: ${report.timestamp}
-
- ${systemSection}
-
- ${inputSection}
-
- ${transcodeSection}
-
- ${memorySection}
-
- ${latencySection}
- `;
-
-   return markdown;
- }
-
- /**
-  * Write the benchmark report to BENCHMARK.md
-  */
- export async function writeReport(report: BenchmarkReport): Promise<void> {
-   const markdown = await generateReport(report);
-   const outputPath = resolve(rootDir, 'BENCHMARK.md');
-   writeFileSync(outputPath, markdown, 'utf-8');
-   console.log(`\n📄 Report written to: ${outputPath}`);
- }
-
- /**
-  * Save raw results to JSON for later analysis
-  */
- export function saveResultsJSON(report: BenchmarkReport): void {
-   const outputPath = resolve(__dirname, '../results/benchmark-results.json');
-   writeFileSync(outputPath, JSON.stringify(report, null, 2), 'utf-8');
-   console.log(`📊 Raw results saved to: ${outputPath}`);
- }
-
- /**
-  * Create input file info from probing
-  */
- export async function createInputFileInfo(filePath: string): Promise<InputFileInfo> {
-   try {
-     const info = await probeMediaFile(filePath);
-     return {
-       path: filePath,
-       duration: info.duration,
-       resolution: info.width && info.height ? `${info.width}x${info.height}` : undefined,
-       codec: info.videoCodec,
-       fps: info.fps,
-     };
-   } catch {
-     return { path: filePath, duration: 0 };
-   }
- }
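As with the previous hunk, here is a short hedged sketch of how this removed report module fits together; it is not part of the package. It assumes the file is importable as ./report.js alongside the benchmark runner, that the transcode and memory comparisons were produced elsewhere by that runner, and that the input path is a placeholder.

import { createInputFileInfo, getSystemInfo, writeReport } from './report.js';

import type { BenchmarkComparison } from '../runner.js';

async function publishReport(transcodeResults: BenchmarkComparison[], memoryResults: BenchmarkComparison[]): Promise<void> {
  // System details (OS, CPU, FFmpeg and node-av versions) feed the report header.
  const systemInfo = await getSystemInfo();

  // Probe each input file with ffprobe via createInputFileInfo; the path is a placeholder.
  const inputFileInfos = await Promise.all(['testdata/sample-1080p.mp4'].map(createInputFileInfo));

  // Assemble the report data and write BENCHMARK.md at the package root.
  await writeReport({
    systemInfo,
    inputFileInfos,
    transcodeResults,
    memoryResults,
    timestamp: new Date().toISOString(),
  });
}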