node-av 5.1.0 → 5.2.0-beta.2

This diff compares the published contents of two package versions as they appear in their public registries. It is provided for informational purposes only.
@@ -1,264 +0,0 @@
- #!/usr/bin/env tsx
- /**
-  * node-av Benchmark CLI
-  *
-  * Comprehensive benchmark suite comparing node-av with FFmpeg CLI.
-  *
-  * Usage:
-  *   npm run benchmark                              # Run all benchmarks
-  *   npm run benchmark -- --help                    # Show help
-  *   npm run benchmark -- transcode                 # Run only transcode benchmarks
-  *   npm run benchmark -- memory                    # Run only memory benchmarks
-  *   npm run benchmark -- latency                   # Run only latency benchmarks
-  *   npm run benchmark -- -i file1.mp4 -i file2.mp4 # Multiple inputs
-  */
-
- import { existsSync, readdirSync } from 'node:fs';
- import { mkdir } from 'node:fs/promises';
- import { basename, dirname, join, resolve } from 'node:path';
- import { fileURLToPath } from 'node:url';
-
- import { measureAllLatencies } from './cases/latency.js';
- import { runAllMemoryBenchmarks } from './cases/memory.js';
- import { runAllTranscodeBenchmarks } from './cases/transcode.js';
- import { runner } from './runner.js';
- import { createInputFileInfo, getSystemInfo, saveResultsJSON, writeReport } from './utils/report.js';
-
- import type { BenchmarkReport, InputFileInfo } from './utils/report.js';
-
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = dirname(__filename);
-
- // Default paths
- const testDataDir = resolve(__dirname, '../testdata');
- const resultsDir = resolve(__dirname, 'results');
- const defaultInputFile = join(testDataDir, 'video.mp4');
-
- /**
-  * Parse command line arguments
-  */
- function parseArgs(): { category?: string; inputFiles: string[]; help: boolean; noReport: boolean; iterations?: number; pattern?: string } {
-   const args = process.argv.slice(2);
-   let category: string | undefined;
-   const inputFiles: string[] = [];
-   let help = false;
-   let noReport = false;
-   let iterations: number | undefined;
-   let pattern: string | undefined;
-
-   for (let i = 0; i < args.length; i++) {
-     const arg = args[i];
-
-     if (arg === '--help' || arg === '-h') {
-       help = true;
-     } else if (arg === '--no-report') {
-       noReport = true;
-     } else if (arg === '--input' || arg === '-i') {
-       inputFiles.push(args[++i]);
-     } else if (arg === '--pattern' || arg === '-p') {
-       pattern = args[++i];
-     } else if (arg === '--iterations' || arg === '-n') {
-       iterations = parseInt(args[++i], 10);
-     } else if (!arg.startsWith('-')) {
-       category = arg;
-     }
-   }
-
-   return { category, inputFiles, help, noReport, iterations, pattern };
- }
-
- /**
-  * Expand glob pattern to file list
-  */
- function expandPattern(pattern: string): string[] {
-   const dir = dirname(pattern);
-   const filePattern = basename(pattern).replace('*', '.*');
-   const regex = new RegExp(`^${filePattern}$`);
-
-   if (!existsSync(dir)) return [];
-
-   return readdirSync(dir)
-     .filter((file) => regex.test(file))
-     .map((file) => join(dir, file))
-     .sort();
- }
-
- /**
-  * Print help message
-  */
- function printHelp(): void {
-   console.log(`
- node-av Benchmark Suite
- ========================
-
- Usage:
-   npm run benchmark [options] [category]
-
- Categories:
-   transcode   Run transcode speed benchmarks
-   memory      Run memory usage benchmarks
-   latency     Run latency benchmarks
-   all         Run all benchmarks (default)
-
- Options:
-   -h, --help          Show this help message
-   -i, --input FILE    Use custom input file (can be repeated for multiple files)
-   -p, --pattern PAT   Use glob pattern to match input files
-   -n, --iterations N  Number of iterations per benchmark (default: 5)
-   --no-report         Skip generating BENCHMARK.md report
-
- Examples:
-   npm run benchmark                              # Run all benchmarks with default input
-   npm run benchmark transcode                    # Run only transcode benchmarks
-   npm run benchmark -- -i input.mp4              # Use custom input file
-   npm run benchmark -- -i h264.mp4 -i hevc.mp4   # Multiple input files
-   npm run benchmark -- -p "testdata/bbb-4k-*"    # Match pattern
-   npm run benchmark -- -n 10                     # Run 10 iterations
-   npm run benchmark -- --no-report               # Skip report generation
- `);
- }
-
- /**
-  * Main benchmark runner
-  */
- async function main(): Promise<void> {
-   const { category, inputFiles, help, noReport, pattern } = parseArgs();
-
-   if (help) {
-     printHelp();
-     process.exit(0);
-   }
-
-   // Resolve input files
-   let inputs: string[] = [];
-
-   if (pattern) {
-     inputs = expandPattern(pattern);
-     if (inputs.length === 0) {
-       console.error(`Error: No files matched pattern: ${pattern}`);
-       process.exit(1);
-     }
-   } else if (inputFiles.length > 0) {
-     inputs = inputFiles;
-   } else {
-     inputs = [defaultInputFile];
-   }
-
-   // Validate all input files exist
-   for (const input of inputs) {
-     if (!existsSync(input)) {
-       console.error(`Error: Input file not found: ${input}`);
-       console.error('Please provide a valid input file with --input or ensure testdata/video.mp4 exists.');
-       process.exit(1);
-     }
-   }
-
-   // Ensure results directory exists
-   if (!existsSync(resultsDir)) {
-     await mkdir(resultsDir, { recursive: true });
-   }
-
-   console.log(`
- ╔═══════════════════════════════════════════════════════════════╗
- ā•‘                    node-av Benchmark Suite                    ā•‘
- ā•šā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•
- `);
-
-   // Print system info
-   console.log('Gathering system information...\n');
-   const systemInfo = await getSystemInfo();
-
-   console.log(`System: ${systemInfo.os} ${systemInfo.osVersion} (${systemInfo.arch})`);
-   console.log(`CPU: ${systemInfo.cpu} (${systemInfo.cpuCores} cores)`);
-   console.log(`RAM: ${systemInfo.ram}`);
-   if (systemInfo.gpu) {
-     console.log(`GPU: ${systemInfo.gpu}`);
-   }
-   console.log(`Node.js: ${systemInfo.nodeVersion}`);
-   console.log(`FFmpeg: ${systemInfo.ffmpegVersion}`);
-   console.log(`node-av: ${systemInfo.nodeAVVersion}`);
-   console.log(`Input files: ${inputs.length}`);
-   for (const input of inputs) {
-     console.log(`  - ${basename(input)}`);
-   }
-
-   // Get input file info for all files
-   const inputFileInfos: InputFileInfo[] = [];
-   for (const input of inputs) {
-     const info = await createInputFileInfo(input);
-     inputFileInfos.push(info);
-     console.log(`\n${basename(input)}:`);
-     if (info.duration > 0) {
-       console.log(`  Duration: ${info.duration.toFixed(1)}s`);
-       if (info.resolution) {
-         console.log(`  Resolution: ${info.resolution}`);
-       }
-       if (info.codec) {
-         console.log(`  Codec: ${info.codec}`);
-       }
-       if (info.fps) {
-         console.log(`  FPS: ${info.fps.toFixed(1)}`);
-       }
-     }
-   }
-
-   // Run benchmarks based on category for each input file
-   const categoryLower = category?.toLowerCase() ?? 'all';
-   let latencyMetrics;
-
-   for (const input of inputs) {
-     console.log(`\n${'='.repeat(60)}`);
-     console.log(`šŸ“ Benchmarking: ${basename(input)}`);
-     console.log('='.repeat(60));
-
-     switch (categoryLower) {
-       case 'transcode':
-         await runAllTranscodeBenchmarks(input);
-         break;
-       case 'memory':
-         await runAllMemoryBenchmarks(input);
-         break;
-       case 'latency':
-         latencyMetrics = await measureAllLatencies(input, 10);
-         break;
-       case 'all':
-       default:
-         await runAllTranscodeBenchmarks(input);
-         await runAllMemoryBenchmarks(input);
-         // Only measure latency for first file (it's input-independent)
-         if (input === inputs[0]) {
-           latencyMetrics = await measureAllLatencies(input, 10);
-         }
-         break;
-     }
-   }
-
-   // Generate report if not disabled
-   if (!noReport) {
-     const results = runner.getResults();
-
-     const report: BenchmarkReport = {
-       systemInfo,
-       inputFileInfos,
-       transcodeResults: results.filter((r) => r.config.category === 'transcode'),
-       memoryResults: results.filter((r) => r.config.category === 'memory'),
-       latencyMetrics,
-       timestamp: new Date().toISOString(),
-     };
-
-     await writeReport(report);
-     saveResultsJSON(report);
-   }
-
-   console.log(`
- ╔═══════════════════════════════════════════════════════════════╗
- ā•‘                      Benchmarks Complete                      ā•‘
- ā•šā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•ā•
- `);
- }
-
- // Run main
- main().catch((error) => {
-   console.error('Benchmark failed:', error);
-   process.exit(1);
- });
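
A note on the CLI removed above: its `expandPattern` helper converts a glob to a regex by replacing only the first `*` in the basename, and it does not escape regex metacharacters, so the `.` in a pattern like `video.*` matches any character. A stricter conversion could look like the sketch below (`globToRegex` is our name for illustration, not node-av's):

function globToRegex(filePattern: string): RegExp {
  // Escape regex metacharacters (except '*'), then expand every '*' wildcard.
  const escaped = filePattern.replace(/[.+?^${}()|[\]\\]/g, '\\$&');
  return new RegExp(`^${escaped.replace(/\*/g, '.*')}$`);
}

// e.g. globToRegex('bbb-4k-*').test('bbb-4k-h264.mp4') // true
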
@@ -1,22 +0,0 @@
- /**
-  * Regenerate BENCHMARK.md from existing results JSON
-  */
- import { readFileSync, writeFileSync } from 'node:fs';
- import { dirname, resolve } from 'node:path';
- import { fileURLToPath } from 'node:url';
-
- import { generateReport } from './utils/report.js';
-
- import type { BenchmarkReport } from './utils/report.js';
-
- const __dirname = dirname(fileURLToPath(import.meta.url));
- const resultsPath = resolve(__dirname, 'results/benchmark-results.json');
- const outputPath = resolve(__dirname, '../BENCHMARK.md');
-
- const report: BenchmarkReport = JSON.parse(readFileSync(resultsPath, 'utf-8'));
- report.timestamp = new Date().toISOString();
-
- const markdown = await generateReport(report);
- writeFileSync(outputPath, markdown, 'utf-8');
-
- console.log('āœ“ BENCHMARK.md regenerated');
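
One caveat with the regeneration script above: it assumes `results/benchmark-results.json` is already present, and `readFileSync` throws a bare ENOENT otherwise. A guard one could place before the read, sketched here as an assumption rather than as part of the released script:

import { existsSync } from 'node:fs';

// Hypothetical pre-check: fail with a hint instead of an unhandled ENOENT.
if (!existsSync(resultsPath)) {
  console.error(`No results found at ${resultsPath}. Run "npm run benchmark" first.`);
  process.exit(1);
}
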
@@ -1,2 +0,0 @@
- # This directory stores benchmark results (JSON)
- # These files are gitignored to keep the repository clean
@@ -1,247 +0,0 @@
- /**
-  * Benchmark Runner
-  *
-  * Core class for running benchmarks and collecting results.
-  * Supports both FFmpeg CLI and node-av implementations.
-  */
-
- import { existsSync, unlinkSync } from 'node:fs';
- import { mkdir } from 'node:fs/promises';
- import { dirname } from 'node:path';
-
- import { measureFFmpegCLI } from './utils/ffmpeg-cli.js';
- import { aggregateResults, measure } from './utils/measure.js';
-
- import type { FFmpegRunOptions } from './utils/ffmpeg-cli.js';
- import type { AggregatedResult, MeasureResult } from './utils/measure.js';
-
- /**
-  * Configuration for a benchmark test case
-  */
- export interface BenchmarkConfig {
-   /** Name of the benchmark */
-   name: string;
-   /** Description of what this benchmark tests */
-   description: string;
-   /** Category: transcode, memory, or latency */
-   category: 'transcode' | 'memory' | 'latency';
-   /** Number of iterations to run */
-   iterations: number;
-   /** Number of warmup iterations (not counted in results) */
-   warmupIterations: number;
-   /** Input file path */
-   inputFile: string;
-   /** Output file path (will be deleted after each run) */
-   outputFile: string;
- }
-
- /**
-  * Result of a benchmark comparison between FFmpeg CLI and node-av
-  */
- export interface BenchmarkComparison {
-   config: BenchmarkConfig;
-   ffmpegCLI: AggregatedResult;
-   nodeAV: AggregatedResult;
-   /** Difference metrics */
-   comparison: {
-     /** Duration difference (positive means node-av is slower) */
-     durationDiffPercent: number;
-     /** Memory difference (positive means node-av uses more memory) */
-     memoryDiffPercent: number;
-     /** FPS difference (positive means node-av is faster) */
-     fpsDiffPercent?: number;
-   };
- }
-
- /**
-  * Callback type for node-av benchmark function
-  */
- export type NodeAVBenchmarkFn = () => Promise<{ framesProcessed?: number }>;
-
- /**
-  * Runner for executing and comparing benchmarks
-  */
- export class BenchmarkRunner {
-   private results: BenchmarkComparison[] = [];
-
-   /**
-    * Run a single iteration and measure results
-    */
-   private async runSingleIteration(
-     config: BenchmarkConfig,
-     ffmpegOptions: FFmpegRunOptions,
-     nodeAVFn: NodeAVBenchmarkFn,
-   ): Promise<{ ffmpeg: MeasureResult; nodeAV: MeasureResult }> {
-     // Ensure output directory exists
-     const outputDir = dirname(config.outputFile);
-     if (!existsSync(outputDir)) {
-       await mkdir(outputDir, { recursive: true });
-     }
-
-     // Run FFmpeg CLI
-     const ffmpegResult = await measureFFmpegCLI(ffmpegOptions);
-
-     // Clean up output file
-     if (existsSync(config.outputFile)) {
-       unlinkSync(config.outputFile);
-     }
-
-     // Run node-av
-     const nodeAVResult = await measure(nodeAVFn);
-
-     // Clean up output file
-     if (existsSync(config.outputFile)) {
-       unlinkSync(config.outputFile);
-     }
-
-     return { ffmpeg: ffmpegResult, nodeAV: nodeAVResult };
-   }
-
-   /**
-    * Run a complete benchmark with warmup and multiple iterations
-    */
-   async runBenchmark(config: BenchmarkConfig, ffmpegOptions: FFmpegRunOptions, nodeAVFn: NodeAVBenchmarkFn): Promise<BenchmarkComparison> {
-     console.log(`\nšŸ“Š Running benchmark: ${config.name}`);
-     console.log(`   ${config.description}`);
-     console.log(`   Iterations: ${config.iterations} (+ ${config.warmupIterations} warmup)\n`);
-
-     const ffmpegResults: MeasureResult[] = [];
-     const nodeAVResults: MeasureResult[] = [];
-
-     // Warmup iterations
-     for (let i = 0; i < config.warmupIterations; i++) {
-       process.stdout.write(`  Warmup ${i + 1}/${config.warmupIterations}...\r`);
-       await this.runSingleIteration(config, ffmpegOptions, nodeAVFn);
-       // GC between iterations to prevent memory accumulation
-       if (global.gc) {
-         global.gc();
-         await new Promise((resolve) => setTimeout(resolve, 100));
-       }
-     }
-
-     // Actual benchmark iterations
-     for (let i = 0; i < config.iterations; i++) {
-       process.stdout.write(`  Iteration ${i + 1}/${config.iterations}...\r`);
-       const { ffmpeg, nodeAV } = await this.runSingleIteration(config, ffmpegOptions, nodeAVFn);
-       ffmpegResults.push(ffmpeg);
-       nodeAVResults.push(nodeAV);
-       // GC between iterations to prevent memory accumulation
-       if (global.gc) {
-         global.gc();
-         await new Promise((resolve) => setTimeout(resolve, 100));
-       }
-     }
-
-     console.log(''); // New line after progress
-
-     // Aggregate results
-     const ffmpegAggregated = aggregateResults(ffmpegResults);
-     const nodeAVAggregated = aggregateResults(nodeAVResults);
-
-     // Calculate comparison metrics
-     const durationDiffPercent = this.percentDiff(ffmpegAggregated.durationMs.mean, nodeAVAggregated.durationMs.mean);
-
-     const memoryDiffPercent =
-       ffmpegAggregated.peakMemoryBytes.mean > 0 ? this.percentDiff(ffmpegAggregated.peakMemoryBytes.mean, nodeAVAggregated.peakMemoryBytes.mean) : 0;
-
-     const fpsDiffPercent = ffmpegAggregated.fps && nodeAVAggregated.fps ? this.percentDiff(ffmpegAggregated.fps.mean, nodeAVAggregated.fps.mean) : undefined;
-
-     const result: BenchmarkComparison = {
-       config,
-       ffmpegCLI: ffmpegAggregated,
-       nodeAV: nodeAVAggregated,
-       comparison: {
-         durationDiffPercent,
-         memoryDiffPercent,
-         fpsDiffPercent,
-       },
-     };
-
-     this.results.push(result);
-
-     // Print summary
-     this.printBenchmarkSummary(result);
-
-     return result;
-   }
-
-   /**
-    * Calculate percentage difference
-    */
-   private percentDiff(baseline: number, comparison: number): number {
-     if (baseline === 0) return 0;
-     return ((comparison - baseline) / baseline) * 100;
-   }
-
-   /**
-    * Print summary of a single benchmark result
-    */
-   private printBenchmarkSummary(result: BenchmarkComparison): void {
-     const { ffmpegCLI, nodeAV, comparison } = result;
-
-     console.log('  Results:');
-     console.log('  ā”Œā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”¬ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”¬ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”');
-     console.log('  │ Metric           │ FFmpeg CLI       │ node-av          │');
-     console.log('  ā”œā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”¼ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”¼ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”¤');
-
-     const durationStr = `${ffmpegCLI.durationMs.mean.toFixed(0)}ms`;
-     const nodeAVDurationStr = `${nodeAV.durationMs.mean.toFixed(0)}ms`;
-     console.log(`  │ Duration         │ ${durationStr.padEnd(16)} │ ${nodeAVDurationStr.padEnd(16)} │`);
-
-     if (ffmpegCLI.fps && nodeAV.fps) {
-       const fpsStr = `${ffmpegCLI.fps.mean.toFixed(1)} fps`;
-       const nodeAVFpsStr = `${nodeAV.fps.mean.toFixed(1)} fps`;
-       console.log(`  │ FPS              │ ${fpsStr.padEnd(16)} │ ${nodeAVFpsStr.padEnd(16)} │`);
-     }
-
-     const memStr = this.formatBytes(ffmpegCLI.peakMemoryBytes.mean);
-     const nodeAVMemStr = this.formatBytes(nodeAV.peakMemoryBytes.mean);
-     console.log(`  │ Peak Memory      │ ${memStr.padEnd(16)} │ ${nodeAVMemStr.padEnd(16)} │`);
-
-     console.log('  ā””ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”“ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”“ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”€ā”˜');
-
-     const diffSign = comparison.durationDiffPercent > 0 ? '+' : '';
-     console.log(`  Difference: ${diffSign}${comparison.durationDiffPercent.toFixed(1)}% duration`);
-
-     if (comparison.fpsDiffPercent !== undefined) {
-       const fpsSign = comparison.fpsDiffPercent > 0 ? '+' : '';
-       console.log(`              ${fpsSign}${comparison.fpsDiffPercent.toFixed(1)}% FPS`);
-     }
-   }
-
-   /**
-    * Format bytes to human readable
-    */
-   private formatBytes(bytes: number): string {
-     if (bytes === 0) return 'N/A';
-     const units = ['B', 'KB', 'MB', 'GB'];
-     let value = bytes;
-     let unitIndex = 0;
-
-     while (value >= 1024 && unitIndex < units.length - 1) {
-       value /= 1024;
-       unitIndex++;
-     }
-
-     return `${value.toFixed(1)} ${units[unitIndex]}`;
-   }
-
-   /**
-    * Get all collected results
-    */
-   getResults(): BenchmarkComparison[] {
-     return this.results;
-   }
-
-   /**
-    * Clear all results
-    */
-   clearResults(): void {
-     this.results = [];
-   }
- }
-
- /**
-  * Global benchmark runner instance
-  */
- export const runner = new BenchmarkRunner();
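
For context, this is roughly how the exported `runner` is driven. The sketch below is ours, not code from the package: `FFmpegRunOptions` is defined in `./utils/ffmpeg-cli.js`, which this diff does not include, so the `args` field here is an assumption. Note also that the GC pauses between iterations only take effect when Node is launched with `--expose-gc`.

import type { FFmpegRunOptions } from './utils/ffmpeg-cli.js';
import { runner } from './runner.js';

// Assumed shape: the real FFmpegRunOptions type is not shown in this diff.
const ffmpegOptions = { args: ['-i', 'testdata/video.mp4', '-c:v', 'libx264', 'results/out.mp4'] } as unknown as FFmpegRunOptions;

await runner.runBenchmark(
  {
    // BenchmarkConfig fields as declared above; the values are illustrative.
    name: 'h264-transcode',
    description: 'Transcode H.264 with libx264 defaults',
    category: 'transcode',
    iterations: 5,
    warmupIterations: 1,
    inputFile: 'testdata/video.mp4',
    outputFile: 'results/out.mp4',
  },
  ffmpegOptions,
  // node-av side of the comparison; the returned frame count feeds the FPS stats.
  async () => ({ framesProcessed: 300 }),
);

// percentDiff is (nodeAV - ffmpeg) / ffmpeg * 100, so a 100 ms FFmpeg mean
// against a 150 ms node-av mean reports +50.0% duration (node-av slower).
console.log(runner.getResults()[0].comparison);
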