@writechoice/mint-cli 0.0.8 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/PUBLISH.md +54 -332
- package/README.md +138 -287
- package/bin/cli.js +58 -8
- package/package.json +5 -4
- package/src/commands/fix/links.js +212 -0
- package/src/commands/validate/links.js +13 -299
- package/src/commands/validate/mdx.js +213 -0
- package/src/utils/config.js +115 -0
- package/src/utils/reports.js +182 -0
package/src/commands/validate/mdx.js
@@ -0,0 +1,213 @@
+/**
+ * MDX File Validation Tool
+ *
+ * Validates MDX files for parsing errors using the official @mdx-js/mdx compiler.
+ * Catches syntax errors, invalid JSX, mismatched tags, and other MDX parsing issues.
+ */
+
+import { existsSync, readdirSync, statSync, readFileSync } from "fs";
+import { join, relative, resolve } from "path";
+import { compile } from "@mdx-js/mdx";
+import chalk from "chalk";
+import { writeBothFormats, generateMdxParseMarkdown } from "../../utils/reports.js";
+
+// Configuration
+const EXCLUDED_DIRS = ["snippets", "node_modules", ".git"];
+const MDX_DIRS = ["."];
+
+// Data Structures
+class ValidationResult {
+  constructor(filePath, status, error = null) {
+    this.filePath = filePath;
+    this.status = status; // "valid" or "error"
+    this.error = error ? {
+      message: error.message,
+      line: error.line || null,
+      column: error.column || null,
+      position: error.position || null,
+      reason: error.reason || null,
+    } : null;
+  }
+}
+
+// Utility Functions
+
+function findMdxFiles(repoRoot, directory = null, file = null) {
+  if (file) {
+    const fullPath = resolve(repoRoot, file);
+    return existsSync(fullPath) ? [fullPath] : [];
+  }
+
+  const searchDirs = directory ? [resolve(repoRoot, directory)] : MDX_DIRS.map((d) => join(repoRoot, d));
+
+  const mdxFiles = [];
+
+  function walkDirectory(dir) {
+    const dirName = dir.split("/").pop();
+    if (EXCLUDED_DIRS.includes(dirName)) {
+      return;
+    }
+
+    try {
+      const entries = readdirSync(dir);
+
+      for (const entry of entries) {
+        const fullPath = join(dir, entry);
+        const stat = statSync(fullPath);
+
+        if (stat.isDirectory()) {
+          walkDirectory(fullPath);
+        } else if (stat.isFile() && entry.endsWith(".mdx")) {
+          mdxFiles.push(fullPath);
+        }
+      }
+    } catch (error) {
+      console.error(`Error reading directory ${dir}: ${error.message}`);
+    }
+  }
+
+  for (const dir of searchDirs) {
+    if (existsSync(dir)) {
+      walkDirectory(dir);
+    }
+  }
+
+  return mdxFiles.sort();
+}
+
+async function validateMdxFile(filePath, verbose = false) {
+  try {
+    const content = readFileSync(filePath, "utf-8");
+
+    if (verbose) {
+      console.log(`  Validating: ${filePath}`);
+    }
+
+    // Attempt to compile the MDX file
+    await compile(content, {
+      development: false,
+    });
+
+    return new ValidationResult(filePath, "valid");
+  } catch (error) {
+    if (verbose) {
+      console.log(chalk.red(`  ✗ Error in: ${filePath}`));
+      console.log(chalk.red(`    ${error.message}`));
+    }
+
+    return new ValidationResult(filePath, "error", error);
+  }
+}
+
+async function validateAllMdxFiles(files, verbose = false) {
+  const results = [];
+
+  for (let i = 0; i < files.length; i++) {
+    const file = files[i];
+    const progress = `[${i + 1}/${files.length}]`;
+
+    if (verbose) {
+      console.log(`${progress} Validating ${file}...`);
+    }
+
+    const result = await validateMdxFile(file, verbose);
+    results.push(result);
+  }
+
+  return results;
+}
+
+function generateReport(results, repoRoot) {
+  const summary = {
+    total: results.length,
+    valid: results.filter((r) => r.status === "valid").length,
+    errors: results.filter((r) => r.status === "error").length,
+  };
+
+  const errors = results
+    .filter((r) => r.status === "error")
+    .map((r) => ({
+      filePath: relative(repoRoot, r.filePath),
+      error: r.error,
+    }));
+
+  const valid = results
+    .filter((r) => r.status === "valid")
+    .map((r) => relative(repoRoot, r.filePath));
+
+  return {
+    summary,
+    errors,
+    valid,
+    timestamp: new Date().toISOString(),
+  };
+}
+
+// Main CLI Function
+
+export async function validateMdxFiles(options) {
+  const repoRoot = process.cwd();
+
+  if (!options.quiet) {
+    console.log(chalk.bold("\n📝 MDX File Validation\n"));
+  }
+
+  if (options.verbose && !options.quiet) {
+    console.log("Finding MDX files...");
+  }
+
+  const mdxFiles = findMdxFiles(repoRoot, options.dir, options.file);
+
+  if (mdxFiles.length === 0) {
+    console.error("No MDX files found.");
+    process.exit(1);
+  }
+
+  if (!options.quiet) {
+    console.log(`Found ${mdxFiles.length} MDX files\n`);
+  }
+
+  if (options.verbose && !options.quiet) {
+    console.log("Validating MDX files...\n");
+  }
+
+  const startTime = Date.now();
+  const results = await validateAllMdxFiles(mdxFiles, options.verbose);
+  const duration = ((Date.now() - startTime) / 1000).toFixed(2);
+
+  // Generate report
+  const report = generateReport(results, repoRoot);
+
+  // Always generate markdown content for the MD report
+  report.markdownContent = generateMdxParseMarkdown(report);
+
+  // Write both JSON and MD reports
+  const { jsonPath, mdPath } = writeBothFormats(report, "mdx_errors_report", repoRoot);
+
+  // Display summary
+  if (!options.quiet) {
+    console.log(chalk.bold(`\n✓ Validation complete in ${duration}s\n`));
+    console.log(chalk.bold("Summary:"));
+    console.log(`  Total files: ${report.summary.total}`);
+    console.log(chalk.green(`  Valid files: ${report.summary.valid}`));
+    console.log(chalk.red(`  Files with errors: ${report.summary.errors}`));
+    console.log(`\nReports saved to:`);
+    console.log(`  JSON: ${chalk.cyan(jsonPath)}`);
+    console.log(`  MD: ${chalk.cyan(mdPath)}`);
+
+    if (report.summary.errors > 0) {
+      console.log(chalk.yellow(`\n⚠️ Found ${report.summary.errors} file(s) with parsing errors`));
+      console.log("\nFiles with errors:");
+      report.errors.forEach((err) => {
+        console.log(chalk.red(`  ✗ ${err.filePath}`));
+        console.log(`    ${err.error.message}`);
+        if (err.error.line) {
+          console.log(`    Line ${err.error.line}${err.error.column ? `, Column ${err.error.column}` : ''}`);
+        }
+      });
+      process.exit(1);
+    } else {
+      console.log(chalk.green("\n✓ All MDX files are valid!"));
+    }
+  }
+}
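The exported `validateMdxFiles(options)` is the command entry point, presumably wired up in `bin/cli.js` (listed above but not shown). A minimal programmatic sketch of calling it directly, assuming a checkout of the package; the import path and option values are illustrative, while the option names (`dir`, `file`, `verbose`, `quiet`) are the ones the function reads:

```js
// Sketch only: hypothetical direct import from a package checkout.
import { validateMdxFiles } from "./src/commands/validate/mdx.js";

await validateMdxFiles({
  dir: "docs",      // restrict the walk to one directory (optional)
  file: undefined,  // or point at a single .mdx file instead
  verbose: true,    // per-file progress output
  quiet: false,     // when true, suppresses the console summary
});
// Side effects per the code above: writes mdx_errors_report.json and
// mdx_errors_report.md to the current working directory, and (when not
// quiet) exits with code 1 if any file fails to compile.
```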
package/src/utils/config.js
@@ -0,0 +1,115 @@
+/**
+ * Configuration File Loader
+ *
+ * Loads optional config.json from the project root and merges with CLI arguments.
+ * CLI arguments take precedence over config file values.
+ */
+
+import { readFileSync, existsSync } from "fs";
+import { join } from "path";
+
+/**
+ * Loads config.json from the current working directory if it exists
+ * @returns {Object|null} Configuration object or null if not found
+ */
+export function loadConfig() {
+  const configPath = join(process.cwd(), "config.json");
+
+  if (!existsSync(configPath)) {
+    return null;
+  }
+
+  try {
+    const configContent = readFileSync(configPath, "utf-8");
+    const config = JSON.parse(configContent);
+    return config;
+  } catch (error) {
+    console.error(`Error reading config.json: ${error.message}`);
+    return null;
+  }
+}
+
+/**
+ * Merges config file with CLI options for the links command
+ * CLI options take precedence over config file
+ *
+ * @param {string|undefined} baseUrl - Base URL from CLI
+ * @param {string|undefined} validationBaseUrl - Validation base URL from CLI
+ * @param {Object} options - CLI options
+ * @param {Object|null} config - Loaded config object
+ * @returns {Object} Merged configuration with baseUrl, validationBaseUrl, and options
+ */
+export function mergeLinksConfig(baseUrl, validationBaseUrl, options, config) {
+  if (!config) {
+    return { baseUrl, validationBaseUrl, options };
+  }
+
+  // Get base URLs from config if not provided via CLI
+  const finalBaseUrl = baseUrl || config.source;
+  const finalValidationBaseUrl = validationBaseUrl || config.target;
+
+  // Get links-specific config
+  const linksConfig = config.links || {};
+
+  // Merge options: CLI > links config > global config > defaults
+  const mergedOptions = {
+    ...options,
+    // Only use config values if CLI option wasn't provided
+    file: options.file || linksConfig.file,
+    dir: options.dir || linksConfig.dir,
+    output: options.output || linksConfig.output,
+    dryRun: options.dryRun !== undefined ? options.dryRun : linksConfig["dry-run"],
+    quiet: options.quiet !== undefined ? options.quiet : linksConfig.quiet,
+    concurrency: options.concurrency || linksConfig.concurrency,
+    headless: options.headless !== undefined ? options.headless :
+      (linksConfig.headless !== undefined ? linksConfig.headless : true),
+  };
+
+  return {
+    baseUrl: finalBaseUrl,
+    validationBaseUrl: finalValidationBaseUrl,
+    options: mergedOptions,
+  };
+}
+
+/**
+ * Merges config file with CLI options for the parse command
+ * CLI options take precedence over config file
+ *
+ * @param {Object} options - CLI options
+ * @param {Object|null} config - Loaded config object
+ * @returns {Object} Merged options
+ */
+export function mergeParseConfig(options, config) {
+  if (!config) {
+    return options;
+  }
+
+  // Get parse-specific config
+  const parseConfig = config.parse || {};
+
+  // Merge options: CLI > parse config > global config > defaults
+  return {
+    ...options,
+    file: options.file || parseConfig.file,
+    dir: options.dir || parseConfig.dir,
+    quiet: options.quiet !== undefined ? options.quiet : parseConfig.quiet,
+  };
+}
+
+/**
+ * Validates that required fields are present
+ * @param {string|undefined} baseUrl - Base URL
+ * @param {string} commandName - Name of the command for error messages
+ * @throws {Error} If required fields are missing
+ */
+export function validateRequiredConfig(baseUrl, commandName) {
+  if (!baseUrl) {
+    throw new Error(
+      `Missing required configuration: baseUrl must be provided either via CLI argument or in config.json (as "source")\n\n` +
+      `Usage:\n` +
+      `  CLI: ${commandName} <baseUrl>\n` +
+      `  config.json: { "source": "https://docs.example.com" }`
+    );
+  }
+}
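Taken together, the keys read by `loadConfig`, `mergeLinksConfig`, and `mergeParseConfig` above imply a `config.json` shaped roughly like the sketch below. Only the key names come from the code (note the hyphenated `"dry-run"` in the `links` section); all values and URLs are placeholders:

```json
{
  "source": "https://docs.example.com",
  "target": "http://localhost:3000",
  "links": {
    "dir": "guides",
    "output": "links_report",
    "dry-run": false,
    "quiet": false,
    "concurrency": 5,
    "headless": true
  },
  "parse": {
    "dir": "guides",
    "quiet": false
  }
}
```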
package/src/utils/reports.js
@@ -0,0 +1,182 @@
+/**
+ * Report Generation Utilities
+ *
+ * Shared utilities for generating validation reports in different formats (JSON, Markdown).
+ * Used by both links and parse validation commands.
+ */
+
+import { writeFileSync } from "fs";
+import { join } from "path";
+
+/**
+ * Writes a report to a file in the specified format
+ * @param {Object} reportData - The report data object
+ * @param {string} format - Output format: 'json' or 'md'
+ * @param {string} baseFileName - Base name for the report file (without extension)
+ * @param {string} repoRoot - Repository root directory
+ * @returns {string} Path to the written file
+ */
+export function writeReport(reportData, format, baseFileName, repoRoot) {
+  let content;
+  let extension;
+
+  if (format === "md" || format === "markdown") {
+    content = reportData.markdownContent || generateMarkdownFromJson(reportData);
+    extension = ".md";
+  } else {
+    content = JSON.stringify(reportData, null, 2);
+    extension = ".json";
+  }
+
+  const outputPath = join(repoRoot, `${baseFileName}${extension}`);
+  writeFileSync(outputPath, content, "utf-8");
+
+  return outputPath;
+}
+
+/**
+ * Fallback: Generates basic markdown from JSON structure
+ * @param {Object} data - Report data
+ * @returns {string} Markdown content
+ */
+function generateMarkdownFromJson(data) {
+  let markdown = "# Validation Report\n\n";
+  markdown += "```json\n";
+  markdown += JSON.stringify(data, null, 2);
+  markdown += "\n```\n";
+  return markdown;
+}
+
+/**
+ * Generates markdown report for MDX parsing validation
+ * @param {Object} report - Report object with summary, errors, valid, timestamp
+ * @returns {string} Markdown content
+ */
+export function generateMdxParseMarkdown(report) {
+  let markdown = "# MDX Validation Report\n\n";
+
+  // Summary
+  markdown += `## Summary\n\n`;
+  markdown += `- **Total files**: ${report.summary.total}\n`;
+  markdown += `- **Valid files**: ${report.summary.valid}\n`;
+  markdown += `- **Files with errors**: ${report.summary.errors}\n`;
+  markdown += `- **Generated**: ${report.timestamp}\n\n`;
+
+  // Files with errors
+  if (report.errors.length > 0) {
+    markdown += `## Files with Errors\n\n`;
+
+    report.errors.forEach((err) => {
+      markdown += `### [${err.filePath}](${err.filePath})\n\n`;
+      markdown += `- **Line ${err.error.line || "unknown"}**: ${err.error.message}\n`;
+
+      if (err.error.column) {
+        markdown += `  - Column: ${err.error.column}\n`;
+      }
+
+      markdown += `\n`;
+    });
+  }
+
+  return markdown;
+}
+
+/**
+ * Generates markdown report for links validation
+ * @param {Object} report - Report object with summary, configuration, results_by_file, timestamp
+ * @returns {string} Markdown content
+ */
+export function generateLinksMarkdown(report) {
+  let markdown = "# Links Validation Report\n\n";
+
+  // Summary
+  markdown += `## Summary\n\n`;
+  markdown += `- **Total links**: ${report.summary.total_links}\n`;
+  markdown += `- **Success**: ${report.summary.success}\n`;
+  markdown += `- **Failure**: ${report.summary.failure}\n`;
+  markdown += `- **Error**: ${report.summary.error}\n`;
+  markdown += `- **Generated**: ${report.timestamp}\n`;
+  markdown += `- **Execution time**: ${report.configuration.execution_time_seconds}s\n\n`;
+
+  // Configuration
+  markdown += `## Configuration\n\n`;
+  markdown += `- **Base URL**: ${report.configuration.base_url}\n`;
+  markdown += `- **Concurrency**: ${report.configuration.concurrency}\n`;
+  markdown += `- **Scanned directories**: ${report.configuration.scanned_directories.join(", ")}\n`;
+  markdown += `- **Excluded directories**: ${report.configuration.excluded_directories.join(", ")}\n\n`;
+
+  // Results by file
+  const failedResults = [];
+  const errorResults = [];
+
+  Object.entries(report.results_by_file).forEach(([filePath, results]) => {
+    results.forEach((result) => {
+      if (result.status === "failure") {
+        failedResults.push({ filePath, result });
+      } else if (result.status === "error") {
+        errorResults.push({ filePath, result });
+      }
+    });
+  });
+
+  // Failed links
+  if (failedResults.length > 0) {
+    markdown += `## Failed Links\n\n`;
+
+    failedResults.forEach(({ filePath, result }) => {
+      markdown += `### [${filePath}:${result.source.lineNumber}](${filePath}#L${result.source.lineNumber})\n\n`;
+      markdown += `- **Link text**: "${result.source.linkText}"\n`;
+      markdown += `- **Raw href**: \`${result.source.rawHref}\`\n`;
+      markdown += `- **Source URL**: ${result.sourceUrl}\n`;
+      markdown += `- **Target URL**: ${result.targetUrl}\n`;
+      markdown += `- **Error**: ${result.errorMessage}\n\n`;
+    });
+  }
+
+  // Error links
+  if (errorResults.length > 0) {
+    markdown += `## Links with Errors\n\n`;
+
+    errorResults.forEach(({ filePath, result }) => {
+      markdown += `### [${filePath}:${result.source.lineNumber}](${filePath}#L${result.source.lineNumber})\n\n`;
+      markdown += `- **Link text**: "${result.source.linkText}"\n`;
+      markdown += `- **Raw href**: \`${result.source.rawHref}\`\n`;
+      markdown += `- **Target URL**: ${result.targetUrl}\n`;
+      markdown += `- **Error**: ${result.errorMessage}\n\n`;
    });
+  }
+
+  // Success message
+  if (failedResults.length === 0 && errorResults.length === 0) {
+    markdown += `## ✓ All Links Valid\n\n`;
+    markdown += `All ${report.summary.total_links} links validated successfully!\n`;
+  }
+
+  return markdown;
+}
+
+/**
+ * Writes a report in both JSON and Markdown formats
+ * Always generates both files regardless of user preference
+ * @param {Object} reportData - The report data object
+ * @param {string} baseFileName - Base name for the report file (without extension)
+ * @param {string} repoRoot - Repository root directory
+ * @returns {Object} Object with jsonPath and mdPath
+ */
+export function writeBothFormats(reportData, baseFileName, repoRoot) {
+  const jsonPath = writeReport(reportData, "json", baseFileName, repoRoot);
+  const mdPath = writeReport(reportData, "md", baseFileName, repoRoot);
+
+  return { jsonPath, mdPath };
+}
+
+/**
+ * Normalizes format string to lowercase and handles variations
+ * @param {string|undefined} format - Format string
+ * @returns {string} Normalized format ('json' or 'md')
+ */
+export function normalizeFormat(format) {
+  if (!format) return "json";
+  const normalized = format.toLowerCase();
+  return normalized === "markdown" ? "md" : normalized;
+}
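To show how the MDX validator and the report utilities fit together, here is a rough sketch using a hand-built report object in the shape `generateMdxParseMarkdown` reads; the import path and every field value are invented for illustration:

```js
// Sketch only: hypothetical direct import from a package checkout.
import { writeBothFormats, generateMdxParseMarkdown } from "./src/utils/reports.js";

// Hand-built report matching the fields generateMdxParseMarkdown expects.
const report = {
  summary: { total: 2, valid: 1, errors: 1 },
  errors: [
    {
      filePath: "guides/example.mdx",
      error: { message: "Unexpected closing tag", line: 12, column: 3 },
    },
  ],
  valid: ["index.mdx"],
  timestamp: new Date().toISOString(),
};

// writeReport prefers report.markdownContent for the .md file, so attach it first.
report.markdownContent = generateMdxParseMarkdown(report);

// Writes example_report.json and example_report.md into the given directory.
const { jsonPath, mdPath } = writeBothFormats(report, "example_report", process.cwd());
console.log(jsonPath, mdPath);
```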