@writechoice/mint-cli 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +14 -13
- package/bin/cli.js +9 -11
- package/package.json +1 -1
- package/src/commands/validate/links.js +147 -184
package/README.md
CHANGED
@@ -83,16 +83,10 @@ writechoice check links docs.example.com -f path/to/file.mdx
 # Validate links in a specific directory
 writechoice check links docs.example.com -d path/to/docs

-# Use short aliases for common flags
-writechoice check links docs.example.com -v -o my_report.json
-
 # Dry run (extract links without validating)
 writechoice check links docs.example.com --dry-run

-#
-writechoice check links docs.example.com -v
-
-# Quiet mode (only generate report)
+# Quiet mode (suppress terminal output, only generate report)
 writechoice check links docs.example.com --quiet

 # Custom output path for report
@@ -123,14 +117,15 @@ writechoice check links docs.example.com --fix-from-report custom_report.json
 | `--dir <path>` | `-d` | Validate links in a specific directory | - |
 | `--output <path>` | `-o` | Output path for JSON report | `links_report.json` |
 | `--dry-run` | - | Extract and show links without validating | `false` |
-| `--
-| `--quiet` | - | Suppress stdout output (only generate report) | `false` |
+| `--quiet` | - | Suppress terminal output (only generate report) | `false` |
 | `--concurrency <number>` | `-c` | Number of concurrent browser tabs | `25` |
 | `--headless` | - | Run browser in headless mode | `true` |
 | `--no-headless` | - | Show browser window (for debugging) | - |
 | `--fix` | - | Automatically fix anchor links in MDX files | `false` |
 | `--fix-from-report [path]` | - | Fix anchor links from report file (optional path) | `links_report.json` |

+**Note:** Detailed progress output is shown by default. Use `--quiet` to suppress terminal output.
+
 ## How It Works

 ### Link Extraction
@@ -273,16 +268,22 @@ The default concurrency is set to 25 concurrent browser tabs. Adjust this based
 
 ## Examples

-### Validate all links with
+### Validate all links (with progress output)
+
+```bash
+writechoice check links docs.example.com
+```
+
+### Validate quietly (suppress terminal output)

 ```bash
-writechoice check links docs.example.com
+writechoice check links docs.example.com --quiet
 ```

 ### Validate and fix issues in one command

 ```bash
-writechoice check links docs.example.com --fix
+writechoice check links docs.example.com --fix
 ```

 ### Two-step fix workflow
@@ -301,7 +302,7 @@ writechoice check links docs.example.com --fix-from-report
 ### Validate specific directory

 ```bash
-writechoice check links docs.example.com -d docs/api
+writechoice check links docs.example.com -d docs/api
 ```

 ## Troubleshooting
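
The option table above maps directly onto the options object that bin/cli.js builds and hands to validateLinks (both files appear later in this diff). A hedged sketch of driving the same check from Node instead of the CLI — the deep import specifier is an assumption and programmatic use is not a documented entry point of the package:

```js
// Illustrative only: validateLinks is exported from src/commands/validate/links.js
// per this diff, but the package may not expose this deep import in its "exports" map.
import { validateLinks } from "@writechoice/mint-cli/src/commands/validate/links.js";

await validateLinks("docs.example.com", {
  dir: "docs/api",             // -d / --dir
  output: "links_report.json", // -o / --output (default)
  concurrency: "25",           // -c / --concurrency; parsed with parseInt in links.js
  headless: true,              // --headless / --no-headless
  quiet: false,                // --quiet
  verbose: true,               // in 0.0.5, bin/cli.js sets this to !quiet
});
```
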
package/bin/cli.js
CHANGED
@@ -17,7 +17,7 @@ const program = new Command();
 program
   .name("writechoice")
   .description("CLI tool for Mintlify documentation validation and utilities")
-  .version(packageJson.version, "-
+  .version(packageJson.version, "-v, --version", "Output the current version");

 // Validate command
 const check = program.command("check").description("Validation commands for documentation");
@@ -30,8 +30,7 @@ check
   .option("-d, --dir <path>", "Validate links in a specific directory")
   .option("-o, --output <path>", "Output path for JSON report", "links_report.json")
   .option("--dry-run", "Extract and show links without validating")
-  .option("
-  .option("--quiet", "Suppress stdout output (only generate report)")
+  .option("--quiet", "Suppress terminal output (only generate report)")
   .option("-c, --concurrency <number>", "Number of concurrent browser tabs", "25")
   .option("--headless", "Run browser in headless mode (default)", true)
   .option("--no-headless", "Show browser window (for debugging)")
@@ -39,6 +38,8 @@ check
   .option("--fix-from-report [path]", "Fix anchor links from report file (default: links_report.json)")
   .action(async (baseUrl, options) => {
     const { validateLinks } = await import("../src/commands/validate/links.js");
+    // Verbose is now default (true unless --quiet is specified)
+    options.verbose = !options.quiet;
     await validateLinks(baseUrl, options);
   });

@@ -51,10 +52,7 @@ program

   try {
     // Get latest version from npm
-    const latestVersion = execSync(
-      `npm view ${packageJson.name} version`,
-      { encoding: "utf-8" }
-    ).trim();
+    const latestVersion = execSync(`npm view ${packageJson.name} version`, { encoding: "utf-8" }).trim();

     const currentVersion = packageJson.version;

@@ -83,10 +81,10 @@ program
 // Check for updates on every command (non-blocking)
 async function checkForUpdates() {
   try {
-    const latestVersion = execSync(
-
-
-    ).trim();
+    const latestVersion = execSync(`npm view ${packageJson.name} version 2>/dev/null`, {
+      encoding: "utf-8",
+      timeout: 2000,
+    }).trim();

     const currentVersion = packageJson.version;

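
Two things change in bin/cli.js: progress output becomes the default (the action handler now derives verbose from --quiet), and the background update check gets a 2-second timeout with stderr discarded, so a slow or offline npm registry cannot stall the CLI. A minimal sketch of that update-check pattern — the helper name is invented, while the execSync call mirrors the diff:

```js
// Sketch only: checkForUpdates() in cli.js swallows failures the same way,
// so the CLI keeps working when npm is unreachable.
import { execSync } from "node:child_process";

function latestPublishedVersion(pkgName) {
  try {
    return execSync(`npm view ${pkgName} version 2>/dev/null`, {
      encoding: "utf-8",
      timeout: 2000, // new in 0.0.5: execSync throws if npm takes longer than 2 s
    }).trim();
  } catch {
    return null; // network error, timeout, or npm not available
  }
}

console.log(latestPublishedVersion("@writechoice/mint-cli") ?? "version check skipped");
```
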
package/src/commands/validate/links.js
CHANGED
@@ -6,11 +6,11 @@
  * JavaScript-rendered Mintlify pages.
  */

-import { readFileSync, writeFileSync, existsSync, readdirSync, statSync } from
-import { join, relative, resolve, dirname } from
-import { fileURLToPath } from
-import { chromium } from
-import chalk from
+import { readFileSync, writeFileSync, existsSync, readdirSync, statSync } from "fs";
+import { join, relative, resolve, dirname } from "path";
+import { fileURLToPath } from "url";
+import { chromium } from "playwright";
+import chalk from "chalk";
 import {
   cleanHeadingText,
   toKebabCase,
@@ -20,15 +20,15 @@ import {
   findLineNumber,
   removeCodeBlocksAndFrontmatter,
   resolvePath as resolvePathUtil,
-} from
+} from "../../utils/helpers.js";

 const __filename = fileURLToPath(import.meta.url);
 const __dirname = dirname(__filename);

 // Configuration
-const DEFAULT_BASE_URL =
-const EXCLUDED_DIRS = [
-const MDX_DIRS = [
+const DEFAULT_BASE_URL = "https://docs.nebius.com";
+const EXCLUDED_DIRS = ["snippets"];
+const MDX_DIRS = ["."];
 const DEFAULT_TIMEOUT = 30000; // 30 seconds
 const DEFAULT_CONCURRENCY = 25;

@@ -75,7 +75,7 @@ class ValidationResult {
     actualHeading = null,
     actualHeadingKebab = null,
     errorMessage = null,
-    validationTimeMs = 0
+    validationTimeMs = 0,
   ) {
     this.source = source;
     this.targetUrl = targetUrl;
@@ -106,10 +106,10 @@ function urlToFilePath(url, baseUrl, repoRoot) {
     }
   }

-  path = path.replace(/^\/+/,
+  path = path.replace(/^\/+/, "");

-  if (!path || path ===
-    const indexPath = join(repoRoot,
+  if (!path || path === "/") {
+    const indexPath = join(repoRoot, "index.mdx");
     return existsSync(indexPath) ? indexPath : null;
   }

@@ -118,7 +118,7 @@ function urlToFilePath(url, baseUrl, repoRoot) {
     return mdxPath;
   }

-  const indexPath = join(repoRoot, path,
+  const indexPath = join(repoRoot, path, "index.mdx");
   if (existsSync(indexPath)) {
     return indexPath;
   }
@@ -132,44 +132,44 @@ function resolvePath(mdxFilePath, href, baseUrl, repoRoot) {
   }

   let path, anchor;
-  if (href.includes(
-    [path, anchor] = href.split(
+  if (href.includes("#")) {
+    [path, anchor] = href.split("#", 2);
   } else {
     path = href;
-    anchor =
+    anchor = "";
   }

   if (!path && anchor) {
     const relPath = relative(repoRoot, mdxFilePath);
-    const urlPath = relPath.replace(/\.mdx$/,
+    const urlPath = relPath.replace(/\.mdx$/, "");
     const fullUrl = normalizeUrl(`${baseUrl}/${urlPath}`);
     return `${fullUrl}#${anchor}`;
   }

   let fullUrl;

-  if (path.startsWith(
+  if (path.startsWith("/")) {
     fullUrl = normalizeUrl(baseUrl + path);
   } else {
     const mdxDir = dirname(mdxFilePath);

-    if (path.startsWith(
+    if (path.startsWith("./")) {
       path = path.slice(2);
     }

     const resolved = resolve(mdxDir, path);

     const relToRoot = relative(repoRoot, resolved);
-    if (relToRoot.startsWith(
+    if (relToRoot.startsWith("..")) {
       return null;
     }

-    const urlPath = relToRoot.replace(/\.mdx$/,
+    const urlPath = relToRoot.replace(/\.mdx$/, "");
     fullUrl = normalizeUrl(`${baseUrl}/${urlPath}`);
   }

   if (anchor) {
-    fullUrl +=
+    fullUrl += "#" + anchor;
   }

   return fullUrl;
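
The resolvePath hunk above is mostly a quoting/formatting change, but it also documents the resolution pipeline: split off the anchor, append site-absolute hrefs to the base URL, resolve relative hrefs against the linking file and re-root them at the repo, strip the .mdx extension, and re-attach the anchor. A hedged re-creation using only Node's path module (normalizeUrl and the same-file-anchor case are simplified away; all names below are examples):

```js
// Minimal sketch of the resolution logic shown in the diff, not the package's code.
import { dirname, relative, resolve } from "node:path";

function resolveDocUrl(mdxFilePath, href, baseUrl, repoRoot) {
  const [path = "", anchor = ""] = href.split("#", 2);
  let fullUrl;
  if (path.startsWith("/")) {
    fullUrl = baseUrl + path;                       // site-absolute link
  } else {
    const resolved = resolve(dirname(mdxFilePath), path.replace(/^\.\//, ""));
    const relToRoot = relative(repoRoot, resolved); // re-root at the docs repo
    if (relToRoot.startsWith("..")) return null;    // escapes the repo: skip
    fullUrl = `${baseUrl}/${relToRoot.replace(/\.mdx$/, "")}`;
  }
  return anchor ? `${fullUrl}#${anchor}` : fullUrl;
}

// e.g. a link written inside docs/guides/setup.mdx:
console.log(
  resolveDocUrl("/repo/docs/guides/setup.mdx", "../api/auth.mdx#tokens", "https://docs.example.com", "/repo"),
); // -> https://docs.example.com/docs/api/auth#tokens
```
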
@@ -179,7 +179,7 @@ function resolvePath(mdxFilePath, href, baseUrl, repoRoot) {

 function extractMdxHeadings(filePath) {
   try {
-    const content = readFileSync(filePath,
+    const content = readFileSync(filePath, "utf-8");
     const { cleanedContent } = removeCodeBlocksAndFrontmatter(content);

     const headingPattern = /^#{1,6}\s+(.+)$/gm;
@@ -189,7 +189,7 @@ function extractMdxHeadings(filePath) {
     while ((match = headingPattern.exec(cleanedContent)) !== null) {
       let headingText = match[1].trim();
       // Remove any trailing {#custom-id} syntax if present
-      headingText = headingText.replace(/\s*\{#[^}]+\}\s*$/,
+      headingText = headingText.replace(/\s*\{#[^}]+\}\s*$/, "");
       headings.push(headingText);
     }

@@ -206,7 +206,7 @@ function extractLinksFromFile(filePath, baseUrl, repoRoot, verbose = false) {

   let content;
   try {
-    content = readFileSync(filePath,
+    content = readFileSync(filePath, "utf-8");
   } catch (error) {
     console.error(`Error reading ${filePath}: ${error.message}`);
     return [];
@@ -234,8 +234,8 @@ function extractLinksFromFile(filePath, baseUrl, repoRoot, verbose = false) {
   const markdownMatches = [...cleanedContent.matchAll(LINK_PATTERNS.markdown)];
   for (const match of markdownMatches) {
     // Check if this is actually an image by looking at the character before '['
-    const charBefore = match.index > 0 ? cleanedContent[match.index - 1] :
-    if (charBefore ===
+    const charBefore = match.index > 0 ? cleanedContent[match.index - 1] : "";
+    if (charBefore === "!") {
       // This is a markdown image, skip it
       continue;
     }
@@ -252,21 +252,13 @@ function extractLinksFromFile(filePath, baseUrl, repoRoot, verbose = false) {
         findLineNumber(content, match.index),
         linkText.trim(),
         href,
-
+        "markdown",
       );

-      const [basePath, anchor =
+      const [basePath, anchor = ""] = targetUrl.split("#");
       const expectedSlug = new URL(targetUrl).pathname;

-      links.push(
-        new Link(
-          location,
-          targetUrl,
-          basePath,
-          anchor || null,
-          expectedSlug
-        )
-      );
+      links.push(new Link(location, targetUrl, basePath, anchor || null, expectedSlug));
     }
   }

@@ -285,21 +277,13 @@ function extractLinksFromFile(filePath, baseUrl, repoRoot, verbose = false) {
         findLineNumber(content, match.index),
         linkText.trim(),
         href,
-
+        "html",
       );

-      const [basePath, anchor =
+      const [basePath, anchor = ""] = targetUrl.split("#");
       const expectedSlug = new URL(targetUrl).pathname;

-      links.push(
-        new Link(
-          location,
-          targetUrl,
-          basePath,
-          anchor || null,
-          expectedSlug
-        )
-      );
+      links.push(new Link(location, targetUrl, basePath, anchor || null, expectedSlug));
     }
   }

@@ -318,21 +302,13 @@ function extractLinksFromFile(filePath, baseUrl, repoRoot, verbose = false) {
         findLineNumber(content, match.index),
         linkText.trim(),
         href,
-
+        "jsx",
       );

-      const [basePath, anchor =
+      const [basePath, anchor = ""] = targetUrl.split("#");
       const expectedSlug = new URL(targetUrl).pathname;

-      links.push(
-        new Link(
-          location,
-          targetUrl,
-          basePath,
-          anchor || null,
-          expectedSlug
-        )
-      );
+      links.push(new Link(location, targetUrl, basePath, anchor || null, expectedSlug));
     }
   }

@@ -351,21 +327,13 @@ function extractLinksFromFile(filePath, baseUrl, repoRoot, verbose = false) {
         findLineNumber(content, match.index),
         linkText.trim(),
         href,
-
+        "jsx",
       );

-      const [basePath, anchor =
+      const [basePath, anchor = ""] = targetUrl.split("#");
       const expectedSlug = new URL(targetUrl).pathname;

-      links.push(
-        new Link(
-          location,
-          targetUrl,
-          basePath,
-          anchor || null,
-          expectedSlug
-        )
-      );
+      links.push(new Link(location, targetUrl, basePath, anchor || null, expectedSlug));
     }
   }

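
extractLinksFromFile repeats the same routine for each pattern — markdown, HTML href attributes, and two JSX forms — skipping markdown images by peeking at the character before the `[`, splitting the anchor off the resolved URL, and recording the link type that the fixers later rely on. The package's LINK_PATTERNS regexes are not part of this diff, so the regex below is an assumption; the skip check and the anchor split mirror the hunks above:

```js
// Hedged sketch of the markdown pass only; LINK_PATTERNS.markdown in the real
// file may differ from this illustrative regex.
const markdownLink = /\[([^\]]*)\]\(([^)\s]+)\)/g;

function extractMarkdownLinks(cleanedContent) {
  const links = [];
  for (const match of cleanedContent.matchAll(markdownLink)) {
    // Skip images: a markdown image is the same syntax preceded by "!".
    const charBefore = match.index > 0 ? cleanedContent[match.index - 1] : "";
    if (charBefore === "!") continue;

    const [, linkText, href] = match;
    const [basePath, anchor = ""] = href.split("#"); // same split as the diff
    links.push({ linkType: "markdown", linkText: linkText.trim(), href, basePath, anchor: anchor || null });
  }
  return links;
}

console.log(extractMarkdownLinks("See ![logo](/img/logo.png) and [the API guide](/api/auth#tokens)."));
// -> one link: { linkType: "markdown", linkText: "the API guide", href: "/api/auth#tokens", ... }
```
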
@@ -378,9 +346,7 @@ function findMdxFiles(repoRoot, directory = null, file = null) {
     return existsSync(fullPath) ? [fullPath] : [];
   }

-  const searchDirs = directory
-    ? [resolve(repoRoot, directory)]
-    : MDX_DIRS.map(d => join(repoRoot, d));
+  const searchDirs = directory ? [resolve(repoRoot, directory)] : MDX_DIRS.map((d) => join(repoRoot, d));

   const files = [];

@@ -393,10 +359,10 @@ function findMdxFiles(repoRoot, directory = null, file = null) {
       const stat = statSync(fullPath);

       if (stat.isDirectory()) {
-        if (!EXCLUDED_DIRS.some(excluded => fullPath.includes(excluded))) {
+        if (!EXCLUDED_DIRS.some((excluded) => fullPath.includes(excluded))) {
           walkDir(fullPath);
         }
-      } else if (entry.endsWith(
+      } else if (entry.endsWith(".mdx")) {
         files.push(fullPath);
       }
     }
@@ -411,7 +377,7 @@ function findMdxFiles(repoRoot, directory = null, file = null) {

 // Playwright Validation Functions

-async function validateAnchor(page, link, baseUrl, repoRoot, verbose = false, progress =
+async function validateAnchor(page, link, baseUrl, repoRoot, verbose = false, progress = "") {
   const startTime = Date.now();

   try {
@@ -423,12 +389,12 @@ async function validateAnchor(page, link, baseUrl, repoRoot, verbose = false, pr
     const mdxFilePath = urlToFilePath(link.basePath, baseUrl, repoRoot);
     if (mdxFilePath && existsSync(mdxFilePath)) {
       const mdxHeadings = extractMdxHeadings(mdxFilePath);
-      const mdxHeadingsKebab = mdxHeadings.map(h => toKebabCase(h));
+      const mdxHeadingsKebab = mdxHeadings.map((h) => toKebabCase(h));

       if (mdxHeadingsKebab.includes(link.anchor)) {
-        const heading = mdxHeadings.find(h => toKebabCase(h) === link.anchor);
+        const heading = mdxHeadings.find((h) => toKebabCase(h) === link.anchor);
         if (verbose) {
-          console.log(
+          console.log(`  ✓ Anchor validated locally in MDX file`);
         }
         return new ValidationResult(
           link.source,
@@ -436,20 +402,20 @@ async function validateAnchor(page, link, baseUrl, repoRoot, verbose = false, pr
           link.basePath,
           link.anchor,
           link.expectedSlug,
-
+          "success",
           link.basePath,
           heading,
           link.anchor,
           null,
-          Date.now() - startTime
+          Date.now() - startTime,
         );
       } else if (verbose) {
-        console.log(
+        console.log(`  Anchor not found in local MDX, checking online...`);
       }
     }

     // Navigate to base page
-    await page.goto(link.basePath, { waitUntil:
+    await page.goto(link.basePath, { waitUntil: "networkidle", timeout: DEFAULT_TIMEOUT });

     // Try to find heading by anchor
     let heading = await page.$(`#${link.anchor}`);
@@ -465,12 +431,12 @@ async function validateAnchor(page, link, baseUrl, repoRoot, verbose = false, pr
         link.basePath,
         link.anchor,
         link.expectedSlug,
-
+        "failure",
         null,
         null,
         null,
         `Anchor #${link.anchor} not found on page`,
-        Date.now() - startTime
+        Date.now() - startTime,
       );
     }

@@ -482,7 +448,7 @@ async function validateAnchor(page, link, baseUrl, repoRoot, verbose = false, pr
     // Extract headings from the TARGET MDX file to verify
     const mdxFilePath2 = urlToFilePath(link.basePath, baseUrl, repoRoot);
     const mdxHeadings = mdxFilePath2 ? extractMdxHeadings(mdxFilePath2) : [];
-    const mdxHeadingsKebab = mdxHeadings.map(h => toKebabCase(h));
+    const mdxHeadingsKebab = mdxHeadings.map((h) => toKebabCase(h));

     const matchesMdx = mdxHeadingsKebab.includes(actualKebab);

@@ -494,12 +460,12 @@ async function validateAnchor(page, link, baseUrl, repoRoot, verbose = false, pr
           link.basePath,
           link.anchor,
           link.expectedSlug,
-
+          "success",
           link.basePath,
           actualTextClean,
           actualKebab,
           null,
-          Date.now() - startTime
+          Date.now() - startTime,
         );
       } else {
         return new ValidationResult(
@@ -508,12 +474,12 @@ async function validateAnchor(page, link, baseUrl, repoRoot, verbose = false, pr
           link.basePath,
           link.anchor,
           link.expectedSlug,
-
+          "failure",
           null,
           actualTextClean,
           actualKebab,
           `Anchor "#${link.anchor}" matches page heading "${actualTextClean}" but this heading is not found in the MDX file`,
-          Date.now() - startTime
+          Date.now() - startTime,
         );
       }
     } else {
@@ -524,12 +490,12 @@ async function validateAnchor(page, link, baseUrl, repoRoot, verbose = false, pr
           link.basePath,
           link.anchor,
           link.expectedSlug,
-
+          "failure",
           null,
           actualTextClean,
           actualKebab,
           `Expected anchor "#${link.anchor}" but page heading "${actualTextClean}" should use "#${actualKebab}"`,
-          Date.now() - startTime
+          Date.now() - startTime,
         );
       } else {
         return new ValidationResult(
@@ -538,12 +504,12 @@ async function validateAnchor(page, link, baseUrl, repoRoot, verbose = false, pr
           link.basePath,
           link.anchor,
           link.expectedSlug,
-
+          "failure",
           null,
           actualTextClean,
           actualKebab,
           `Expected anchor "#${link.anchor}" but found heading "${actualTextClean}" (#${actualKebab}) which is not in the MDX file`,
-          Date.now() - startTime
+          Date.now() - startTime,
         );
       }
     }
@@ -554,17 +520,17 @@ async function validateAnchor(page, link, baseUrl, repoRoot, verbose = false, pr
       link.basePath,
       link.anchor,
       link.expectedSlug,
-
+      "error",
       null,
       null,
       null,
       `Error validating anchor: ${error.message}`,
-      Date.now() - startTime
+      Date.now() - startTime,
     );
   }
 }

-async function validateNormalLink(page, link, baseUrl, repoRoot, verbose = false, progress =
+async function validateNormalLink(page, link, baseUrl, repoRoot, verbose = false, progress = "") {
   const startTime = Date.now();

   try {
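
For anchored links, validateAnchor first tries to settle the check locally: it extracts the headings of the target MDX file, kebab-cases them, and looks for the link's #anchor before ever loading a page. toKebabCase lives in utils/helpers.js and is not part of this diff, so the version below is an assumed approximation of Mintlify-style slugs:

```js
// Hedged sketch of the local anchor check; the real toKebabCase/cleanHeadingText
// helpers may differ in edge cases (punctuation, emphasis markers, etc.).
function toKebabCase(heading) {
  return heading
    .toLowerCase()
    .replace(/[^a-z0-9\s-]/g, "") // drop punctuation (assumption)
    .trim()
    .replace(/\s+/g, "-");
}

function anchorExistsLocally(mdxHeadings, anchor) {
  return mdxHeadings.map((h) => toKebabCase(h)).includes(anchor);
}

const headings = ["Getting Started", "Create an API token", "FAQ"];
console.log(anchorExistsLocally(headings, "create-an-api-token")); // true  -> validated without a browser
console.log(anchorExistsLocally(headings, "api-token"));           // false -> falls back to the online check
```
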
@@ -576,7 +542,7 @@ async function validateNormalLink(page, link, baseUrl, repoRoot, verbose = false
     const mdxFilePath = urlToFilePath(link.targetUrl, baseUrl, repoRoot);
     if (mdxFilePath && existsSync(mdxFilePath)) {
       if (verbose) {
-        console.log(
+        console.log(`  ✓ Link validated locally (file exists)`);
       }
       return new ValidationResult(
         link.source,
@@ -584,19 +550,19 @@ async function validateNormalLink(page, link, baseUrl, repoRoot, verbose = false
         link.basePath,
         link.anchor,
         link.expectedSlug,
-
+        "success",
         link.targetUrl,
         null,
         null,
         null,
-        Date.now() - startTime
+        Date.now() - startTime,
       );
     } else if (verbose) {
-      console.log(
+      console.log(`  File not found locally, checking online...`);
     }

     // Navigate to the target URL
-    const response = await page.goto(link.targetUrl, { waitUntil:
+    const response = await page.goto(link.targetUrl, { waitUntil: "networkidle", timeout: DEFAULT_TIMEOUT });

     if (!response) {
       return new ValidationResult(
@@ -605,12 +571,12 @@ async function validateNormalLink(page, link, baseUrl, repoRoot, verbose = false
         link.basePath,
         link.anchor,
         link.expectedSlug,
-
+        "error",
         null,
         null,
         null,
-
-        Date.now() - startTime
+        "No response received",
+        Date.now() - startTime,
       );
     }

@@ -623,12 +589,12 @@ async function validateNormalLink(page, link, baseUrl, repoRoot, verbose = false
         link.basePath,
         link.anchor,
         link.expectedSlug,
-
+        "failure",
         actualUrl,
         null,
         null,
         `HTTP ${response.status()}: ${response.statusText()}`,
-        Date.now() - startTime
+        Date.now() - startTime,
       );
     }

@@ -638,12 +604,12 @@ async function validateNormalLink(page, link, baseUrl, repoRoot, verbose = false
       link.basePath,
       link.anchor,
       link.expectedSlug,
-
+      "success",
       actualUrl,
       null,
       null,
       null,
-      Date.now() - startTime
+      Date.now() - startTime,
     );
   } catch (error) {
     return new ValidationResult(
@@ -652,17 +618,17 @@ async function validateNormalLink(page, link, baseUrl, repoRoot, verbose = false
       link.basePath,
       link.anchor,
       link.expectedSlug,
-
+      "error",
       null,
       null,
       null,
       `Error validating link: ${error.message}`,
-      Date.now() - startTime
+      Date.now() - startTime,
     );
   }
 }

-async function validateLink(page, link, baseUrl, repoRoot, verbose = false, progress =
+async function validateLink(page, link, baseUrl, repoRoot, verbose = false, progress = "") {
   if (link.anchor) {
     return await validateAnchor(page, link, baseUrl, repoRoot, verbose, progress);
   } else {
@@ -677,14 +643,16 @@ async function validateLinksAsync(links, baseUrl, repoRoot, concurrency, headles
   try {
     browser = await chromium.launch({ headless });
   } catch (error) {
-    if (
-
-
-
-
-      console.error(chalk.
-      console.error(
-      console.error(chalk.cyan(
+    if (
+      error.message.includes("Executable doesn't exist") ||
+      error.message.includes("Browser was not installed") ||
+      error.message.includes("browserType.launch")
+    ) {
+      console.error(chalk.red("\n✗ Playwright browsers are not installed!"));
+      console.error(chalk.yellow("\nTo install Playwright browsers, run:"));
+      console.error(chalk.cyan("  npx playwright install chromium\n"));
+      console.error("Or install all browsers with:");
+      console.error(chalk.cyan("  npx playwright install\n"));
       process.exit(1);
     }
     throw error;
@@ -696,7 +664,7 @@ async function validateLinksAsync(links, baseUrl, repoRoot, concurrency, headles
   async function validateWithSemaphore(link) {
     counter++;
     const current = counter;
-    const progress = verbose ? `[${current}/${links.length}] ` :
+    const progress = verbose ? `[${current}/${links.length}] ` : "";

     const context = await browser.newContext();
     const page = await context.newPage();
@@ -716,7 +684,7 @@ async function validateLinksAsync(links, baseUrl, repoRoot, concurrency, headles
   // Process links with concurrency control
   for (let i = 0; i < links.length; i += concurrency) {
     const batch = links.slice(i, i + concurrency);
-    const batchResults = await Promise.all(batch.map(link => validateWithSemaphore(link)));
+    const batchResults = await Promise.all(batch.map((link) => validateWithSemaphore(link)));
     results.push(...batchResults);
   }

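
validateLinksAsync launches Chromium once (printing an `npx playwright install chromium` hint if the browser binary is missing) and then validates links in fixed-size batches: `concurrency` links are kicked off together and the loop waits for the whole batch before starting the next. A stripped-down sketch of that control flow, with the Playwright work replaced by a stand-in so it runs on its own:

```js
// Hedged sketch of the batching loop from validateLinksAsync; validateOne is a
// placeholder for the per-link browser context/page work in the real file.
async function validateInBatches(links, concurrency, validateOne) {
  const results = [];
  for (let i = 0; i < links.length; i += concurrency) {
    const batch = links.slice(i, i + concurrency);
    // All links in the batch run concurrently; the loop waits for the slowest.
    const batchResults = await Promise.all(batch.map((link) => validateOne(link)));
    results.push(...batchResults);
  }
  return results;
}

// Usage with a dummy validator:
const links = Array.from({ length: 60 }, (_, i) => `link-${i}`);
const results = await validateInBatches(links, 25, async (link) => ({ link, status: "success" }));
console.log(results.length); // 60, produced in 3 batches of 25/25/10
```
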
@@ -735,7 +703,7 @@ function fixLinksFromReport(reportPath, repoRoot, verbose = false) {

   let reportData;
   try {
-    reportData = JSON.parse(readFileSync(reportPath,
+    reportData = JSON.parse(readFileSync(reportPath, "utf-8"));
   } catch (error) {
     console.error(`Error reading report file: ${error.message}`);
     return {};
@@ -745,7 +713,7 @@ function fixLinksFromReport(reportPath, repoRoot, verbose = false) {

   if (Object.keys(resultsByFile).length === 0) {
     if (verbose) {
-      console.log(
+      console.log("No failures found in report.");
     }
     return {};
   }
@@ -762,15 +730,13 @@ function fixLinksFromReport(reportPath, repoRoot, verbose = false) {
       continue;
     }

-    const fixableFailures = failures.filter(
-      f => f.status === 'failure' && f.actual_heading_kebab && f.anchor
-    );
+    const fixableFailures = failures.filter((f) => f.status === "failure" && f.actual_heading_kebab && f.anchor);

     if (fixableFailures.length === 0) continue;

     try {
-      const content = readFileSync(fullPath,
-      let lines = content.split(
+      const content = readFileSync(fullPath, "utf-8");
+      let lines = content.split("\n");
       let modified = false;
       let fixesCount = 0;

@@ -791,7 +757,7 @@ function fixLinksFromReport(reportPath, repoRoot, verbose = false) {
         const newAnchor = failure.actual_heading_kebab;
         const linkType = failure.source.link_type;

-        const pathPart = oldHref.includes(
+        const pathPart = oldHref.includes("#") ? oldHref.split("#")[0] : oldHref;
         const newHref = pathPart ? `${pathPart}#${newAnchor}` : `#${newAnchor}`;

         if (oldHref === newHref) {
@@ -803,14 +769,14 @@ function fixLinksFromReport(reportPath, repoRoot, verbose = false) {

         let replaced = false;

-        if (linkType ===
+        if (linkType === "markdown") {
           const oldPattern = `(${oldHref})`;
           const newPattern = `(${newHref})`;
           if (line.includes(oldPattern)) {
             line = line.replace(oldPattern, newPattern);
             replaced = true;
           }
-        } else if (linkType ===
+        } else if (linkType === "html" || linkType === "jsx") {
           for (const quote of ['"', "'"]) {
             const oldPattern = `href=${quote}${oldHref}${quote}`;
             const newPattern = `href=${quote}${newHref}${quote}`;
@@ -838,8 +804,8 @@ function fixLinksFromReport(reportPath, repoRoot, verbose = false) {
       }

       if (modified) {
-        const newContent = lines.join(
-        writeFileSync(fullPath, newContent,
+        const newContent = lines.join("\n");
+        writeFileSync(fullPath, newContent, "utf-8");
         fixesApplied[filePath] = fixesCount;

         if (verbose) {
@@ -860,7 +826,7 @@ function fixLinks(results, repoRoot, verbose = false) {
   const failuresByFile = {};

   for (const result of results) {
-    if (result.status !==
+    if (result.status !== "failure" || !result.actualHeadingKebab || !result.anchor) {
       continue;
     }

@@ -885,8 +851,8 @@ function fixLinks(results, repoRoot, verbose = false) {
     }

     try {
-      const content = readFileSync(fullPath,
-      let lines = content.split(
+      const content = readFileSync(fullPath, "utf-8");
+      let lines = content.split("\n");
       let modified = false;
       let fixesCount = 0;

@@ -906,7 +872,7 @@ function fixLinks(results, repoRoot, verbose = false) {
       const oldHref = failure.source.rawHref;
       const linkType = failure.source.linkType;

-      const pathPart = oldHref.includes(
+      const pathPart = oldHref.includes("#") ? oldHref.split("#")[0] : oldHref;
       const newHref = pathPart ? `${pathPart}#${failure.actualHeadingKebab}` : `#${failure.actualHeadingKebab}`;

       if (oldHref === newHref) {
@@ -918,14 +884,14 @@ function fixLinks(results, repoRoot, verbose = false) {

       let replaced = false;

-      if (linkType ===
+      if (linkType === "markdown") {
         const oldPattern = `(${oldHref})`;
         const newPattern = `(${newHref})`;
         if (line.includes(oldPattern)) {
           line = line.replace(oldPattern, newPattern);
           replaced = true;
         }
-      } else if (linkType ===
+      } else if (linkType === "html" || linkType === "jsx") {
         for (const quote of ['"', "'"]) {
           const oldPattern = `href=${quote}${oldHref}${quote}`;
           const newPattern = `href=${quote}${newHref}${quote}`;
@@ -953,8 +919,8 @@ function fixLinks(results, repoRoot, verbose = false) {
       }

       if (modified) {
-        const newContent = lines.join(
-        writeFileSync(fullPath, newContent,
+        const newContent = lines.join("\n");
+        writeFileSync(fullPath, newContent, "utf-8");
         fixesApplied[filePath] = fixesCount;

         if (verbose) {
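
Both fix paths apply the same textual rewrite: keep the path part of the broken href, swap the anchor for the kebab-cased heading the page actually has, and substitute the new href in place — `(old)` → `(new)` for markdown links and `href="old"` → `href="new"` for HTML/JSX links. A small sketch of that substitution on a single line (the inputs are invented):

```js
// Hedged sketch of the per-line fix; in the package this runs against the line
// number recorded for the failure, not against arbitrary strings.
function fixAnchorInLine(line, oldHref, newAnchor, linkType) {
  const pathPart = oldHref.includes("#") ? oldHref.split("#")[0] : oldHref;
  const newHref = pathPart ? `${pathPart}#${newAnchor}` : `#${newAnchor}`;
  if (oldHref === newHref) return line; // nothing to do

  if (linkType === "markdown") {
    return line.replace(`(${oldHref})`, `(${newHref})`);
  }
  // html / jsx: try both quote styles around the href attribute
  for (const quote of ['"', "'"]) {
    const oldPattern = `href=${quote}${oldHref}${quote}`;
    if (line.includes(oldPattern)) {
      return line.replace(oldPattern, `href=${quote}${newHref}${quote}`);
    }
  }
  return line;
}

console.log(fixAnchorInLine("See [auth](/api/auth#api-token).", "/api/auth#api-token", "create-an-api-token", "markdown"));
// -> See [auth](/api/auth#create-an-api-token).
```
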
@@ -975,9 +941,9 @@ function fixLinks(results, repoRoot, verbose = false) {

 function generateReport(results, config, outputPath) {
   const total = results.length;
-  const success = results.filter(r => r.status ===
-  const failure = results.filter(r => r.status ===
-  const error = results.filter(r => r.status ===
+  const success = results.filter((r) => r.status === "success").length;
+  const failure = results.filter((r) => r.status === "failure").length;
+  const error = results.filter((r) => r.status === "error").length;

   const summaryByFile = {};
   for (const result of results) {
@@ -992,7 +958,7 @@ function generateReport(results, config, outputPath) {

   const resultsByFile = {};
   for (const result of results) {
-    if (result.status ===
+    if (result.status === "success") continue;

     const filePath = result.source.filePath;
     if (!resultsByFile[filePath]) {
@@ -1015,7 +981,7 @@ function generateReport(results, config, outputPath) {
     results_by_file: resultsByFile,
   };

-  writeFileSync(outputPath, JSON.stringify(report, null, 2),
+  writeFileSync(outputPath, JSON.stringify(report, null, 2), "utf-8");

   return report;
 }
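
generateReport writes a pretty-printed JSON file (links_report.json by default) that the --fix-from-report path later reads back. The field names below are taken from this diff — summary counters, results_by_file keyed by source file, and the snake_case per-result fields consumed by fixLinksFromReport — but the full schema is not shown, so treat the sketch as indicative only:

```js
// Indicative shape only; fields not visible in this diff are omitted.
const exampleReport = {
  summary: { total_links: 120, success: 117, failure: 2, error: 1 },
  results_by_file: {
    "docs/api/auth.mdx": [
      {
        status: "failure",
        anchor: "api-token",
        actual_heading_kebab: "create-an-api-token", // what fixLinksFromReport rewrites to
        source: { link_type: "markdown" },           // plus the raw href / line number used for the fix
      },
    ],
  },
};

// fixLinksFromReport keeps only entries it can rewrite:
const fixable = exampleReport.results_by_file["docs/api/auth.mdx"].filter(
  (f) => f.status === "failure" && f.actual_heading_kebab && f.anchor,
);
console.log(fixable.length); // 1
```
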
@@ -1028,19 +994,14 @@ export async function validateLinks(baseUrl, options) {
   // Handle --fix-from-report mode
   if (options.fixFromReport !== undefined) {
     // If flag is passed with a path, use that path; otherwise use default
-    const reportPath =
-      ? options.fixFromReport
-      : 'links_report.json';
+    const reportPath =
+      typeof options.fixFromReport === "string" && options.fixFromReport ? options.fixFromReport : "links_report.json";

     if (!options.quiet) {
       console.log(`Applying fixes from report: ${reportPath}`);
     }

-    const fixesApplied = fixLinksFromReport(
-      reportPath,
-      repoRoot,
-      options.verbose && !options.quiet
-    );
+    const fixesApplied = fixLinksFromReport(reportPath, repoRoot, options.verbose && !options.quiet);

     if (!options.quiet) {
       if (Object.keys(fixesApplied).length > 0) {
@@ -1049,9 +1010,9 @@ export async function validateLinks(baseUrl, options) {
         for (const [filePath, count] of Object.entries(fixesApplied)) {
           console.log(`  ${filePath}: ${count} fix(es)`);
         }
-        console.log(
+        console.log("\nRun validation again to verify the fixes.");
       } else {
-        console.log(
+        console.log("\nNo fixable issues found in report.");
       }
     }

@@ -1060,20 +1021,20 @@ export async function validateLinks(baseUrl, options) {

   // Normalize base URL - add https:// if not present
   let normalizedBaseUrl = baseUrl;
-  if (!normalizedBaseUrl.startsWith(
-    normalizedBaseUrl =
+  if (!normalizedBaseUrl.startsWith("http://") && !normalizedBaseUrl.startsWith("https://")) {
+    normalizedBaseUrl = "https://" + normalizedBaseUrl;
   }
   // Remove trailing slash
-  normalizedBaseUrl = normalizedBaseUrl.replace(/\/+$/,
+  normalizedBaseUrl = normalizedBaseUrl.replace(/\/+$/, "");

   if (options.verbose && !options.quiet) {
-    console.log(
+    console.log("Finding MDX files...");
   }

   const mdxFiles = findMdxFiles(repoRoot, options.dir, options.file);

   if (mdxFiles.length === 0) {
-    console.error(
+    console.error("No MDX files found.");
     process.exit(1);
   }

@@ -1082,7 +1043,7 @@ export async function validateLinks(baseUrl, options) {
   }

   if (options.verbose && !options.quiet) {
-    console.log(
+    console.log("Extracting links...");
   }

   const allLinks = [];
@@ -1092,7 +1053,7 @@ export async function validateLinks(baseUrl, options) {
   }

   if (allLinks.length === 0) {
-    console.log(
+    console.log("No internal links found.");
     return;
   }

@@ -1101,7 +1062,7 @@ export async function validateLinks(baseUrl, options) {
   }

   if (options.dryRun) {
-    console.log(
+    console.log("\nExtracted links:");
     allLinks.forEach((link, i) => {
       console.log(`\n${i + 1}. ${link.source.filePath}:${link.source.lineNumber}`);
       console.log(`   Text: ${link.source.linkText}`);
@@ -1117,7 +1078,7 @@ export async function validateLinks(baseUrl, options) {
   const startTime = Date.now();

   if (!options.quiet) {
-    console.log(
+    console.log("\nValidating links...");
   }

   const results = await validateLinksAsync(
@@ -1126,14 +1087,14 @@ export async function validateLinks(baseUrl, options) {
     repoRoot,
     parseInt(options.concurrency) || DEFAULT_CONCURRENCY,
     options.headless !== false,
-    options.verbose && !options.quiet
+    options.verbose && !options.quiet,
   );

   const executionTime = (Date.now() - startTime) / 1000;

   if (options.fix) {
     if (!options.quiet) {
-      console.log(
+      console.log("\nApplying fixes...");
     }

     const fixesApplied = fixLinks(results, repoRoot, options.verbose && !options.quiet);
@@ -1145,9 +1106,9 @@ export async function validateLinks(baseUrl, options) {
       for (const [filePath, count] of Object.entries(fixesApplied)) {
         console.log(`  ${filePath}: ${count} fix(es)`);
       }
-      console.log(
+      console.log("\nRun validation again to verify the fixes.");
     } else {
-      console.log(
+      console.log("\nNo fixable issues found.");
     }
   }
 }
@@ -1160,23 +1121,23 @@ export async function validateLinks(baseUrl, options) {
     execution_time_seconds: Math.round(executionTime * 100) / 100,
   };

-  const report = generateReport(results, config, options.output ||
+  const report = generateReport(results, config, options.output || "links_report.json");

   if (!options.quiet) {
-    console.log(`\n${
-    console.log(
-    console.log(
+    console.log(`\n${"=".repeat(60)}`);
+    console.log("VALIDATION SUMMARY");
+    console.log("=".repeat(60));
     console.log(`Total links: ${report.summary.total_links}`);
-    console.log(`Success: ${chalk.green(report.summary.success +
-    console.log(`Failure: ${chalk.red(report.summary.failure +
-    console.log(`Error: ${chalk.yellow(report.summary.error +
+    console.log(`Success: ${chalk.green(report.summary.success + " ✓")}`);
+    console.log(`Failure: ${chalk.red(report.summary.failure + " ✗")}`);
+    console.log(`Error: ${chalk.yellow(report.summary.error + " ⚠")}`);
     console.log(`Execution time: ${executionTime.toFixed(2)}s`);
-    console.log(`\nReport saved to: ${options.output ||
+    console.log(`\nReport saved to: ${options.output || "links_report.json"}`);

     if (report.summary.failure > 0 || report.summary.error > 0) {
-      console.log(`\n${
-      console.log(
-      console.log(
+      console.log(`\n${"=".repeat(60)}`);
+      console.log("ISSUES FOUND");
+      console.log("=".repeat(60));
       let shown = 0;

       for (const [filePath, fileResults] of Object.entries(report.results_by_file)) {
@@ -1196,7 +1157,9 @@ export async function validateLinks(baseUrl, options) {

       if (shown < report.summary.failure + report.summary.error) {
         const remaining = report.summary.failure + report.summary.error - shown;
-        console.log(
+        console.log(
+          `\n... and ${remaining} more issues. See ${options.output || "links_report.json"} for full details.`,
+        );
       }
     }
   }