@djangocfg/seo 2.1.50

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/README.md +192 -0
  2. package/dist/cli.d.ts +1 -0
  3. package/dist/cli.mjs +3780 -0
  4. package/dist/cli.mjs.map +1 -0
  5. package/dist/crawler/index.d.ts +88 -0
  6. package/dist/crawler/index.mjs +610 -0
  7. package/dist/crawler/index.mjs.map +1 -0
  8. package/dist/google-console/index.d.ts +95 -0
  9. package/dist/google-console/index.mjs +539 -0
  10. package/dist/google-console/index.mjs.map +1 -0
  11. package/dist/index.d.ts +285 -0
  12. package/dist/index.mjs +3236 -0
  13. package/dist/index.mjs.map +1 -0
  14. package/dist/link-checker/index.d.ts +76 -0
  15. package/dist/link-checker/index.mjs +326 -0
  16. package/dist/link-checker/index.mjs.map +1 -0
  17. package/dist/markdown-report-B3QdDzxE.d.ts +193 -0
  18. package/dist/reports/index.d.ts +24 -0
  19. package/dist/reports/index.mjs +836 -0
  20. package/dist/reports/index.mjs.map +1 -0
  21. package/dist/routes/index.d.ts +69 -0
  22. package/dist/routes/index.mjs +372 -0
  23. package/dist/routes/index.mjs.map +1 -0
  24. package/dist/scanner-Cz4Th2Pt.d.ts +60 -0
  25. package/dist/types/index.d.ts +144 -0
  26. package/dist/types/index.mjs +3 -0
  27. package/dist/types/index.mjs.map +1 -0
  28. package/package.json +114 -0
  29. package/src/analyzer.ts +256 -0
  30. package/src/cli/commands/audit.ts +260 -0
  31. package/src/cli/commands/content.ts +180 -0
  32. package/src/cli/commands/crawl.ts +32 -0
  33. package/src/cli/commands/index.ts +12 -0
  34. package/src/cli/commands/inspect.ts +60 -0
  35. package/src/cli/commands/links.ts +41 -0
  36. package/src/cli/commands/robots.ts +36 -0
  37. package/src/cli/commands/routes.ts +126 -0
  38. package/src/cli/commands/sitemap.ts +48 -0
  39. package/src/cli/index.ts +149 -0
  40. package/src/cli/types.ts +40 -0
  41. package/src/config.ts +207 -0
  42. package/src/content/index.ts +51 -0
  43. package/src/content/link-checker.ts +182 -0
  44. package/src/content/link-fixer.ts +188 -0
  45. package/src/content/scanner.ts +200 -0
  46. package/src/content/sitemap-generator.ts +321 -0
  47. package/src/content/types.ts +140 -0
  48. package/src/crawler/crawler.ts +425 -0
  49. package/src/crawler/index.ts +10 -0
  50. package/src/crawler/robots-parser.ts +171 -0
  51. package/src/crawler/sitemap-validator.ts +204 -0
  52. package/src/google-console/analyzer.ts +317 -0
  53. package/src/google-console/auth.ts +100 -0
  54. package/src/google-console/client.ts +281 -0
  55. package/src/google-console/index.ts +9 -0
  56. package/src/index.ts +144 -0
  57. package/src/link-checker/index.ts +461 -0
  58. package/src/reports/claude-context.ts +149 -0
  59. package/src/reports/generator.ts +244 -0
  60. package/src/reports/index.ts +27 -0
  61. package/src/reports/json-report.ts +320 -0
  62. package/src/reports/markdown-report.ts +246 -0
  63. package/src/reports/split-report.ts +252 -0
  64. package/src/routes/analyzer.ts +324 -0
  65. package/src/routes/index.ts +25 -0
  66. package/src/routes/scanner.ts +298 -0
  67. package/src/types/index.ts +222 -0
  68. package/src/utils/index.ts +154 -0
@@ -0,0 +1,260 @@
1
+ /**
2
+ * Audit Command - Full SEO audit
3
+ */
4
+
5
+ import consola from 'consola';
6
+ import chalk from 'chalk';
7
+ import { getSiteUrl, findGoogleServiceAccount, getGscKeyFilename } from '../../config.js';
8
+ import { GoogleConsoleClient, analyzeInspectionResults } from '../../google-console/index.js';
9
+ import { SiteCrawler, analyzeCrawlResults, analyzeRobotsTxt, analyzeAllSitemaps, analyzeSitemap } from '../../crawler/index.js';
10
+ import { checkLinks, linkResultsToSeoIssues } from '../../link-checker/index.js';
11
+ import { generateAndSaveReports, printReportSummary } from '../../reports/index.js';
12
+ import { scanRoutes, findAppDir, compareWithSitemap, analyzeRoutes } from '../../routes/index.js';
13
+ import type { SeoIssue, UrlInspectionResult, CrawlResult } from '../../types/index.js';
14
+ import { parseFormats, type CliOptions } from '../types.js';
15
+
16
+ interface StepResult {
17
+ name: string;
18
+ issues: SeoIssue[];
19
+ meta?: Record<string, any>;
20
+ error?: string;
21
+ }
22
+
23
+ export async function runAudit(options: CliOptions) {
24
+ const siteUrl = getSiteUrl(options);
25
+ const startTime = Date.now();
26
+
27
+ console.log('');
28
+ consola.box(`${chalk.bold('SEO Audit')}\n${siteUrl}`);
29
+
30
+ // Auto-detect or use explicit service account
31
+ const serviceAccountPath = findGoogleServiceAccount(options['service-account']);
32
+ const hasGsc = !!serviceAccountPath;
33
+
34
+ // Auto-detect app directory for routes
35
+ const appDir = options['app-dir'] || findAppDir();
36
+ const hasRoutes = !!appDir;
37
+
38
+ // Show hints
39
+ if (!serviceAccountPath) {
40
+ const keyFile = getGscKeyFilename();
41
+ console.log('');
42
+ consola.info(chalk.dim('GSC not configured. Save service account as ' + chalk.cyan(keyFile) + ' for indexing data.'));
43
+ }
44
+
45
+ const allIssues: SeoIssue[] = [];
46
+ const allInspections: UrlInspectionResult[] = [];
47
+ const allCrawlResults: CrawlResult[] = [];
48
+ const results: StepResult[] = [];
49
+ let collectedSitemapUrls: string[] = [];
50
+
51
+ // Progress tracking: robots + sitemap + crawl + links + (routes?) + (gsc?)
52
+ let totalSteps = 4;
53
+ if (hasRoutes) totalSteps++;
54
+ if (hasGsc) totalSteps++;
55
+ let completedSteps = 0;
56
+ const errors: string[] = [];
57
+
58
+ const updateProgress = (step: string, status: 'running' | 'done' | 'error') => {
59
+ const bar = '█'.repeat(completedSteps) + '░'.repeat(totalSteps - completedSteps);
60
+ const pct = Math.round((completedSteps / totalSteps) * 100);
61
+ if (status === 'running') {
62
+ process.stdout.write(`\r${chalk.cyan('▸')} ${bar} ${pct}% ${chalk.dim(step)}`);
63
+ } else if (status === 'done') {
64
+ completedSteps++;
65
+ const newBar = '█'.repeat(completedSteps) + '░'.repeat(totalSteps - completedSteps);
66
+ const newPct = Math.round((completedSteps / totalSteps) * 100);
67
+ process.stdout.write(`\r${chalk.green('✓')} ${newBar} ${newPct}% ${chalk.dim(step)}${' '.repeat(20)}\n`);
68
+ } else {
69
+ process.stdout.write(`\r${chalk.red('✗')} ${bar} ${pct}% ${chalk.dim(step)}${' '.repeat(20)}\n`);
70
+ }
71
+ };
72
+
73
+ // Phase 1: robots.txt (needed for sitemap URLs)
74
+ console.log('');
75
+ let sitemapUrls: string[] = [];
76
+ updateProgress('robots.txt', 'running');
77
+ try {
78
+ const analysis = await analyzeRobotsTxt(siteUrl);
79
+ sitemapUrls = analysis.sitemaps;
80
+ results.push({ name: 'robots.txt', issues: analysis.issues, meta: { exists: analysis.exists } });
81
+ allIssues.push(...analysis.issues);
82
+ updateProgress('robots.txt', 'done');
83
+ } catch (e) {
84
+ errors.push(`robots.txt: ${(e as Error).message}`);
85
+ updateProgress('robots.txt', 'error');
86
+ }
87
+
88
+ // Phase 2: Parallel execution (Sitemap + Crawl + Links)
89
+ const parallelTasks = await Promise.allSettled([
90
+ // Sitemap
91
+ (async () => {
92
+ updateProgress('Sitemap', 'running');
93
+ const issues: SeoIssue[] = [];
94
+ const sitemapsToCheck = sitemapUrls.length > 0
95
+ ? sitemapUrls
96
+ : [new URL('/sitemap.xml', siteUrl).href];
97
+
98
+ let totalUrls = 0;
99
+ for (const smUrl of sitemapsToCheck) {
100
+ const analyses = await analyzeAllSitemaps(smUrl);
101
+ for (const a of analyses) {
102
+ issues.push(...a.issues);
103
+ totalUrls += a.urls.length;
104
+ // Collect URLs for routes comparison
105
+ collectedSitemapUrls.push(...a.urls.map(u => u.loc));
106
+ }
107
+ }
108
+ updateProgress('Sitemap', 'done');
109
+ return { name: 'Sitemap', issues, meta: { urls: totalUrls } };
110
+ })(),
111
+
112
+ // Crawl
113
+ (async () => {
114
+ updateProgress('Crawl', 'running');
115
+ const crawler = new SiteCrawler(siteUrl, {
116
+ maxPages: parseInt(options['max-pages'], 10),
117
+ maxDepth: parseInt(options['max-depth'], 10),
118
+ });
119
+ const crawlResults = await crawler.crawl();
120
+ allCrawlResults.push(...crawlResults);
121
+ const issues = analyzeCrawlResults(crawlResults);
122
+ updateProgress('Crawl', 'done');
123
+ return { name: 'Crawl', issues, meta: { pages: crawlResults.length } };
124
+ })(),
125
+
126
+ // Links
127
+ (async () => {
128
+ updateProgress('Links', 'running');
129
+ const result = await checkLinks({
130
+ url: siteUrl,
131
+ timeout: parseInt(options.timeout, 10),
132
+ concurrency: parseInt(options.concurrency, 10),
133
+ verbose: false,
134
+ });
135
+ const issues = linkResultsToSeoIssues(result);
136
+ updateProgress('Links', 'done');
137
+ return { name: 'Links', issues, meta: { total: result.total, broken: result.broken } };
138
+ })(),
139
+ ]);
140
+
141
+ // Collect parallel results
142
+ for (const task of parallelTasks) {
143
+ if (task.status === 'fulfilled') {
144
+ results.push(task.value);
145
+ allIssues.push(...task.value.issues);
146
+ } else {
147
+ errors.push(task.reason?.message || 'Unknown error');
148
+ }
149
+ }
150
+
151
+ // Phase 3: Routes (needs sitemap URLs from parallel phase)
152
+ if (hasRoutes && appDir) {
153
+ updateProgress('Routes', 'running');
154
+ try {
155
+ const scanResult = scanRoutes({ appDir });
156
+ const comparison = compareWithSitemap(scanResult, collectedSitemapUrls, siteUrl);
157
+ const issues = analyzeRoutes(scanResult, comparison);
158
+
159
+ results.push({
160
+ name: 'Routes',
161
+ issues,
162
+ meta: {
163
+ static: scanResult.staticRoutes.length,
164
+ dynamic: scanResult.dynamicRoutes.length,
165
+ missing: comparison.missingFromSitemap.length,
166
+ },
167
+ });
168
+ allIssues.push(...issues);
169
+ updateProgress('Routes', 'done');
170
+ } catch (e) {
171
+ errors.push(`Routes: ${(e as Error).message}`);
172
+ updateProgress('Routes', 'error');
173
+ }
174
+ }
175
+
176
+ // Phase 4: GSC (needs crawl results)
177
+ if (hasGsc) {
178
+ updateProgress('GSC', 'running');
179
+ try {
180
+ const client = new GoogleConsoleClient({
181
+ siteUrl,
182
+ serviceAccountPath,
183
+ });
184
+
185
+ const isAuth = await client.verify();
186
+ if (isAuth) {
187
+ const urlsToInspect = allCrawlResults
188
+ .filter((r) => r.statusCode === 200)
189
+ .slice(0, 50)
190
+ .map((r) => r.url);
191
+
192
+ const inspections = await client.inspectUrls(urlsToInspect);
193
+ allInspections.push(...inspections);
194
+ const issues = analyzeInspectionResults(inspections);
195
+ results.push({ name: 'GSC', issues, meta: { inspected: inspections.length } });
196
+ allIssues.push(...issues);
197
+ } else {
198
+ results.push({ name: 'GSC', issues: [], meta: { skipped: true } });
199
+ }
200
+ updateProgress('GSC', 'done');
201
+ } catch (e) {
202
+ errors.push(`GSC: ${(e as Error).message}`);
203
+ updateProgress('GSC', 'error');
204
+ }
205
+ }
206
+
207
+ // Show errors if any
208
+ if (errors.length > 0) {
209
+ console.log('');
210
+ for (const err of errors) {
211
+ consola.error(err);
212
+ }
213
+ }
214
+
215
+ // Summary table
216
+ console.log('');
217
+ consola.log(chalk.bold('Results:'));
218
+ for (const r of results) {
219
+ const issueStr = r.issues.length > 0 ? chalk.yellow(`${r.issues.length} issues`) : chalk.green('OK');
220
+ const metaStr = r.meta ? chalk.dim(` (${Object.entries(r.meta).map(([k, v]) => `${k}: ${v}`).join(', ')})`) : '';
221
+ consola.log(` ${r.name}: ${issueStr}${metaStr}`);
222
+ }
223
+
224
+ // Generate reports
225
+ console.log('');
226
+ consola.start('Generating reports...');
227
+
228
+ const formats = parseFormats(options.format);
229
+ const { report, files } = await generateAndSaveReports(
230
+ siteUrl,
231
+ {
232
+ issues: allIssues,
233
+ urlInspections: allInspections,
234
+ crawlResults: allCrawlResults,
235
+ },
236
+ {
237
+ outputDir: options.output,
238
+ formats,
239
+ includeRawData: true,
240
+ }
241
+ );
242
+
243
+ // Summary
244
+ const duration = ((Date.now() - startTime) / 1000).toFixed(1);
245
+ console.log('');
246
+ printReportSummary(report);
247
+
248
+ console.log('');
249
+ consola.info(`Reports saved to: ${chalk.cyan(options.output)}`);
250
+ if (files.json) consola.log(` ${chalk.dim('→')} ${files.json}`);
251
+ if (files.markdown) consola.log(` ${chalk.dim('→')} ${files.markdown}`);
252
+ if (files.aiSummary) consola.log(` ${chalk.dim('→')} ${files.aiSummary}`);
253
+ if (files.split) {
254
+ consola.log(` ${chalk.dim('→')} ${files.split.index} ${chalk.dim('(index)')}`);
255
+ consola.log(` ${chalk.dim('→')} ${files.split.categories.length} category files`);
256
+ }
257
+
258
+ console.log('');
259
+ consola.success(`Audit completed in ${duration}s`);
260
+ }
@@ -0,0 +1,180 @@
1
+ /**
2
+ * Content Command - MDX/Nextra content tools
3
+ * Subcommands: check, fix, sitemap
4
+ */
5
+
6
+ import consola from 'consola';
7
+ import chalk from 'chalk';
8
+ import path from 'path';
9
+ import {
10
+ checkContentLinks,
11
+ groupBrokenLinksByFile,
12
+ fixContentLinks,
13
+ generateSitemap,
14
+ countSitemapItems,
15
+ detectProjectType,
16
+ findContentDir,
17
+ } from '../../content/index.js';
18
+ import type { CliOptions } from '../types.js';
19
+
20
+ const CONTENT_HELP = `
21
+ ${chalk.bold('Content Commands')} - MDX/Nextra content tools
22
+
23
+ ${chalk.bold('Usage:')}
24
+ djangocfg-seo content <subcommand> [options]
25
+
26
+ ${chalk.bold('Subcommands:')}
27
+ check Check links in content/ directory
28
+ fix Fix absolute links to relative
29
+ sitemap Generate sitemap.ts from content/
30
+
31
+ ${chalk.bold('Options:')}
32
+ --content-dir <path> Content directory (default: content/)
33
+ --output <path> Output file for sitemap (default: app/_core/sitemap.ts)
34
+ --fix Apply fixes (for 'fix' subcommand)
35
+ --base-path <path> Base URL path (default: /docs)
36
+
37
+ ${chalk.bold('Examples:')}
38
+ djangocfg-seo content check
39
+ djangocfg-seo content fix --fix
40
+ djangocfg-seo content sitemap --output app/_core/sitemap.ts
41
+ `;
42
+
43
+ export async function runContent(options: CliOptions) {
44
+ const subcommand = options._[1]; // content <subcommand>
45
+
46
+ if (!subcommand || subcommand === 'help') {
47
+ console.log(CONTENT_HELP);
48
+ return;
49
+ }
50
+
51
+ const cwd = process.cwd();
52
+ const contentDir = options['content-dir']
53
+ ? path.resolve(cwd, options['content-dir'])
54
+ : findContentDir(cwd);
55
+
56
+ if (!contentDir && subcommand !== 'sitemap') {
57
+ consola.error('Could not find content/ directory. Use --content-dir to specify path.');
58
+ process.exit(1);
59
+ }
60
+
61
+ // Detect project type
62
+ const projectType = detectProjectType(cwd);
63
+ console.log('');
64
+ consola.box(`${chalk.bold('Content Tools')}\nProject: ${projectType}\nPath: ${contentDir || cwd}`);
65
+
66
+ switch (subcommand) {
67
+ case 'check':
68
+ await runCheck(contentDir!, options);
69
+ break;
70
+ case 'fix':
71
+ await runFix(contentDir!, options);
72
+ break;
73
+ case 'sitemap':
74
+ await runSitemapGenerate(cwd, options);
75
+ break;
76
+ default:
77
+ consola.error(`Unknown subcommand: ${subcommand}`);
78
+ console.log(CONTENT_HELP);
79
+ process.exit(1);
80
+ }
81
+ }
82
+
83
+ /**
84
+ * Check links in content directory
85
+ */
86
+ async function runCheck(contentDir: string, options: CliOptions) {
87
+ consola.start('Checking links in content/ folder...');
88
+
89
+ const basePath = options['base-path'] || '/docs';
90
+ const result = checkContentLinks(contentDir, { basePath });
91
+
92
+ if (result.success) {
93
+ console.log('');
94
+ consola.success('All links are valid!');
95
+ console.log(` Checked ${result.filesChecked} files, ${result.uniqueLinks} unique links.`);
96
+ return;
97
+ }
98
+
99
+ console.log('');
100
+ consola.error(`Found ${result.brokenLinks.length} broken links:`);
101
+ console.log('');
102
+
103
+ const byFile = groupBrokenLinksByFile(result.brokenLinks);
104
+
105
+ for (const [file, links] of byFile) {
106
+ console.log(`${chalk.cyan('📄')} ${file}`);
107
+ for (const link of links) {
108
+ console.log(` L${link.line}: ${chalk.red('✗')} ${link.link} ${chalk.dim(`(${link.type}: "${link.raw}")`)}`);
109
+ }
110
+ console.log('');
111
+ }
112
+
113
+ console.log(`${chalk.bold('Summary:')} ${result.brokenLinks.length} broken links in ${byFile.size} files`);
114
+ console.log(` Checked ${result.filesChecked} files, ${result.uniqueLinks} unique links.`);
115
+
116
+ process.exit(1);
117
+ }
118
+
119
+ /**
120
+ * Fix absolute links to relative
121
+ */
122
+ async function runFix(contentDir: string, options: CliOptions) {
123
+ const applyFixes = options.fix === true;
124
+
125
+ consola.start(applyFixes ? 'Fixing links...' : 'Checking for absolute links that can be relative...');
126
+
127
+ const result = fixContentLinks(contentDir, { apply: applyFixes });
128
+
129
+ if (result.totalChanges === 0) {
130
+ console.log('');
131
+ consola.success('No absolute links that can be converted to relative.');
132
+ return;
133
+ }
134
+
135
+ console.log('');
136
+ console.log(`Found ${result.totalChanges} links that can be relative:`);
137
+ console.log('');
138
+
139
+ for (const { file, fixes } of result.fileChanges) {
140
+ console.log(`${chalk.cyan('📄')} ${file}`);
141
+ for (const { from, to, line } of fixes) {
142
+ console.log(` L${line}: ${from} ${chalk.yellow('→')} ${to}`);
143
+ }
144
+ console.log('');
145
+ }
146
+
147
+ if (applyFixes) {
148
+ consola.success(`Fixed ${result.totalChanges} links in ${result.fileChanges.length} files.`);
149
+ } else {
150
+ console.log(`${chalk.yellow('💡')} Run with --fix to apply changes:`);
151
+ console.log(` djangocfg-seo content fix --fix`);
152
+ }
153
+ }
154
+
155
+ /**
156
+ * Generate sitemap.ts
157
+ */
158
+ async function runSitemapGenerate(cwd: string, options: CliOptions) {
159
+ consola.start('Generating sitemap...');
160
+
161
+ // Use specific sitemap output, not the report output directory
162
+ // Check if output ends with .ts (sitemap file) or use default
163
+ const rawOutput = options.output;
164
+ const output = rawOutput?.endsWith('.ts') ? rawOutput : 'app/_core/sitemap.ts';
165
+ const contentDir = options['content-dir'] || 'content';
166
+ const basePath = options['base-path'] || '/docs';
167
+
168
+ const { outputPath, data } = await generateSitemap(cwd, {
169
+ output,
170
+ config: { contentDir, basePath },
171
+ });
172
+
173
+ const counts = countSitemapItems(data);
174
+
175
+ console.log('');
176
+ consola.success(`Sitemap generated at ${outputPath}`);
177
+ console.log(` ├── App pages: ${counts.app}`);
178
+ console.log(` ├── Doc pages: ${counts.docs}`);
179
+ console.log(` └── Total: ${counts.total}`);
180
+ }
@@ -0,0 +1,32 @@
1
+ /**
2
+ * Crawl Command - Site crawler
3
+ */
4
+
5
+ import consola from 'consola';
6
+ import { getSiteUrl } from '../../config.js';
7
+ import { SiteCrawler, analyzeCrawlResults } from '../../crawler/index.js';
8
+ import { generateAndSaveReports } from '../../reports/index.js';
9
+ import { parseFormats, type CliOptions } from '../types.js';
10
+
11
+ export async function runCrawl(options: CliOptions) {
12
+ const siteUrl = getSiteUrl(options);
13
+
14
+ consola.start(`Starting crawl of ${siteUrl}`);
15
+
16
+ const crawler = new SiteCrawler(siteUrl, {
17
+ maxPages: parseInt(options['max-pages'], 10),
18
+ maxDepth: parseInt(options['max-depth'], 10),
19
+ });
20
+
21
+ const crawlResults = await crawler.crawl();
22
+ const issues = analyzeCrawlResults(crawlResults);
23
+
24
+ consola.info(`Found ${issues.length} issues from ${crawlResults.length} pages`);
25
+
26
+ const formats = parseFormats(options.format);
27
+ await generateAndSaveReports(siteUrl, { issues, crawlResults }, {
28
+ outputDir: options.output,
29
+ formats,
30
+ includeRawData: true,
31
+ });
32
+ }
@@ -0,0 +1,12 @@
1
+ /**
2
+ * CLI Commands
3
+ */
4
+
5
+ export { runAudit } from './audit.js';
6
+ export { runRoutes } from './routes.js';
7
+ export { runInspect } from './inspect.js';
8
+ export { runCrawl } from './crawl.js';
9
+ export { runLinks } from './links.js';
10
+ export { runRobots } from './robots.js';
11
+ export { runSitemap } from './sitemap.js';
12
+ export { runContent } from './content.js';
@@ -0,0 +1,60 @@
1
+ /**
2
+ * Inspect Command - GSC URL inspection
3
+ */
4
+
5
+ import consola from 'consola';
6
+ import { getSiteUrl } from '../../config.js';
7
+ import { GoogleConsoleClient, analyzeInspectionResults } from '../../google-console/index.js';
8
+ import { generateAndSaveReports } from '../../reports/index.js';
9
+ import { loadUrlsFromFile } from '../../utils/index.js';
10
+ import { parseFormats, type CliOptions } from '../types.js';
11
+
12
+ export async function runInspect(options: CliOptions) {
13
+ const siteUrl = getSiteUrl(options);
14
+
15
+ consola.start('Starting URL inspection via Google Search Console');
16
+
17
+ const client = new GoogleConsoleClient({
18
+ siteUrl,
19
+ serviceAccountPath: options['service-account'],
20
+ });
21
+
22
+ const isAuth = await client.verify();
23
+ if (!isAuth) {
24
+ consola.error('Failed to authenticate with Google Search Console');
25
+ process.exit(1);
26
+ }
27
+
28
+ let urls: string[];
29
+
30
+ if (options.urls) {
31
+ urls = loadUrlsFromFile(options.urls);
32
+ consola.info(`Loaded ${urls.length} URLs from ${options.urls}`);
33
+ } else {
34
+ consola.info('Fetching URLs from search analytics...');
35
+ const today = new Date();
36
+ const startDate = new Date(today.getTime() - 30 * 24 * 60 * 60 * 1000);
37
+
38
+ const rows = await client.getSearchAnalytics({
39
+ startDate: startDate.toISOString().split('T')[0] as string,
40
+ endDate: today.toISOString().split('T')[0] as string,
41
+ dimensions: ['page'],
42
+ rowLimit: 100,
43
+ });
44
+
45
+ urls = rows.map((row) => row.keys?.[0] || '').filter(Boolean);
46
+ consola.info(`Found ${urls.length} URLs from search analytics`);
47
+ }
48
+
49
+ const results = await client.inspectUrls(urls);
50
+ const issues = analyzeInspectionResults(results);
51
+
52
+ consola.info(`Found ${issues.length} issues`);
53
+
54
+ const formats = parseFormats(options.format);
55
+ await generateAndSaveReports(siteUrl, { issues, urlInspections: results }, {
56
+ outputDir: options.output,
57
+ formats,
58
+ includeRawData: true,
59
+ });
60
+ }
@@ -0,0 +1,41 @@
1
+ /**
2
+ * Links Command - Broken link checker
3
+ */
4
+
5
+ import consola from 'consola';
6
+ import { getSiteUrl } from '../../config.js';
7
+ import { checkLinks, linkResultsToSeoIssues } from '../../link-checker/index.js';
8
+ import { generateAndSaveReports } from '../../reports/index.js';
9
+ import { parseFormats, type CliOptions } from '../types.js';
10
+
11
/**
 * `links` — check all links on the site and report broken ones.
 * Exits with code 0 when every link is valid, 1 otherwise.
 */
export async function runLinks(options: CliOptions) {
  const siteUrl = getSiteUrl(options);

  consola.start(`Checking links on ${siteUrl}`);

  const result = await checkLinks({
    url: siteUrl,
    timeout: parseInt(options.timeout, 10),
    concurrency: parseInt(options.concurrency, 10),
    verbose: true,
  });

  if (result.success) {
    consola.success(`All ${result.total} links are valid!`);
  } else {
    consola.error(`Found ${result.broken} broken links out of ${result.total}`);

    // Generate report if output specified
    // NOTE(review): inside this branch `result.broken > 0` presumably always
    // holds (success is false), which would make the custom-output check
    // redundant — confirm against checkLinks' success/broken semantics.
    if (options.output !== './seo-reports' || result.broken > 0) {
      const issues = linkResultsToSeoIssues(result);
      const formats = parseFormats(options.format);
      await generateAndSaveReports(siteUrl, { issues }, {
        outputDir: options.output,
        formats,
        includeRawData: false,
      });
    }
  }

  process.exit(result.success ? 0 : 1);
}
@@ -0,0 +1,36 @@
1
+ /**
2
+ * Robots Command - robots.txt analyzer
3
+ */
4
+
5
+ import consola from 'consola';
6
+ import { getSiteUrl } from '../../config.js';
7
+ import { analyzeRobotsTxt } from '../../crawler/index.js';
8
+ import type { CliOptions } from '../types.js';
9
+
10
+ export async function runRobots(options: CliOptions) {
11
+ const siteUrl = getSiteUrl(options);
12
+
13
+ consola.start(`Analyzing robots.txt for ${siteUrl}`);
14
+
15
+ const analysis = await analyzeRobotsTxt(siteUrl);
16
+
17
+ if (analysis.exists) {
18
+ consola.success('robots.txt found');
19
+ consola.info(`Sitemaps: ${analysis.sitemaps.length}`);
20
+ consola.info(`Disallow rules: ${analysis.disallowedPaths.length}`);
21
+ consola.info(`Allow rules: ${analysis.allowedPaths.length}`);
22
+
23
+ if (analysis.crawlDelay) {
24
+ consola.info(`Crawl-delay: ${analysis.crawlDelay}`);
25
+ }
26
+
27
+ if (analysis.issues.length > 0) {
28
+ consola.warn(`Issues found: ${analysis.issues.length}`);
29
+ for (const issue of analysis.issues) {
30
+ consola.log(` - [${issue.severity}] ${issue.title}`);
31
+ }
32
+ }
33
+ } else {
34
+ consola.warn('robots.txt not found');
35
+ }
36
+ }