@xyleapp/cli 0.8.0 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -0
- package/bin/xyle.mjs +1 -1
- package/package.json +1 -1
- package/src/api.mjs +75 -3
- package/src/commands.mjs +508 -27
- package/src/seed.mjs +19 -3
package/README.md
CHANGED
|
@@ -34,6 +34,18 @@ xyle rewrite --url https://example.com/blog/seo-guide --type title
|
|
|
34
34
|
# Crawl a page
|
|
35
35
|
xyle crawl --url https://example.com/blog/seo-guide
|
|
36
36
|
|
|
37
|
+
# Full-site crawl (BFS every internal page, detect site-wide issues)
|
|
38
|
+
xyle site-crawl https://example.com --max-pages 500
|
|
39
|
+
|
|
40
|
+
# View snapshot history
|
|
41
|
+
xyle history --url https://example.com
|
|
42
|
+
|
|
43
|
+
# View score trends
|
|
44
|
+
xyle trends --site example.com --days 30
|
|
45
|
+
|
|
46
|
+
# Compare two snapshots
|
|
47
|
+
xyle diff --before <snapshot-id> --after <snapshot-id>
|
|
48
|
+
|
|
37
49
|
# Sync Search Console data
|
|
38
50
|
xyle sync --site https://example.com
|
|
39
51
|
```
|
|
@@ -50,6 +62,10 @@ xyle sync --site https://example.com
|
|
|
50
62
|
| `analyze` | Analyze page content against competitors |
|
|
51
63
|
| `rewrite` | Get AI rewrite suggestions |
|
|
52
64
|
| `crawl` | Crawl a URL and extract SEO metadata |
|
|
65
|
+
| `site-crawl` | Full-site BFS crawl with issue detection, link graph, and site health score |
|
|
66
|
+
| `history` | View snapshot history for a URL or site |
|
|
67
|
+
| `trends` | View score trends over time |
|
|
68
|
+
| `diff` | Compare two snapshots side-by-side |
|
|
53
69
|
| `sync` | Sync Google Search Console data |
|
|
54
70
|
| `login` | Authenticate with Google OAuth |
|
|
55
71
|
| `logout` | Remove stored credentials |
|
package/bin/xyle.mjs
CHANGED
package/package.json
CHANGED
package/src/api.mjs
CHANGED
|
@@ -49,7 +49,12 @@ async function request(method, path, { params, body, timeout = 30000, auth = tru
|
|
|
49
49
|
let detail;
|
|
50
50
|
try {
|
|
51
51
|
const json = await resp.json();
|
|
52
|
-
|
|
52
|
+
const raw = json.detail;
|
|
53
|
+
if (Array.isArray(raw)) {
|
|
54
|
+
detail = raw.map((e) => e.msg || JSON.stringify(e)).join("; ");
|
|
55
|
+
} else {
|
|
56
|
+
detail = raw || resp.statusText;
|
|
57
|
+
}
|
|
53
58
|
} catch {
|
|
54
59
|
detail = resp.statusText;
|
|
55
60
|
}
|
|
@@ -65,7 +70,9 @@ export function checkHealth() {
|
|
|
65
70
|
}
|
|
66
71
|
|
|
67
72
|
export function getTopQueries(site, limit = 20) {
|
|
68
|
-
|
|
73
|
+
const creds = getCredentials();
|
|
74
|
+
const email = creds?.email || null;
|
|
75
|
+
return request("GET", "/queries", { params: { site, limit, email } });
|
|
69
76
|
}
|
|
70
77
|
|
|
71
78
|
export function getCompetitors(query) {
|
|
@@ -93,7 +100,9 @@ export function crawlPage(url) {
|
|
|
93
100
|
}
|
|
94
101
|
|
|
95
102
|
export function syncGsc(site) {
|
|
96
|
-
|
|
103
|
+
const creds = getCredentials();
|
|
104
|
+
const email = creds?.email || null;
|
|
105
|
+
return request("POST", "/admin/sync", { params: { site, email } });
|
|
97
106
|
}
|
|
98
107
|
|
|
99
108
|
export function listSites() {
|
|
@@ -117,4 +126,67 @@ export function getInstructions(tool) {
|
|
|
117
126
|
return request("GET", "/seed/instructions", { params: { tool }, timeout: 10000 });
|
|
118
127
|
}
|
|
119
128
|
|
|
129
|
+
export function listSnapshots(url, siteDomain, limit = 20) {
|
|
130
|
+
return request("GET", "/snapshots", { params: { url, site_domain: siteDomain, limit } });
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
export function getSnapshotTrends(url, siteDomain, days = 90) {
|
|
134
|
+
return request("GET", "/snapshots/trends", { params: { url, site_domain: siteDomain, days } });
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
export function diffSnapshots(beforeId, afterId) {
|
|
138
|
+
return request("GET", "/snapshots/diff", { params: { before: beforeId, after: afterId } });
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
export function askKnowledgeBase(question, url, topic, nSources = 5) {
|
|
142
|
+
const body = { question, n_sources: nSources };
|
|
143
|
+
if (url) body.context_url = url;
|
|
144
|
+
if (topic) body.topic = topic;
|
|
145
|
+
return request("POST", "/kb/ask", { body, timeout: 60000 });
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
export function getKbStats() {
|
|
149
|
+
return request("GET", "/kb/stats", { timeout: 10000 });
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
// ---------------------------------------------------------------------------
|
|
153
|
+
// Site Crawl
|
|
154
|
+
// ---------------------------------------------------------------------------
|
|
155
|
+
|
|
156
|
+
export function startSiteCrawl(seedUrl, config = {}) {
|
|
157
|
+
// immediate=1 tells the API to run the crawl in-process via BackgroundTask.
|
|
158
|
+
// The CLI keeps polling /status, which keeps the Cloud Run instance warm
|
|
159
|
+
// for the duration. The web UI uses Trigger.dev instead and omits this.
|
|
160
|
+
return request("POST", "/site-crawl", {
|
|
161
|
+
params: { immediate: 1 },
|
|
162
|
+
body: { seed_url: seedUrl, config },
|
|
163
|
+
timeout: 30000,
|
|
164
|
+
});
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
export function getSiteCrawlStatus(jobId) {
|
|
168
|
+
// 60s timeout: the API container runs the crawl loop on the same event
|
|
169
|
+
// loop as the HTTP handler, so individual status requests can spike in
|
|
170
|
+
// latency when the crawl is hot. 15s was too tight.
|
|
171
|
+
return request("GET", `/site-crawl/${jobId}`, { timeout: 60000 });
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
export function getSiteCrawlPages(jobId, { limit = 50, offset = 0, filter } = {}) {
|
|
175
|
+
return request("GET", `/site-crawl/${jobId}/pages`, {
|
|
176
|
+
params: { limit, offset, filter },
|
|
177
|
+
timeout: 15000,
|
|
178
|
+
});
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
export function getSiteCrawlIssues(jobId, { severity, category, limit = 200 } = {}) {
|
|
182
|
+
return request("GET", `/site-crawl/${jobId}/issues`, {
|
|
183
|
+
params: { severity, category, limit },
|
|
184
|
+
timeout: 15000,
|
|
185
|
+
});
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
export function listSiteCrawls(limit = 25) {
|
|
189
|
+
return request("GET", "/site-crawl", { params: { limit }, timeout: 15000 });
|
|
190
|
+
}
|
|
191
|
+
|
|
120
192
|
export { SEO_BASE };
|
package/src/commands.mjs
CHANGED
|
@@ -3,10 +3,6 @@
|
|
|
3
3
|
* Mirrors the Python CLI 1:1.
|
|
4
4
|
*/
|
|
5
5
|
|
|
6
|
-
import { createRequire } from "node:module";
|
|
7
|
-
import { existsSync } from "node:fs";
|
|
8
|
-
import { resolve } from "node:path";
|
|
9
|
-
import { execSync } from "node:child_process";
|
|
10
6
|
import { printJson, printTable } from "./formatting.mjs";
|
|
11
7
|
import {
|
|
12
8
|
checkHealth,
|
|
@@ -18,6 +14,16 @@ import {
|
|
|
18
14
|
crawlPage,
|
|
19
15
|
syncGsc,
|
|
20
16
|
listSites,
|
|
17
|
+
listSnapshots,
|
|
18
|
+
getSnapshotTrends,
|
|
19
|
+
diffSnapshots,
|
|
20
|
+
askKnowledgeBase,
|
|
21
|
+
getKbStats,
|
|
22
|
+
startSiteCrawl,
|
|
23
|
+
getSiteCrawlStatus,
|
|
24
|
+
getSiteCrawlPages,
|
|
25
|
+
getSiteCrawlIssues,
|
|
26
|
+
listSiteCrawls,
|
|
21
27
|
SEO_BASE,
|
|
22
28
|
} from "./api.mjs";
|
|
23
29
|
import { getCredentials, clearCredentials, runLoginFlow } from "./auth.mjs";
|
|
@@ -171,6 +177,12 @@ export function registerCommands(program) {
|
|
|
171
177
|
console.log(`${aeoColor}AEO Score: ${Math.round(data.aeo_score * 100)}%\x1b[0m`);
|
|
172
178
|
}
|
|
173
179
|
|
|
180
|
+
// GEO Score
|
|
181
|
+
if (data.geo_score != null) {
|
|
182
|
+
const geoColor = data.geo_score >= 0.7 ? "\x1b[32m" : "\x1b[33m";
|
|
183
|
+
console.log(`${geoColor}GEO Score: ${Math.round(data.geo_score * 100)}%\x1b[0m`);
|
|
184
|
+
}
|
|
185
|
+
|
|
174
186
|
// Structured Recommendations
|
|
175
187
|
const structured = data.recommendations || [];
|
|
176
188
|
if (structured.length) {
|
|
@@ -352,6 +364,23 @@ export function registerCommands(program) {
|
|
|
352
364
|
}
|
|
353
365
|
}
|
|
354
366
|
|
|
367
|
+
// GEO Signals
|
|
368
|
+
const geo = data.geo_signals;
|
|
369
|
+
if (geo) {
|
|
370
|
+
console.log(`\n\x1b[1mGEO Signals\x1b[0m`);
|
|
371
|
+
console.log(
|
|
372
|
+
` ${check(geo.has_summary_section)} Summary Section ${check(geo.has_last_reviewed_date)} Reviewed Date ${check(geo.has_methodology_section)} Methodology`
|
|
373
|
+
);
|
|
374
|
+
console.log(
|
|
375
|
+
` Quotable: ${geo.quotable_statement_count} Statistics: ${geo.statistic_count} Entities: ${geo.named_entity_count} Definitions: ${geo.definition_clarity_count}`
|
|
376
|
+
);
|
|
377
|
+
console.log(
|
|
378
|
+
` Comparisons: ${geo.comparison_structure_count} Steps: ${geo.step_by_step_count} Sources: ${geo.source_attribution_count} Expertise: ${geo.author_expertise_signals}`
|
|
379
|
+
);
|
|
380
|
+
console.log(scoreBar("Topical Coverage", geo.topical_coverage_score));
|
|
381
|
+
console.log(scoreBar("Content Segmentation", geo.content_segmentation_score));
|
|
382
|
+
}
|
|
383
|
+
|
|
355
384
|
const wc = data.word_count || 0;
|
|
356
385
|
if (wc > 0 && wc < 50) {
|
|
357
386
|
console.log(
|
|
@@ -382,6 +411,9 @@ export function registerCommands(program) {
|
|
|
382
411
|
console.log(
|
|
383
412
|
`\x1b[32mSynced ${data.synced_queries || 0} queries for ${data.site || opts.site}\x1b[0m`
|
|
384
413
|
);
|
|
414
|
+
if (data.warning) {
|
|
415
|
+
console.log(`\x1b[33mWarning: ${data.warning}\x1b[0m`);
|
|
416
|
+
}
|
|
385
417
|
}
|
|
386
418
|
} catch (e) {
|
|
387
419
|
handleError(e);
|
|
@@ -594,37 +626,486 @@ export function registerCommands(program) {
|
|
|
594
626
|
}
|
|
595
627
|
});
|
|
596
628
|
|
|
597
|
-
// ---
|
|
629
|
+
// --- history ---
|
|
598
630
|
program
|
|
599
|
-
.command("
|
|
600
|
-
.description("
|
|
601
|
-
.option("--
|
|
602
|
-
.option("--
|
|
603
|
-
.option("--
|
|
604
|
-
.option("--
|
|
631
|
+
.command("history")
|
|
632
|
+
.description("View snapshot history for a URL or site")
|
|
633
|
+
.option("--url <url>", "Page URL to view history for")
|
|
634
|
+
.option("--site <domain>", "Site domain to view history for")
|
|
635
|
+
.option("--limit <n>", "Max snapshots to return", "10")
|
|
636
|
+
.option("--json", "Output as JSON")
|
|
605
637
|
.action(async (opts) => {
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
process.stderr.write(
|
|
609
|
-
`\x1b[31mDeploy script not found: ${scriptPath}\x1b[0m\n` +
|
|
610
|
-
`\x1b[2mRun this command from the project root or use --dir <path>\x1b[0m\n`
|
|
611
|
-
);
|
|
638
|
+
if (!opts.url && !opts.site) {
|
|
639
|
+
process.stderr.write("\x1b[31mProvide --url or --site\x1b[0m\n");
|
|
612
640
|
process.exit(1);
|
|
613
641
|
}
|
|
642
|
+
try {
|
|
643
|
+
const data = await listSnapshots(opts.url, opts.site, parseInt(opts.limit, 10));
|
|
644
|
+
if (opts.json) {
|
|
645
|
+
console.log(printJson(data));
|
|
646
|
+
} else if (!data || data.length === 0) {
|
|
647
|
+
console.log("\x1b[33mNo snapshots found.\x1b[0m");
|
|
648
|
+
} else {
|
|
649
|
+
const rows = data.map((s) => {
|
|
650
|
+
const seo = s.seo_score != null ? Math.round(s.seo_score * 100) : null;
|
|
651
|
+
const aeo = s.aeo_score != null ? Math.round(s.aeo_score * 100) : null;
|
|
652
|
+
const seoColor = seo != null ? (seo >= 70 ? "\x1b[32m" : seo >= 40 ? "\x1b[33m" : "\x1b[31m") : "\x1b[2m";
|
|
653
|
+
const aeoColor = aeo != null ? (aeo >= 70 ? "\x1b[32m" : aeo >= 40 ? "\x1b[33m" : "\x1b[31m") : "\x1b[2m";
|
|
654
|
+
return {
|
|
655
|
+
created_at: new Date(s.created_at).toLocaleString(),
|
|
656
|
+
trigger: s.trigger_source || "-",
|
|
657
|
+
seo_score: `${seoColor}${seo != null ? seo + "%" : "-"}\x1b[0m`,
|
|
658
|
+
aeo_score: `${aeoColor}${aeo != null ? aeo + "%" : "-"}\x1b[0m`,
|
|
659
|
+
signals: `${s.passing_signals}/${s.total_signals}`,
|
|
660
|
+
};
|
|
661
|
+
});
|
|
662
|
+
console.log(`\n\x1b[1mSnapshot History\x1b[0m (${data.length} snapshots)\n`);
|
|
663
|
+
console.log(printTable(rows, ["created_at", "trigger", "seo_score", "aeo_score", "signals"]));
|
|
664
|
+
}
|
|
665
|
+
} catch (e) {
|
|
666
|
+
handleError(e);
|
|
667
|
+
}
|
|
668
|
+
});
|
|
614
669
|
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
670
|
+
// --- trends ---
|
|
671
|
+
program
|
|
672
|
+
.command("trends")
|
|
673
|
+
.description("View score trends over time")
|
|
674
|
+
.option("--url <url>", "Page URL to view trends for")
|
|
675
|
+
.option("--site <domain>", "Site domain to view trends for")
|
|
676
|
+
.option("--days <n>", "Number of days to look back", "90")
|
|
677
|
+
.option("--json", "Output as JSON")
|
|
678
|
+
.action(async (opts) => {
|
|
679
|
+
if (!opts.url && !opts.site) {
|
|
680
|
+
process.stderr.write("\x1b[31mProvide --url or --site\x1b[0m\n");
|
|
681
|
+
process.exit(1);
|
|
682
|
+
}
|
|
683
|
+
try {
|
|
684
|
+
const data = await getSnapshotTrends(opts.url, opts.site, parseInt(opts.days, 10));
|
|
685
|
+
if (opts.json) {
|
|
686
|
+
console.log(printJson(data));
|
|
687
|
+
} else {
|
|
688
|
+
const points = data.points || [];
|
|
689
|
+
if (points.length === 0) {
|
|
690
|
+
console.log("\x1b[33mNo trend data found.\x1b[0m");
|
|
691
|
+
} else {
|
|
692
|
+
const target = data.url || data.site || opts.url || opts.site;
|
|
693
|
+
console.log(`\n\x1b[1mScore Trends\x1b[0m ${target} (${data.period_days} days)\n`);
|
|
694
|
+
|
|
695
|
+
// ASCII sparkline
|
|
696
|
+
const seoVals = points.map((p) => (p.seo_score != null ? Math.round(p.seo_score * 100) : null));
|
|
697
|
+
const aeoVals = points.map((p) => (p.aeo_score != null ? Math.round(p.aeo_score * 100) : null));
|
|
698
|
+
const geoVals = points.map((p) => (p.geo_score != null ? Math.round(p.geo_score * 100) : null));
|
|
699
|
+
const spark = (vals) => {
|
|
700
|
+
const ticks = "\u2581\u2582\u2583\u2584\u2585\u2586\u2587\u2588";
|
|
701
|
+
const valid = vals.filter((v) => v != null);
|
|
702
|
+
if (valid.length === 0) return "-";
|
|
703
|
+
const min = Math.min(...valid);
|
|
704
|
+
const max = Math.max(...valid);
|
|
705
|
+
const range = max - min || 1;
|
|
706
|
+
return vals.map((v) => (v != null ? ticks[Math.min(7, Math.floor(((v - min) / range) * 7))] : " ")).join("");
|
|
707
|
+
};
|
|
708
|
+
|
|
709
|
+
console.log(` SEO ${spark(seoVals)} ${seoVals.filter((v) => v != null).slice(-1)[0] ?? "-"}%`);
|
|
710
|
+
console.log(` AEO ${spark(aeoVals)} ${aeoVals.filter((v) => v != null).slice(-1)[0] ?? "-"}%`);
|
|
711
|
+
console.log(` GEO ${spark(geoVals)} ${geoVals.filter((v) => v != null).slice(-1)[0] ?? "-"}%`);
|
|
712
|
+
console.log();
|
|
713
|
+
console.log(printTable(
|
|
714
|
+
points.map((p) => ({
|
|
715
|
+
date: p.date,
|
|
716
|
+
seo: p.seo_score != null ? Math.round(p.seo_score * 100) + "%" : "-",
|
|
717
|
+
aeo: p.aeo_score != null ? Math.round(p.aeo_score * 100) + "%" : "-",
|
|
718
|
+
geo: p.geo_score != null ? Math.round(p.geo_score * 100) + "%" : "-",
|
|
719
|
+
signals: p.passing_signals,
|
|
720
|
+
})),
|
|
721
|
+
["date", "seo", "aeo", "geo", "signals"]
|
|
722
|
+
));
|
|
723
|
+
}
|
|
724
|
+
}
|
|
725
|
+
} catch (e) {
|
|
726
|
+
handleError(e);
|
|
727
|
+
}
|
|
728
|
+
});
|
|
620
729
|
|
|
621
|
-
|
|
622
|
-
|
|
730
|
+
// --- diff ---
|
|
731
|
+
program
|
|
732
|
+
.command("diff")
|
|
733
|
+
.description("Compare two snapshots")
|
|
734
|
+
.requiredOption("--before <id>", "Before snapshot ID")
|
|
735
|
+
.requiredOption("--after <id>", "After snapshot ID")
|
|
736
|
+
.option("--json", "Output as JSON")
|
|
737
|
+
.action(async (opts) => {
|
|
623
738
|
try {
|
|
624
|
-
|
|
739
|
+
const data = await diffSnapshots(opts.before, opts.after);
|
|
740
|
+
if (opts.json) {
|
|
741
|
+
console.log(printJson(data));
|
|
742
|
+
} else {
|
|
743
|
+
console.log(`\n\x1b[1mSnapshot Diff\x1b[0m\n`);
|
|
744
|
+
|
|
745
|
+
const delta = (label, val) => {
|
|
746
|
+
if (val == null) return ` ${label.padEnd(20)} -`;
|
|
747
|
+
const pct = Math.round(val * 100);
|
|
748
|
+
const sign = pct > 0 ? "+" : "";
|
|
749
|
+
const color = pct > 0 ? "\x1b[32m" : pct < 0 ? "\x1b[31m" : "\x1b[2m";
|
|
750
|
+
return ` ${label.padEnd(20)} ${color}${sign}${pct}%\x1b[0m`;
|
|
751
|
+
};
|
|
752
|
+
|
|
753
|
+
console.log(delta("SEO Score", data.seo_delta));
|
|
754
|
+
console.log(delta("AEO Score", data.aeo_delta));
|
|
755
|
+
console.log(delta("GEO Score", data.geo_delta));
|
|
756
|
+
|
|
757
|
+
if (data.breakdown_delta) {
|
|
758
|
+
for (const [key, val] of Object.entries(data.breakdown_delta)) {
|
|
759
|
+
console.log(delta(` ${key}`, val));
|
|
760
|
+
}
|
|
761
|
+
}
|
|
762
|
+
|
|
763
|
+
const sigDelta = data.signals_delta || 0;
|
|
764
|
+
const sigColor = sigDelta > 0 ? "\x1b[32m" : sigDelta < 0 ? "\x1b[31m" : "\x1b[2m";
|
|
765
|
+
const sigSign = sigDelta > 0 ? "+" : "";
|
|
766
|
+
console.log(` ${"Signals".padEnd(20)} ${sigColor}${sigSign}${sigDelta}\x1b[0m`);
|
|
767
|
+
|
|
768
|
+
const resolved = data.resolved_issues || [];
|
|
769
|
+
if (resolved.length) {
|
|
770
|
+
console.log(`\n \x1b[32mResolved:\x1b[0m`);
|
|
771
|
+
for (const issue of resolved) {
|
|
772
|
+
console.log(` \x1b[32m\u2713\x1b[0m ${issue}`);
|
|
773
|
+
}
|
|
774
|
+
}
|
|
775
|
+
|
|
776
|
+
const newIssues = data.new_issues || [];
|
|
777
|
+
if (newIssues.length) {
|
|
778
|
+
console.log(`\n \x1b[31mNew Issues:\x1b[0m`);
|
|
779
|
+
for (const issue of newIssues) {
|
|
780
|
+
console.log(` \x1b[31m\u2717\x1b[0m ${issue}`);
|
|
781
|
+
}
|
|
782
|
+
}
|
|
783
|
+
}
|
|
784
|
+
} catch (e) {
|
|
785
|
+
handleError(e);
|
|
786
|
+
}
|
|
787
|
+
});
|
|
788
|
+
|
|
789
|
+
// --- ask ---
|
|
790
|
+
program
|
|
791
|
+
.command("ask")
|
|
792
|
+
.description("Query the knowledge base for expert SEO/AEO/GEO guidance")
|
|
793
|
+
.requiredOption("--question <text>", "Question to ask the knowledge base")
|
|
794
|
+
.option("--url <url>", "Context URL for the question")
|
|
795
|
+
.option("--topic <topic>", "Topic filter (e.g., schema, technical-seo)")
|
|
796
|
+
.option("--sources <n>", "Number of sources to retrieve", "5")
|
|
797
|
+
.option("--json", "Output as JSON")
|
|
798
|
+
.action(async (opts) => {
|
|
799
|
+
try {
|
|
800
|
+
const data = await askKnowledgeBase(
|
|
801
|
+
opts.question,
|
|
802
|
+
opts.url,
|
|
803
|
+
opts.topic,
|
|
804
|
+
parseInt(opts.sources, 10)
|
|
805
|
+
);
|
|
806
|
+
if (opts.json) {
|
|
807
|
+
console.log(printJson(data));
|
|
808
|
+
} else {
|
|
809
|
+
console.log(`\n\x1b[1mAnswer\x1b[0m${data.grounded ? " \x1b[32m(grounded)\x1b[0m" : ""}\n`);
|
|
810
|
+
console.log(data.answer);
|
|
811
|
+
console.log();
|
|
812
|
+
}
|
|
625
813
|
} catch (e) {
|
|
626
|
-
|
|
627
|
-
process.exit(e.status || 1);
|
|
814
|
+
handleError(e);
|
|
628
815
|
}
|
|
629
816
|
});
|
|
817
|
+
|
|
818
|
+
// --- geo ---
|
|
819
|
+
program
|
|
820
|
+
.command("geo")
|
|
821
|
+
.description("Show GEO (Generative Engine Optimization) signals and score for a URL")
|
|
822
|
+
.requiredOption("--url <url>", "URL to analyze for GEO signals")
|
|
823
|
+
.option("--json", "Output as JSON")
|
|
824
|
+
.action(async (opts) => {
|
|
825
|
+
try {
|
|
826
|
+
const data = await crawlPage(opts.url);
|
|
827
|
+
if (opts.json) {
|
|
828
|
+
console.log(printJson({
|
|
829
|
+
url: data.url,
|
|
830
|
+
geo_signals: data.geo_signals,
|
|
831
|
+
}));
|
|
832
|
+
} else {
|
|
833
|
+
const geo = data.geo_signals;
|
|
834
|
+
if (!geo) {
|
|
835
|
+
console.log("\x1b[33mGEO signals not available for this page.\x1b[0m");
|
|
836
|
+
return;
|
|
837
|
+
}
|
|
838
|
+
const check = (v) => (v ? "\x1b[32m\u2713\x1b[0m" : "\x1b[31m\u2717\x1b[0m");
|
|
839
|
+
const scoreBar = (label, score) => {
|
|
840
|
+
const pct = Math.round((score || 0) * 100);
|
|
841
|
+
const filled = Math.round(pct / 5);
|
|
842
|
+
const bar = "\u2588".repeat(filled) + "\u2591".repeat(20 - filled);
|
|
843
|
+
const color = pct >= 70 ? "\x1b[32m" : pct >= 40 ? "\x1b[33m" : "\x1b[31m";
|
|
844
|
+
return ` ${label.padEnd(28)} ${color}${bar} ${pct}%\x1b[0m`;
|
|
845
|
+
};
|
|
846
|
+
|
|
847
|
+
console.log(`\n\x1b[1mGEO Signals\x1b[0m ${data.url}\n`);
|
|
848
|
+
|
|
849
|
+
console.log(`\x1b[1mCitability (40%)\x1b[0m`);
|
|
850
|
+
console.log(` Quotable statements: ${geo.quotable_statement_count}`);
|
|
851
|
+
console.log(` Statistics: ${geo.statistic_count}`);
|
|
852
|
+
console.log(scoreBar("Unique insight density", geo.unique_insight_density));
|
|
853
|
+
console.log(` Source attributions: ${geo.source_attribution_count}`);
|
|
854
|
+
|
|
855
|
+
console.log(`\n\x1b[1mEntity/Topical (25%)\x1b[0m`);
|
|
856
|
+
console.log(` Named entities: ${geo.named_entity_count}`);
|
|
857
|
+
console.log(scoreBar("Topical coverage", geo.topical_coverage_score));
|
|
858
|
+
console.log(` Clear definitions: ${geo.definition_clarity_count}`);
|
|
859
|
+
|
|
860
|
+
console.log(`\n\x1b[1mStructural (25%)\x1b[0m`);
|
|
861
|
+
console.log(` ${check(geo.has_summary_section)} Summary/TL;DR section`);
|
|
862
|
+
console.log(` Comparison structures: ${geo.comparison_structure_count}`);
|
|
863
|
+
console.log(` Step-by-step content: ${geo.step_by_step_count}`);
|
|
864
|
+
console.log(scoreBar("Content segmentation", geo.content_segmentation_score));
|
|
865
|
+
|
|
866
|
+
console.log(`\n\x1b[1mAuthority (10%)\x1b[0m`);
|
|
867
|
+
console.log(` ${check(geo.has_last_reviewed_date)} Last reviewed date`);
|
|
868
|
+
console.log(` Expertise signals: ${geo.author_expertise_signals}`);
|
|
869
|
+
console.log(` ${check(geo.has_methodology_section)} Methodology section`);
|
|
870
|
+
console.log(scoreBar("Authority link ratio", geo.outbound_authority_ratio));
|
|
871
|
+
}
|
|
872
|
+
} catch (e) {
|
|
873
|
+
handleError(e);
|
|
874
|
+
}
|
|
875
|
+
});
|
|
876
|
+
|
|
877
|
+
// --- site-crawl ---
|
|
878
|
+
const siteCrawlCmd = program
|
|
879
|
+
.command("site-crawl")
|
|
880
|
+
.description("Full-site SEO crawler — Screaming Frog-style audit");
|
|
881
|
+
|
|
882
|
+
// site-crawl <url> — start a crawl and poll until complete
|
|
883
|
+
siteCrawlCmd
|
|
884
|
+
.command("start")
|
|
885
|
+
.description("Start a full-site crawl")
|
|
886
|
+
.argument("<url>", "Seed URL to crawl")
|
|
887
|
+
.option("--max-pages <n>", "Maximum pages to crawl (default 50, max 500)", "50")
|
|
888
|
+
.option("--max-depth <n>", "Maximum crawl depth (default 3, max 10)", "3")
|
|
889
|
+
.option("--render-js", "Enable JavaScript rendering (slower, for SPA sites)")
|
|
890
|
+
.option("--include-subdomains", "Follow links to subdomains of the seed host")
|
|
891
|
+
.option("--json", "Output as JSON (no live polling)")
|
|
892
|
+
.action(async (url, opts) => {
|
|
893
|
+
try {
|
|
894
|
+
// NOTE: robots.txt is always respected server-side. The crawler
|
|
895
|
+
// identifies as Xyle-Crawler/<version> and cannot be spoofed.
|
|
896
|
+
const config = {
|
|
897
|
+
max_pages: parseInt(opts.maxPages, 10),
|
|
898
|
+
max_depth: parseInt(opts.maxDepth, 10),
|
|
899
|
+
render_js: opts.renderJs === true,
|
|
900
|
+
include_subdomains: opts.includeSubdomains === true,
|
|
901
|
+
};
|
|
902
|
+
const data = await startSiteCrawl(url, config);
|
|
903
|
+
const jobId = data.job_id;
|
|
904
|
+
|
|
905
|
+
if (opts.json) {
|
|
906
|
+
console.log(printJson(data));
|
|
907
|
+
return;
|
|
908
|
+
}
|
|
909
|
+
|
|
910
|
+
console.log(`\x1b[36mCrawl started:\x1b[0m ${jobId}`);
|
|
911
|
+
|
|
912
|
+
// Poll every 2 seconds. Progress bar denominator is `max_pages`
|
|
913
|
+
// (the hard cap), not `pages_discovered` — which is the frontier
|
|
914
|
+
// size and can explode on link-heavy sites, making the bar look
|
|
915
|
+
// broken. We also tolerate a few transient poll failures (the API
|
|
916
|
+
// container is busy running the crawl on the same event loop so
|
|
917
|
+
// individual status requests can spike latency).
|
|
918
|
+
const POLL_MS = 2000;
|
|
919
|
+
const MAX_POLL_ERRORS = 5;
|
|
920
|
+
const target = config.max_pages;
|
|
921
|
+
let prev = -1;
|
|
922
|
+
let pollErrors = 0;
|
|
923
|
+
while (true) {
|
|
924
|
+
await new Promise((r) => setTimeout(r, POLL_MS));
|
|
925
|
+
let status;
|
|
926
|
+
try {
|
|
927
|
+
status = await getSiteCrawlStatus(jobId);
|
|
928
|
+
pollErrors = 0;
|
|
929
|
+
} catch (err) {
|
|
930
|
+
pollErrors += 1;
|
|
931
|
+
if (pollErrors >= MAX_POLL_ERRORS) {
|
|
932
|
+
process.stdout.write("\n");
|
|
933
|
+
console.log(`\x1b[31mLost connection while polling.\x1b[0m The job may still be running server-side.`);
|
|
934
|
+
console.log(`Re-check with: xyle site-crawl status ${jobId}`);
|
|
935
|
+
return;
|
|
936
|
+
}
|
|
937
|
+
continue;
|
|
938
|
+
}
|
|
939
|
+
const crawled = status.pages_crawled || 0;
|
|
940
|
+
const errors = status.errors_count || 0;
|
|
941
|
+
|
|
942
|
+
if (crawled !== prev) {
|
|
943
|
+
const pct = Math.min(100, Math.round((crawled / target) * 100));
|
|
944
|
+
const filled = Math.round(pct / 5);
|
|
945
|
+
const bar = "\u2588".repeat(filled) + "\u2591".repeat(20 - filled);
|
|
946
|
+
process.stdout.write(`\r ${bar} ${pct}% ${crawled}/${target} pages ${errors} errors `);
|
|
947
|
+
prev = crawled;
|
|
948
|
+
}
|
|
949
|
+
|
|
950
|
+
if (["completed", "failed", "cancelled"].includes(status.status)) {
|
|
951
|
+
process.stdout.write("\n");
|
|
952
|
+
if (status.status === "completed") {
|
|
953
|
+
const h = status.health_score != null ? Math.round(status.health_score * 100) : null;
|
|
954
|
+
const hColor = h != null ? (h >= 70 ? "\x1b[32m" : h >= 40 ? "\x1b[33m" : "\x1b[31m") : "\x1b[2m";
|
|
955
|
+
console.log(`\x1b[32mCompleted.\x1b[0m Pages: ${crawled} Errors: ${errors}`);
|
|
956
|
+
if (h != null) {
|
|
957
|
+
console.log(`Site Health: ${hColor}${h}%\x1b[0m`);
|
|
958
|
+
}
|
|
959
|
+
console.log(`\nView pages: xyle site-crawl pages ${jobId}`);
|
|
960
|
+
console.log(`View issues: xyle site-crawl issues ${jobId}`);
|
|
961
|
+
} else {
|
|
962
|
+
console.log(`\x1b[31mCrawl ${status.status}\x1b[0m${status.error_message ? ": " + status.error_message : ""}`);
|
|
963
|
+
}
|
|
964
|
+
break;
|
|
965
|
+
}
|
|
966
|
+
}
|
|
967
|
+
} catch (e) {
|
|
968
|
+
handleError(e);
|
|
969
|
+
}
|
|
970
|
+
});
|
|
971
|
+
|
|
972
|
+
// site-crawl status <job_id>
|
|
973
|
+
siteCrawlCmd
|
|
974
|
+
.command("status")
|
|
975
|
+
.description("Check crawl job status")
|
|
976
|
+
.argument("<jobId>", "Crawl job ID")
|
|
977
|
+
.option("--json", "Output as JSON")
|
|
978
|
+
.action(async (jobId, opts) => {
|
|
979
|
+
try {
|
|
980
|
+
const data = await getSiteCrawlStatus(jobId);
|
|
981
|
+
if (opts.json) {
|
|
982
|
+
console.log(printJson(data));
|
|
983
|
+
} else {
|
|
984
|
+
const color = data.status === "completed" ? "\x1b[32m" : data.status === "failed" ? "\x1b[31m" : "\x1b[33m";
|
|
985
|
+
console.log(`Status: ${color}${data.status}\x1b[0m`);
|
|
986
|
+
console.log(`Pages: ${data.pages_crawled}/${data.pages_discovered} Errors: ${data.errors_count}`);
|
|
987
|
+
if (data.health_score != null) {
|
|
988
|
+
console.log(`Health: ${Math.round(data.health_score * 100)}%`);
|
|
989
|
+
}
|
|
990
|
+
}
|
|
991
|
+
} catch (e) {
|
|
992
|
+
handleError(e);
|
|
993
|
+
}
|
|
994
|
+
});
|
|
995
|
+
|
|
996
|
+
// site-crawl pages <job_id>
|
|
997
|
+
siteCrawlCmd
|
|
998
|
+
.command("pages")
|
|
999
|
+
.description("List crawled pages")
|
|
1000
|
+
.argument("<jobId>", "Crawl job ID")
|
|
1001
|
+
.option("--limit <n>", "Results per page", "50")
|
|
1002
|
+
.option("--filter <type>", "Filter: broken, redirect, thin")
|
|
1003
|
+
.option("--json", "Output as JSON")
|
|
1004
|
+
.action(async (jobId, opts) => {
|
|
1005
|
+
try {
|
|
1006
|
+
const data = await getSiteCrawlPages(jobId, {
|
|
1007
|
+
limit: parseInt(opts.limit, 10),
|
|
1008
|
+
filter: opts.filter,
|
|
1009
|
+
});
|
|
1010
|
+
if (opts.json) {
|
|
1011
|
+
console.log(printJson(data));
|
|
1012
|
+
} else {
|
|
1013
|
+
console.log(`\n\x1b[1mCrawled Pages\x1b[0m (${data.total} total)\n`);
|
|
1014
|
+
const rows = (data.pages || []).map((p) => ({
|
|
1015
|
+
status: p.http_status || "-",
|
|
1016
|
+
depth: p.depth,
|
|
1017
|
+
seo: p.seo_score != null ? Math.round(p.seo_score * 100) + "%" : "-",
|
|
1018
|
+
aeo: p.aeo_score != null ? Math.round(p.aeo_score * 100) + "%" : "-",
|
|
1019
|
+
geo: p.geo_score != null ? Math.round(p.geo_score * 100) + "%" : "-",
|
|
1020
|
+
words: p.word_count,
|
|
1021
|
+
issues: p.issues_count,
|
|
1022
|
+
url: p.url.length > 60 ? p.url.slice(0, 57) + "..." : p.url,
|
|
1023
|
+
}));
|
|
1024
|
+
console.log(printTable(rows, ["status", "depth", "seo", "aeo", "geo", "words", "issues", "url"]));
|
|
1025
|
+
}
|
|
1026
|
+
} catch (e) {
|
|
1027
|
+
handleError(e);
|
|
1028
|
+
}
|
|
1029
|
+
});
|
|
1030
|
+
|
|
1031
|
+
// site-crawl issues <job_id>
|
|
1032
|
+
siteCrawlCmd
|
|
1033
|
+
.command("issues")
|
|
1034
|
+
.description("List crawl issues")
|
|
1035
|
+
.argument("<jobId>", "Crawl job ID")
|
|
1036
|
+
.option("--severity <level>", "Filter: critical, warning, info")
|
|
1037
|
+
.option("--category <cat>", "Filter by category")
|
|
1038
|
+
.option("--json", "Output as JSON")
|
|
1039
|
+
.action(async (jobId, opts) => {
|
|
1040
|
+
try {
|
|
1041
|
+
const data = await getSiteCrawlIssues(jobId, {
|
|
1042
|
+
severity: opts.severity,
|
|
1043
|
+
category: opts.category,
|
|
1044
|
+
});
|
|
1045
|
+
if (opts.json) {
|
|
1046
|
+
console.log(printJson(data));
|
|
1047
|
+
} else {
|
|
1048
|
+
const counts = data.counts_by_severity || {};
|
|
1049
|
+
console.log(`\n\x1b[1mCrawl Issues\x1b[0m \x1b[31m${counts.critical || 0} critical\x1b[0m \x1b[33m${counts.warning || 0} warning\x1b[0m \x1b[2m${counts.info || 0} info\x1b[0m\n`);
|
|
1050
|
+
for (const issue of data.issues || []) {
|
|
1051
|
+
const sevColor = issue.severity === "critical" ? "\x1b[31m" : issue.severity === "warning" ? "\x1b[33m" : "\x1b[2m";
|
|
1052
|
+
console.log(` ${sevColor}[${issue.severity}]\x1b[0m \x1b[36m${issue.category}\x1b[0m ${issue.message}`);
|
|
1053
|
+
}
|
|
1054
|
+
}
|
|
1055
|
+
} catch (e) {
|
|
1056
|
+
handleError(e);
|
|
1057
|
+
}
|
|
1058
|
+
});
|
|
1059
|
+
|
|
1060
|
+
// site-crawl list
|
|
1061
|
+
siteCrawlCmd
|
|
1062
|
+
.command("list")
|
|
1063
|
+
.description("List recent crawl jobs")
|
|
1064
|
+
.option("--json", "Output as JSON")
|
|
1065
|
+
.action(async (opts) => {
|
|
1066
|
+
try {
|
|
1067
|
+
const data = await listSiteCrawls();
|
|
1068
|
+
if (opts.json) {
|
|
1069
|
+
console.log(printJson(data));
|
|
1070
|
+
} else {
|
|
1071
|
+
const jobs = data.jobs || [];
|
|
1072
|
+
if (!jobs.length) {
|
|
1073
|
+
console.log("\x1b[33mNo crawl jobs found.\x1b[0m");
|
|
1074
|
+
return;
|
|
1075
|
+
}
|
|
1076
|
+
const rows = jobs.map((j) => ({
|
|
1077
|
+
id: j.job_id.slice(0, 8),
|
|
1078
|
+
status: j.status,
|
|
1079
|
+
pages: j.pages_crawled,
|
|
1080
|
+
errors: j.errors_count,
|
|
1081
|
+
health: j.health_score != null ? Math.round(j.health_score * 100) + "%" : "-",
|
|
1082
|
+
seed: j.seed_url.length > 40 ? j.seed_url.slice(0, 37) + "..." : j.seed_url,
|
|
1083
|
+
}));
|
|
1084
|
+
console.log(printTable(rows, ["id", "status", "pages", "errors", "health", "seed"]));
|
|
1085
|
+
}
|
|
1086
|
+
} catch (e) {
|
|
1087
|
+
handleError(e);
|
|
1088
|
+
}
|
|
1089
|
+
});
|
|
1090
|
+
|
|
1091
|
+
// --- kb-stats ---
//
// Registers the `kb-stats` command: fetches knowledge-base indexing
// statistics and reports readiness. With --json the raw payload is emitted;
// otherwise a short human-readable status line is printed. Readiness is
// signalled either by an explicit `ready: true` or a `status` of "indexed".
program
  .command("kb-stats")
  .description("Show knowledge base indexing statistics")
  .option("--json", "Output as JSON")
  .action(async (opts) => {
    try {
      const data = await getKbStats();
      if (opts.json) {
        // Machine-readable path: dump payload and stop.
        console.log(printJson(data));
        return;
      }
      console.log(`\n\x1b[1mKnowledge Base\x1b[0m\n`);
      const isReady = data.ready === true || data.status === "indexed";
      const statusLabel = isReady ? "\x1b[32mready\x1b[0m" : "\x1b[31mnot ready\x1b[0m";
      console.log(` Status: ${statusLabel}`);
    } catch (e) {
      handleError(e);
    }
  });
|
|
1110
|
+
|
|
630
1111
|
}
|
package/src/seed.mjs
CHANGED
|
@@ -89,6 +89,10 @@ npx @xyleapp/cli <command> [options]
|
|
|
89
89
|
| \`xyle sync --site <url> [--json]\` | \`--site\` (required) | Syncs Search Console data; returns synced_queries count |
|
|
90
90
|
| \`xyle queries --site <domain> [--limit N] [--json]\` | \`--site\` (required), \`--limit\` (default 20) | query, impressions, clicks, ctr, position |
|
|
91
91
|
| \`xyle crawl --url <url> [--json]\` | \`--url\` (required) | title, meta_desc, word_count, headings |
|
|
92
|
+
| \`xyle site-crawl <url> [--max-pages N] [--max-depth N] [--no-robots] [--no-js] [--json]\` | \`url\` (required) | Full-site BFS crawl; returns job_id, polls to completion, prints site health score |
|
|
93
|
+
| \`xyle site-crawl status <job_id>\` | \`job_id\` | Crawl job status + progress |
|
|
94
|
+
| \`xyle site-crawl pages <job_id> [--limit N] [--filter broken\\|redirect\\|thin] [--json]\` | \`job_id\` | Per-page SEO/AEO/GEO scores |
|
|
95
|
+
| \`xyle site-crawl issues <job_id> [--severity critical\\|warning] [--json]\` | \`job_id\` | Site-wide issues (broken links, duplicates, orphans, thin content, canonicals) |
|
|
92
96
|
|
|
93
97
|
### Analysis
|
|
94
98
|
| Command | Key Flags | Returns |
|
|
@@ -112,8 +116,19 @@ Always use \`--json\` when parsing output programmatically.
|
|
|
112
116
|
|
|
113
117
|
## Strategic Workflows
|
|
114
118
|
|
|
115
|
-
### 1. Full
|
|
116
|
-
**When:** User wants a health
|
|
119
|
+
### 1. Full-Site Audit (Screaming Frog replacement)
|
|
120
|
+
**When:** User wants a whole-site health report, not a single-page audit.
|
|
121
|
+
**Goal:** BFS every internal page, detect site-wide issues, deliver a prioritized fix plan with a Site Health Score.
|
|
122
|
+
|
|
123
|
+
1. \`xyle status --json\` — verify connectivity
|
|
124
|
+
2. \`xyle site-crawl https://<domain> --max-pages 500 --json\` — run the crawl; the CLI polls until complete and prints a progress bar
|
|
125
|
+
3. \`xyle site-crawl issues <job_id> --severity critical --json\` — triage broken links, redirect chains, canonical mismatches, duplicates, orphans, thin content
|
|
126
|
+
4. \`xyle site-crawl pages <job_id> --filter thin --json\` — find pages that need content work
|
|
127
|
+
5. \`xyle site-crawl pages <job_id> --limit 50 --json\` — sort by lowest SEO/AEO/GEO scores
|
|
128
|
+
6. **Deliver a prioritized report**: Site Health Score, critical issue count, top 10 lowest-scoring pages, 30-day fix roadmap
|
|
129
|
+
|
|
130
|
+
### 2. Single-Page / Query-Driven SEO Audit
|
|
131
|
+
**When:** User wants a deep dive on one page or a performance check driven by Search Console data.
|
|
117
132
|
**Goal:** Categorize queries by intent, flag striking-distance opportunities, and deliver a prioritized action plan.
|
|
118
133
|
|
|
119
134
|
1. \`xyle status --json\` — verify connectivity
|
|
@@ -174,7 +189,8 @@ When the user asks something SEO-related, route to the right workflow:
|
|
|
174
189
|
|
|
175
190
|
| User Says | Workflow | Why |
|
|
176
191
|
|-----------|----------|-----|
|
|
177
|
-
| "
|
|
192
|
+
| "Audit my whole site" / "Crawl my site" / "Screaming Frog" / "Site health" | Full-Site Audit | Need site-wide view: broken links, duplicates, orphans, thin content, link graph |
|
|
193
|
+
| "How's my SEO?" / "Audit my site" | Single-Page / Query-Driven SEO Audit | Need holistic view before specific fixes |
|
|
178
194
|
| "Optimize this page" / "Improve rankings for X" | Page Optimization | Specific page needs score-based action |
|
|
179
195
|
| "Who is my audience?" / "What should I write about?" | ICP Discovery | Need strategy before tactics |
|
|
180
196
|
| "What content am I missing?" / "Find gaps" | Content Gap Sprint | Ready to create, need briefs |
|