bmad-plus 0.4.0 → 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/README.md +12 -56
- package/osint-agent-package/skills/bmad-osint-investigate/osint/SKILL.md +452 -452
- package/osint-agent-package/skills/bmad-osint-investigate/osint/assets/dossier-template.md +116 -116
- package/osint-agent-package/skills/bmad-osint-investigate/osint/references/content-extraction.md +100 -100
- package/osint-agent-package/skills/bmad-osint-investigate/osint/references/platforms.md +130 -130
- package/osint-agent-package/skills/bmad-osint-investigate/osint/references/psychoprofile.md +69 -69
- package/osint-agent-package/skills/bmad-osint-investigate/osint/references/tools.md +281 -281
- package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/mcp-client.py +136 -136
- package/package.json +1 -1
- package/readme-international/README.de.md +1 -1
- package/readme-international/README.es.md +1 -1
- package/readme-international/README.fr.md +1 -1
- package/tools/cli/commands/install.js +74 -46
- package/tools/cli/i18n.js +501 -0
- package/oveanet-pack/animated-website/DEPLOYMENT.md +0 -104
- package/oveanet-pack/animated-website/README.md +0 -63
- package/oveanet-pack/animated-website/agent/animated-website-agent.md +0 -325
- package/oveanet-pack/animated-website/agent.yaml +0 -63
- package/oveanet-pack/animated-website/templates/animated-website-workflow.md +0 -55
- package/oveanet-pack/seo-audit-360/DEPLOYMENT.md +0 -115
- package/oveanet-pack/seo-audit-360/README.md +0 -66
- package/oveanet-pack/seo-audit-360/SKILL.md +0 -171
- package/oveanet-pack/seo-audit-360/agent/seo-chief.md +0 -294
- package/oveanet-pack/seo-audit-360/agent/seo-judge.md +0 -241
- package/oveanet-pack/seo-audit-360/agent/seo-scout.md +0 -171
- package/oveanet-pack/seo-audit-360/agent.yaml +0 -70
- package/oveanet-pack/seo-audit-360/checklist.md +0 -140
- package/oveanet-pack/seo-audit-360/extensions/google-analytics/EXTENSION.md +0 -79
- package/oveanet-pack/seo-audit-360/extensions/google-analytics/ga4_client.py +0 -200
- package/oveanet-pack/seo-audit-360/extensions/google-analytics/requirements.txt +0 -4
- package/oveanet-pack/seo-audit-360/extensions/google-search-console/EXTENSION.md +0 -109
- package/oveanet-pack/seo-audit-360/extensions/google-search-console/gsc_client.py +0 -186
- package/oveanet-pack/seo-audit-360/extensions/google-search-console/requirements.txt +0 -4
- package/oveanet-pack/seo-audit-360/hooks/seo-check.sh +0 -95
- package/oveanet-pack/seo-audit-360/pagespeed-playbook.md +0 -320
- package/oveanet-pack/seo-audit-360/ref/audit-schema.json +0 -187
- package/oveanet-pack/seo-audit-360/ref/cwv-thresholds.md +0 -87
- package/oveanet-pack/seo-audit-360/ref/eeat-criteria.md +0 -123
- package/oveanet-pack/seo-audit-360/ref/geo-signals.md +0 -167
- package/oveanet-pack/seo-audit-360/ref/hreflang-rules.md +0 -153
- package/oveanet-pack/seo-audit-360/ref/quality-gates.md +0 -133
- package/oveanet-pack/seo-audit-360/ref/schema-catalog.md +0 -91
- package/oveanet-pack/seo-audit-360/ref/schema-templates.json +0 -356
- package/oveanet-pack/seo-audit-360/requirements.txt +0 -14
- package/oveanet-pack/seo-audit-360/scripts/__pycache__/seo_crawl.cpython-314.pyc +0 -0
- package/oveanet-pack/seo-audit-360/scripts/__pycache__/seo_parse.cpython-314.pyc +0 -0
- package/oveanet-pack/seo-audit-360/scripts/install.ps1 +0 -53
- package/oveanet-pack/seo-audit-360/scripts/install.sh +0 -48
- package/oveanet-pack/seo-audit-360/scripts/seo_apis.py +0 -464
- package/oveanet-pack/seo-audit-360/scripts/seo_crawl.py +0 -282
- package/oveanet-pack/seo-audit-360/scripts/seo_fetch.py +0 -231
- package/oveanet-pack/seo-audit-360/scripts/seo_parse.py +0 -255
- package/oveanet-pack/seo-audit-360/scripts/seo_report.py +0 -403
- package/oveanet-pack/seo-audit-360/scripts/seo_screenshot.py +0 -202
- package/oveanet-pack/seo-audit-360/templates/seo-audit-workflow.md +0 -241
- package/oveanet-pack/seo-audit-360/tests/__pycache__/test_crawl.cpython-314-pytest-9.0.2.pyc +0 -0
- package/oveanet-pack/seo-audit-360/tests/__pycache__/test_parse.cpython-314-pytest-9.0.2.pyc +0 -0
- package/oveanet-pack/seo-audit-360/tests/fixtures/sample_page.html +0 -62
- package/oveanet-pack/seo-audit-360/tests/test_apis.py +0 -75
- package/oveanet-pack/seo-audit-360/tests/test_crawl.py +0 -121
- package/oveanet-pack/seo-audit-360/tests/test_fetch.py +0 -70
- package/oveanet-pack/seo-audit-360/tests/test_parse.py +0 -184
- package/oveanet-pack/universal-backup/DEPLOYMENT.md +0 -80
- package/oveanet-pack/universal-backup/README.md +0 -58
- package/oveanet-pack/universal-backup/agent/backup-agent.md +0 -71
- package/oveanet-pack/universal-backup/agent.yaml +0 -45
- package/oveanet-pack/universal-backup/templates/backup-workflow.md +0 -51
|
@@ -1,255 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env python3
|
|
2
|
-
"""
|
|
3
|
-
SEO Parse — HTML parser for SEO element extraction.
|
|
4
|
-
|
|
5
|
-
Extracts: title, meta tags, canonicals, headings, images, links (internal/external),
|
|
6
|
-
schema (JSON-LD), Open Graph, Twitter Cards, hreflang, word count, text/code ratio.
|
|
7
|
-
|
|
8
|
-
Author: Laurent Rochetta
|
|
9
|
-
License: MIT
|
|
10
|
-
"""
|
|
11
|
-
|
|
12
|
-
import argparse
|
|
13
|
-
import json
|
|
14
|
-
import os
|
|
15
|
-
import re
|
|
16
|
-
import sys
|
|
17
|
-
from typing import Optional
|
|
18
|
-
from urllib.parse import urljoin, urlparse
|
|
19
|
-
|
|
20
|
-
# Hard dependency: bail out early with an actionable install hint rather
# than failing later with a bare ImportError traceback.
try:
    from bs4 import BeautifulSoup
except ImportError:
    print("Error: beautifulsoup4 required. Install: pip install beautifulsoup4", file=sys.stderr)
    sys.exit(1)

# Use lxml if available for speed, fallback to html.parser.
# The import is only probed for availability; bs4 loads it by name.
try:
    import lxml  # noqa: F401
    HTML_PARSER = "lxml"
except ImportError:
    HTML_PARSER = "html.parser"
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
def parse_html(html: str, base_url: Optional[str] = None) -> dict:
    """
    Parse HTML and extract all SEO-relevant elements.

    Args:
        html: Raw HTML content.
        base_url: Base URL for resolving relative links; when omitted,
            internal/external link classification is skipped entirely.

    Returns:
        Comprehensive dictionary of SEO data: title, meta tags, canonical,
        headings, images, links, JSON-LD schema blocks, Open Graph,
        Twitter Cards, hreflang, word count, text/code ratio and
        resource counts.
    """
    soup = BeautifulSoup(html, HTML_PARSER)

    result = {
        "title": None,
        "title_length": 0,
        "meta_description": None,
        "meta_description_length": 0,
        "meta_robots": None,
        "meta_viewport": None,
        "canonical": None,
        "headings": {"h1": [], "h2": [], "h3": [], "h4": []},
        "images": [],
        "links": {"internal": [], "external": [], "broken_candidates": []},
        "schema_blocks": [],
        "open_graph": {},
        "twitter_card": {},
        "hreflang": [],
        "word_count": 0,
        "html_size_bytes": len(html.encode("utf-8")),
        "text_ratio": 0.0,
        "has_lang_attr": False,
        "lang": None,
        "scripts_count": 0,
        "stylesheets_count": 0,
        "dom_depth_estimate": 0,
        "security_headers_hints": {},
    }

    # ── Title ──
    title_tag = soup.find("title")
    if title_tag:
        result["title"] = title_tag.get_text(strip=True)
        result["title_length"] = len(result["title"])

    _extract_meta_tags(soup, result)
    _extract_structure(soup, result)
    _extract_images(soup, result, base_url)

    # Link classification needs a reference domain, so it is only
    # performed when a base URL was supplied.
    if base_url:
        _extract_links(soup, result, base_url)

    _extract_schema(soup, result)

    # ── Resource Counts ──
    result["scripts_count"] = len(soup.find_all("script"))
    result["stylesheets_count"] = len(soup.find_all("link", rel="stylesheet"))

    _compute_text_stats(html, result)

    return result


def _extract_meta_tags(soup, result: dict) -> None:
    """Populate description/robots/viewport plus Open Graph and Twitter Card data."""
    for meta in soup.find_all("meta"):
        name = (meta.get("name") or "").lower()
        property_attr = (meta.get("property") or "").lower()
        content = meta.get("content", "")

        if name == "description":
            result["meta_description"] = content
            result["meta_description_length"] = len(content)
        elif name == "robots":
            result["meta_robots"] = content
        elif name == "viewport":
            result["meta_viewport"] = content

        # Open Graph (property="og:*") and Twitter Cards (name="twitter:*")
        # are checked independently of the name-based tags above.
        if property_attr.startswith("og:"):
            result["open_graph"][property_attr] = content
        if name.startswith("twitter:"):
            result["twitter_card"][name] = content


def _extract_structure(soup, result: dict) -> None:
    """Populate lang attribute, canonical URL, hreflang alternates and headings."""
    html_tag = soup.find("html")
    if html_tag and html_tag.get("lang"):
        result["has_lang_attr"] = True
        result["lang"] = html_tag.get("lang")

    canonical = soup.find("link", rel="canonical")
    if canonical:
        result["canonical"] = canonical.get("href")

    for link in soup.find_all("link", rel="alternate"):
        hreflang = link.get("hreflang")
        if hreflang:
            result["hreflang"].append({
                "lang": hreflang,
                "href": link.get("href"),
            })

    # Only non-empty heading texts are collected.
    for level in ("h1", "h2", "h3", "h4"):
        for tag in soup.find_all(level):
            text = tag.get_text(strip=True)
            if text:
                result["headings"][level].append(text)


def _extract_images(soup, result: dict, base_url: Optional[str]) -> None:
    """Collect per-image SEO data: alt text, dimensions, lazy-loading, srcset."""
    for img in soup.find_all("img"):
        src = img.get("src", "")
        if base_url and src:
            src = urljoin(base_url, src)

        # has_alt distinguishes a missing alt attribute from an empty one;
        # alt="" is valid for decorative images but flagged separately.
        has_alt = img.get("alt") is not None
        alt_text = img.get("alt", "")

        result["images"].append({
            "src": src,
            "alt": alt_text,
            "has_alt": has_alt,
            "alt_empty": has_alt and alt_text.strip() == "",
            "width": img.get("width"),
            "height": img.get("height"),
            "has_dimensions": bool(img.get("width") and img.get("height")),
            "loading": img.get("loading"),
            "srcset": img.get("srcset") is not None,
        })


def _extract_links(soup, result: dict, base_url: str) -> None:
    """Classify anchors as internal/external relative to base_url's domain.

    Fragment-only, javascript:, mailto: and tel: hrefs are skipped — they
    are not crawlable hyperlinks and previously inflated the external count.
    """
    base_domain = urlparse(base_url).netloc

    for a in soup.find_all("a", href=True):
        href = a.get("href", "")
        if not href or href.startswith(("#", "javascript:", "mailto:", "tel:")):
            continue

        full_url = urljoin(base_url, href)

        link_data = {
            "href": full_url,
            "text": a.get_text(strip=True)[:100],
            "rel": a.get("rel", []),
            "is_nofollow": "nofollow" in (a.get("rel") or []),
            "target": a.get("target"),
        }

        if urlparse(full_url).netloc == base_domain:
            result["links"]["internal"].append(link_data)
        else:
            result["links"]["external"].append(link_data)


def _extract_schema(soup, result: dict) -> None:
    """Parse JSON-LD script blocks; malformed JSON is flagged, not fatal."""
    for script in soup.find_all("script", type="application/ld+json"):
        try:
            # script.string may be None (TypeError) or invalid JSON.
            schema_data = json.loads(script.string)
        except (json.JSONDecodeError, TypeError):
            result["schema_blocks"].append({"type": "PARSE_ERROR", "data": None})
            continue

        # Normalize a single object and a top-level array to one code path.
        if isinstance(schema_data, dict):
            items = [schema_data]
        elif isinstance(schema_data, list):
            items = [item for item in schema_data if isinstance(item, dict)]
        else:
            items = []

        for item in items:
            result["schema_blocks"].append({
                "type": item.get("@type", "unknown"),
                "data": item,
            })


def _compute_text_stats(html: str, result: dict) -> None:
    """Compute visible word count and text-to-HTML byte ratio.

    Re-parses the document so that decomposing boilerplate elements does
    not mutate the soup used by the other extractors.
    """
    text_soup = BeautifulSoup(html, HTML_PARSER)
    for element in text_soup(["script", "style", "nav", "footer", "header", "noscript"]):
        element.decompose()

    visible_text = text_soup.get_text(separator=" ", strip=True)
    result["word_count"] = len(re.findall(r"\b\w+\b", visible_text))

    text_bytes = len(visible_text.encode("utf-8"))
    if result["html_size_bytes"] > 0:
        result["text_ratio"] = round(text_bytes / result["html_size_bytes"], 3)
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
# ── CLI ────────────────────────────────────────────────────────────
|
|
215
|
-
|
|
216
|
-
def main():
    """CLI entry point: parse an HTML file (or stdin) and print a summary or JSON."""
    parser = argparse.ArgumentParser(
        description="SEO Parse — HTML parser for SEO analysis (BMAD+ SEO Engine)"
    )
    parser.add_argument("file", nargs="?", help="HTML file to parse")
    parser.add_argument("--url", "-u", help="Base URL for resolving relative links")
    parser.add_argument("--json", "-j", action="store_true", help="Output as JSON")

    args = parser.parse_args()

    if args.file:
        # Resolve symlinks first; the error message still shows the path
        # the user actually passed.
        real_path = os.path.realpath(args.file)
        if not os.path.isfile(real_path):
            print(f"Error: File not found: {args.file}", file=sys.stderr)
            sys.exit(1)
        with open(real_path, "r", encoding="utf-8") as f:
            html = f.read()
    else:
        # No file argument: read the whole document from stdin (pipe-friendly).
        html = sys.stdin.read()

    result = parse_html(html, args.url)

    if args.json:
        print(json.dumps(result, indent=2, ensure_ascii=False))
    else:
        # Human-readable summary of the key SEO signals; the meta
        # description is truncated to 80 chars for terminal display.
        print(f"Title: {result['title']} ({result['title_length']} chars)")
        print(f"Meta Description: {result['meta_description'][:80] + '...' if result['meta_description'] and len(result['meta_description']) > 80 else result['meta_description']}")
        print(f"Canonical: {result['canonical']}")
        print(f"Language: {result['lang']}")
        print(f"H1: {len(result['headings']['h1'])} | H2: {len(result['headings']['h2'])} | H3: {len(result['headings']['h3'])}")
        print(f"Images: {len(result['images'])} (missing alt: {sum(1 for i in result['images'] if not i['has_alt'])})")
        print(f"Internal Links: {len(result['links']['internal'])} | External: {len(result['links']['external'])}")
        print(f"Schema Blocks: {len(result['schema_blocks'])} ({', '.join(s['type'] for s in result['schema_blocks'])})")
        print(f"Word Count: {result['word_count']:,}")
        print(f"Text/HTML Ratio: {result['text_ratio']:.1%}")
        print(f"Scripts: {result['scripts_count']} | Stylesheets: {result['stylesheets_count']}")
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
# Script entry point: run the CLI only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
|
@@ -1,403 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env python3
|
|
2
|
-
"""
|
|
3
|
-
SEO Report — Professional HTML audit report generator.
|
|
4
|
-
|
|
5
|
-
Features:
|
|
6
|
-
- Single-file HTML with inline CSS (no external deps)
|
|
7
|
-
- SVG radar chart for score visualization
|
|
8
|
-
- Color-coded issue cards (Critical/High/Medium/Low)
|
|
9
|
-
- Quick Wins section
|
|
10
|
-
- Print-friendly (@media print)
|
|
11
|
-
- Responsive (mobile-readable)
|
|
12
|
-
|
|
13
|
-
Author: Laurent Rochetta
|
|
14
|
-
License: MIT
|
|
15
|
-
"""
|
|
16
|
-
|
|
17
|
-
import argparse
|
|
18
|
-
import json
|
|
19
|
-
import math
|
|
20
|
-
import os
|
|
21
|
-
import sys
|
|
22
|
-
from datetime import datetime
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
def generate_radar_svg(scores: dict, size: int = 300) -> str:
    """Generate an inline SVG radar chart for the score categories.

    Works for any number of categories, not just the canonical seven.

    Args:
        scores: Mapping of category name -> score; values above 100 are
            clamped for plotting.
        size: Width and height of the square SVG viewBox in pixels.

    Returns:
        SVG markup as a string, or "" when *scores* is empty.
    """
    categories = list(scores.keys())
    values = list(scores.values())
    n = len(categories)

    if n == 0:
        return ""

    cx, cy = size // 2, size // 2
    radius = size // 2 - 40  # leave a margin for the axis labels

    # Short labels for display; unknown categories fall back to a 6-char prefix.
    short_labels = {
        "technical": "Tech",
        "content_eeat": "E-E-A-T",
        "on_page": "On-Page",
        "schema": "Schema",
        "performance": "Perf",
        "ai_readiness": "AI/GEO",
        "images": "Images",
    }

    def point(angle_deg, r):
        # Rotate by -90° so the first axis points straight up.
        angle_rad = math.radians(angle_deg - 90)
        return cx + r * math.cos(angle_rad), cy + r * math.sin(angle_rad)

    svg_parts = [f'<svg viewBox="0 0 {size} {size}" xmlns="http://www.w3.org/2000/svg" style="max-width:{size}px;margin:auto;display:block;">']

    # Background reference rings at 25/50/75/100%.
    for pct in [25, 50, 75, 100]:
        r = radius * pct / 100
        svg_parts.append(f'<circle cx="{cx}" cy="{cy}" r="{r}" fill="none" stroke="#e2e8f0" stroke-width="1" opacity="0.5"/>')

    # One axis line + label per category (axes start at the center).
    for i in range(n):
        angle = (360 / n) * i
        x2, y2 = point(angle, radius)
        svg_parts.append(f'<line x1="{cx}" y1="{cy}" x2="{x2}" y2="{y2}" stroke="#e2e8f0" stroke-width="1"/>')

        lx, ly = point(angle, radius + 20)
        label = short_labels.get(categories[i], categories[i][:6])
        svg_parts.append(f'<text x="{lx}" y="{ly}" text-anchor="middle" font-size="11" fill="#64748b" font-family="Inter,sans-serif">{label}</text>')

    # Compute each data vertex once; reused for the polygon and the markers.
    coords = []
    for i in range(n):
        angle = (360 / n) * i
        r = radius * min(values[i], 100) / 100
        coords.append(point(angle, r))

    poly = " ".join(f"{x},{y}" for x, y in coords)
    svg_parts.append(f'<polygon points="{poly}" fill="rgba(59,130,246,0.2)" stroke="#3b82f6" stroke-width="2"/>')

    # Color each vertex marker by score band: green >= 80, amber >= 50, else red.
    for (x, y), value in zip(coords, values):
        color = "#22c55e" if value >= 80 else "#f59e0b" if value >= 50 else "#ef4444"
        svg_parts.append(f'<circle cx="{x}" cy="{y}" r="4" fill="{color}" stroke="white" stroke-width="2"/>')

    svg_parts.append('</svg>')
    return "\n".join(svg_parts)
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
def severity_color(severity: str) -> str:
    """Return the display hex color for a severity level (grey for unknown)."""
    palette = {
        "critical": "#ef4444",
        "high": "#f97316",
        "medium": "#f59e0b",
        "low": "#22c55e",
    }
    return palette.get(severity, "#64748b")
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
def severity_icon(severity: str) -> str:
    """Return the emoji marker for a severity level (white circle for unknown)."""
    icons = {
        "critical": "🔴",
        "high": "🟠",
        "medium": "🟡",
        "low": "🟢",
    }
    return icons.get(severity, "⚪")
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
def score_color(score: int) -> str:
    """Return a traffic-light hex color for a 0-100 score.

    Bands: green >= 90, lime >= 70, amber >= 50, red below 50.
    """
    bands = (
        (90, "#22c55e"),
        (70, "#84cc16"),
        (50, "#f59e0b"),
    )
    for threshold, color in bands:
        if score >= threshold:
            return color
    return "#ef4444"
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
def generate_html_report(audit_data: dict) -> str:
    """Generate a complete, self-contained HTML report from audit JSON data.

    All dynamic text (domain, issue titles/descriptions/categories and fix
    snippets) is HTML-escaped — previously a fix snippet containing ``<``
    (e.g. a ``<meta>`` tag sample) corrupted the report markup.

    Args:
        audit_data: Parsed audit JSON with optional keys ``domain``,
            ``timestamp``, ``score`` ({"total": int, "categories": {...}})
            and ``issues`` (list of issue dicts with ``severity``,
            ``title``, ``description``, ``category``, ``fix``,
            ``quick_win``).

    Returns:
        A single-file HTML document (inline CSS, no external assets).
    """
    # Function-local import: a module-level `import html` would be shadowed
    # by the local `html` result variable assigned below.
    from html import escape

    domain = escape(str(audit_data.get("domain", "Unknown")))
    timestamp = audit_data.get("timestamp", datetime.now().isoformat())
    total_score = audit_data.get("score", {}).get("total", 0)
    categories = audit_data.get("score", {}).get("categories", {})
    issues = audit_data.get("issues", [])

    # Radar chart for category scores (empty string when no categories).
    radar_svg = generate_radar_svg(categories) if categories else ""

    # Sort issues most-severe first; unknown severities sink to the bottom.
    severity_order = {"critical": 0, "high": 1, "medium": 2, "low": 3}
    sorted_issues = sorted(issues, key=lambda x: severity_order.get(x.get("severity", "low"), 4))

    # Per-severity counts for the summary cards.
    counts = {"critical": 0, "high": 0, "medium": 0, "low": 0}
    for issue in issues:
        sev = issue.get("severity", "low")
        counts[sev] = counts.get(sev, 0) + 1

    # At most five quick-win issues are highlighted.
    quick_wins = [i for i in issues if i.get("quick_win", False)][:5]

    # Build issue cards HTML.
    issue_cards = ""
    for issue in sorted_issues:
        sev = issue.get("severity", "low")
        fix_html = ""
        if issue.get("fix"):
            # Escape so code samples containing <, > or & render literally.
            fix_html = f'<div class="fix-block"><strong>Fix:</strong><pre><code>{escape(str(issue["fix"]))}</code></pre></div>'

        issue_cards += f'''
    <div class="issue-card" style="border-left: 4px solid {severity_color(sev)}">
        <div class="issue-header">
            <span class="severity-badge" style="background:{severity_color(sev)}">{escape(sev.upper())}</span>
            <span class="issue-category">{escape(str(issue.get("category", "")))}</span>
        </div>
        <h4>{escape(str(issue.get("title", "")))}</h4>
        <p>{escape(str(issue.get("description", "")))}</p>
        {fix_html}
    </div>'''

    # Quick wins HTML (omitted entirely when there are none).
    qw_html = ""
    if quick_wins:
        qw_items = ""
        for qw in quick_wins:
            qw_items += f'<li>{severity_icon(qw.get("severity", ""))} {escape(str(qw.get("title", "")))}</li>'
        qw_html = f'<div class="quick-wins"><h3>⚡ Quick Wins</h3><ul>{qw_items}</ul></div>'

    # Category scores table with colored progress bars.
    cat_rows = ""
    for cat, score in categories.items():
        cat_name = escape(cat.replace("_", " ").title())
        cat_rows += f'''
        <tr>
            <td>{cat_name}</td>
            <td>
                <div class="score-bar-bg">
                    <div class="score-bar" style="width:{score}%;background:{score_color(score)}"></div>
                </div>
            </td>
            <td style="color:{score_color(score)};font-weight:700">{score}</td>
        </tr>'''

    html = f'''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>SEO Audit Report — {domain}</title>
<style>
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap');

* {{ margin: 0; padding: 0; box-sizing: border-box; }}
body {{
    font-family: 'Inter', -apple-system, sans-serif;
    background: #f8fafc;
    color: #1e293b;
    line-height: 1.6;
}}
.container {{ max-width: 900px; margin: 0 auto; padding: 2rem; }}

/* Header */
.header {{
    background: linear-gradient(135deg, #0f172a 0%, #1e3a5f 100%);
    color: white;
    padding: 3rem 2rem;
    border-radius: 16px;
    margin-bottom: 2rem;
    text-align: center;
}}
.header h1 {{ font-size: 2rem; margin-bottom: 0.5rem; }}
.header .domain {{ font-size: 1.2rem; opacity: 0.8; }}
.header .date {{ font-size: 0.85rem; opacity: 0.6; margin-top: 0.5rem; }}

/* Score circle */
.score-hero {{
    display: flex;
    align-items: center;
    justify-content: center;
    gap: 3rem;
    margin: 2rem 0;
    flex-wrap: wrap;
}}
.score-circle {{
    width: 150px;
    height: 150px;
    border-radius: 50%;
    display: flex;
    flex-direction: column;
    align-items: center;
    justify-content: center;
    border: 6px solid {score_color(total_score)};
    background: white;
    box-shadow: 0 4px 24px rgba(0,0,0,0.08);
}}
.score-number {{ font-size: 3rem; font-weight: 700; color: {score_color(total_score)}; }}
.score-label {{ font-size: 0.75rem; text-transform: uppercase; color: #64748b; letter-spacing: 1px; }}

/* Summary cards */
.summary-grid {{
    display: grid;
    grid-template-columns: repeat(4, 1fr);
    gap: 1rem;
    margin-bottom: 2rem;
}}
.summary-card {{
    background: white;
    border-radius: 12px;
    padding: 1.2rem;
    text-align: center;
    box-shadow: 0 2px 8px rgba(0,0,0,0.04);
}}
.summary-card .count {{ font-size: 2rem; font-weight: 700; }}
.summary-card .label {{ font-size: 0.8rem; color: #64748b; }}

/* Sections */
.section {{ background: white; border-radius: 12px; padding: 2rem; margin-bottom: 1.5rem; box-shadow: 0 2px 8px rgba(0,0,0,0.04); }}
.section h2 {{ margin-bottom: 1rem; font-size: 1.3rem; }}
.section h3 {{ margin-bottom: 0.8rem; font-size: 1.1rem; }}

/* Score bars */
table {{ width: 100%; border-collapse: collapse; }}
td {{ padding: 0.6rem 0; }}
.score-bar-bg {{ width: 100%; height: 8px; background: #e2e8f0; border-radius: 4px; overflow: hidden; margin: 0 1rem; }}
.score-bar {{ height: 100%; border-radius: 4px; transition: width 0.5s ease; }}

/* Issue cards */
.issue-card {{
    border: 1px solid #e2e8f0;
    border-radius: 8px;
    padding: 1rem;
    margin-bottom: 0.8rem;
}}
.issue-header {{ display: flex; gap: 0.5rem; margin-bottom: 0.3rem; align-items: center; }}
.severity-badge {{ color: white; padding: 2px 8px; border-radius: 4px; font-size: 0.7rem; font-weight: 600; }}
.issue-category {{ font-size: 0.8rem; color: #64748b; }}
.issue-card h4 {{ margin-bottom: 0.3rem; }}
.issue-card p {{ color: #475569; font-size: 0.9rem; }}
.fix-block {{ background: #f1f5f9; border-radius: 6px; padding: 0.8rem; margin-top: 0.5rem; }}
.fix-block pre {{ overflow-x: auto; font-size: 0.8rem; }}

/* Quick wins */
.quick-wins {{ background: #f0fdf4; border: 1px solid #bbf7d0; border-radius: 12px; padding: 1.5rem; margin-bottom: 1.5rem; }}
.quick-wins ul {{ list-style: none; padding: 0; }}
.quick-wins li {{ padding: 0.3rem 0; }}

/* Footer */
.footer {{ text-align: center; color: #94a3b8; font-size: 0.8rem; padding: 2rem 0; }}

/* Print */
@media print {{
    body {{ background: white; }}
    .container {{ max-width: 100%; padding: 0; }}
    .header {{ break-after: avoid; }}
    .section {{ break-inside: avoid; box-shadow: none; border: 1px solid #e2e8f0; }}
}}

/* Mobile */
@media (max-width: 640px) {{
    .summary-grid {{ grid-template-columns: repeat(2, 1fr); }}
    .score-hero {{ flex-direction: column; gap: 1.5rem; }}
}}
</style>
</head>
<body>
<div class="container">
    <div class="header">
        <h1>SEO Audit Report</h1>
        <div class="domain">{domain}</div>
        <div class="date">{timestamp[:10]}</div>
    </div>

    <div class="score-hero">
        <div class="score-circle">
            <div class="score-number">{total_score}</div>
            <div class="score-label">SEO Score</div>
        </div>
        <div>
            {radar_svg}
        </div>
    </div>

    <div class="summary-grid">
        <div class="summary-card">
            <div class="count" style="color:#ef4444">{counts["critical"]}</div>
            <div class="label">Critical</div>
        </div>
        <div class="summary-card">
            <div class="count" style="color:#f97316">{counts["high"]}</div>
            <div class="label">High</div>
        </div>
        <div class="summary-card">
            <div class="count" style="color:#f59e0b">{counts["medium"]}</div>
            <div class="label">Medium</div>
        </div>
        <div class="summary-card">
            <div class="count" style="color:#22c55e">{counts["low"]}</div>
            <div class="label">Low</div>
        </div>
    </div>

    {qw_html}

    <div class="section">
        <h2>📊 Category Scores</h2>
        <table>{cat_rows}</table>
    </div>

    <div class="section">
        <h2>🔍 Issues ({len(issues)})</h2>
        {issue_cards}
    </div>

    <div class="footer">
        Generated by BMAD+ SEO Engine v2.1 — By Laurent Rochetta
    </div>
</div>
</body>
</html>'''

    return html
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
# ── CLI ────────────────────────────────────────────────────────────
|
|
374
|
-
|
|
375
|
-
def main():
    """CLI entry point: load an audit JSON file and write the HTML report."""
    parser = argparse.ArgumentParser(
        description="SEO Report — HTML audit report generator (BMAD+ SEO Engine)"
    )
    parser.add_argument("input", help="Audit JSON file")
    parser.add_argument("--output", "-o", default="seo-report.html", help="Output HTML file")

    args = parser.parse_args()

    if not os.path.isfile(args.input):
        print(f"Error: File not found: {args.input}", file=sys.stderr)
        sys.exit(1)

    with open(args.input, "r", encoding="utf-8") as f:
        audit_data = json.load(f)

    html = generate_html_report(audit_data)

    with open(args.output, "w", encoding="utf-8") as f:
        f.write(html)

    # NOTE(review): the confirmation line goes to stderr while the summary
    # lines below go to stdout — possibly intentional (keeps stdout clean
    # for piping), but confirm before relying on the split.
    print(f"✅ Report generated: {args.output}", file=sys.stderr)
    print(f" Domain: {audit_data.get('domain', 'Unknown')}")
    print(f" Score: {audit_data.get('score', {}).get('total', 0)}/100")
    print(f" Issues: {len(audit_data.get('issues', []))}")
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
# Script entry point: run the CLI only when executed directly, not on import.
if __name__ == "__main__":
    main()
|