evolinkai-seo-assistant 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,18 @@
1
+ # SEO Assistant — npm installer
2
+
3
+ This package installs the SEO Assistant skill for OpenClaw / Claude Code.
4
+
5
+ ## Install
6
+
7
+ ```
8
+ npx evolinkai-seo-assistant
9
+ ```
10
+
11
+ ## What it does
12
+
13
+ Copies the SEO Assistant skill files into your project's `skills/` directory.
14
+
15
+ ## Links
16
+
17
+ - [ClawHub](https://clawhub.ai/evolinkai/ai-seo-assistant)
18
+ - [GitHub](https://github.com/EvoLinkAI/seo-skill-for-openclaw)
package/bin/install.js ADDED
@@ -0,0 +1,83 @@
#!/usr/bin/env node

// Installer entry point for the SEO Assistant OpenClaw skill.
// Copies the bundled skill files into the target workspace's
// `skills/` directory and records the install in the lockfile.

const fs = require("fs");
const path = require("path");

// Registry slug and published version of this skill.
const SLUG = "ai-seo-assistant";
const VERSION = "1.0.0";
// Skill payload shipped inside this npm package (sibling of bin/).
const SKILL_FILES_DIR = path.join(__dirname, "..", "skill-files");
/**
 * Resolve the workspace directory to install into.
 *
 * Precedence: the CLAWHUB_WORKDIR / CLAWDHUB_WORKDIR environment
 * variables (first non-empty wins), then the nearest ancestor of the
 * current directory containing a `.clawhub` or `.clawdhub` marker,
 * then the current directory itself.
 *
 * @returns {string} absolute path of the chosen workspace directory
 */
function findWorkdir() {
  const fromEnv =
    process.env.CLAWHUB_WORKDIR?.trim() || process.env.CLAWDHUB_WORKDIR?.trim();
  if (fromEnv) return path.resolve(fromEnv);

  // Walk upward until a marker directory is found or the root is reached.
  for (let dir = process.cwd(); ; ) {
    const hasMarker =
      fs.existsSync(path.join(dir, ".clawhub")) ||
      fs.existsSync(path.join(dir, ".clawdhub"));
    if (hasMarker) return dir;

    const parent = path.dirname(dir);
    if (parent === dir) break; // reached the filesystem root
    dir = parent;
  }
  return process.cwd();
}
25
+
/**
 * Recursively copy the contents of `src` into `dest`, creating
 * destination directories as needed. Files are copied byte-for-byte.
 *
 * @param {string} src - existing source directory
 * @param {string} dest - destination directory (created if absent)
 */
function copyDirSync(src, dest) {
  fs.mkdirSync(dest, { recursive: true });
  const entries = fs.readdirSync(src, { withFileTypes: true });
  for (const entry of entries) {
    const from = path.join(src, entry.name);
    const to = path.join(dest, entry.name);
    if (entry.isDirectory()) {
      copyDirSync(from, to);
    } else {
      fs.copyFileSync(from, to);
    }
  }
}
35
+
/**
 * Record this skill's version in `<workdir>/.clawhub/lock.json`.
 * Creates the lock directory/file if absent; a corrupt or schema-less
 * lockfile is reset to an empty skills map rather than aborting.
 *
 * @param {string} workdir - workspace root containing `.clawhub/`
 */
function updateLockfile(workdir) {
  const lockDir = path.join(workdir, ".clawhub");
  const lockFile = path.join(lockDir, "lock.json");
  fs.mkdirSync(lockDir, { recursive: true });

  let lock;
  try {
    lock = fs.existsSync(lockFile)
      ? JSON.parse(fs.readFileSync(lockFile, "utf8"))
      : { skills: {} };
  } catch {
    lock = { skills: {} }; // unreadable lockfile: start fresh
  }
  if (!lock.skills) lock.skills = {};

  lock.skills[SLUG] = { version: VERSION, installedAt: Date.now() };
  fs.writeFileSync(lockFile, JSON.stringify(lock, null, 2) + "\n");
}
50
+
/**
 * Write `.clawhub-origin.json` into the installed skill directory so
 * tooling can trace which registry, slug, and version it came from.
 *
 * @param {string} targetDir - the installed skill directory
 */
function writeOrigin(targetDir) {
  const origin = {
    version: 1,
    registry: "https://api.clawhub.ai",
    slug: SLUG,
    installedVersion: VERSION,
    installedAt: Date.now(),
  };
  const originPath = path.join(targetDir, ".clawhub-origin.json");
  fs.writeFileSync(originPath, JSON.stringify(origin, null, 2) + "\n");
}
55
+
/**
 * Entry point: locate the workspace, copy the bundled skill files into
 * `skills/<slug>`, stamp origin metadata, and update the lockfile.
 * Exits 0 without touching anything if the skill is already installed.
 */
function main() {
  console.log(`\n SEO Assistant — OpenClaw Skill Installer v${VERSION}`);
  console.log(` Powered by evolink.ai\n`);

  const workdir = findWorkdir();
  const targetDir = path.join(workdir, "skills", SLUG);

  // Never overwrite an existing install; updates go through clawhub.
  if (fs.existsSync(targetDir)) {
    console.log(` Already installed at: ${targetDir}`);
    console.log(` Use "npx clawhub update ${SLUG}" to update.\n`);
    process.exit(0);
  }

  console.log(` Installing to: ${targetDir}`);
  copyDirSync(SKILL_FILES_DIR, targetDir);
  writeOrigin(targetDir);
  updateLockfile(workdir);
  console.log(` Installed ${SLUG}@${VERSION}\n`);

  const nextSteps = [
    " Next steps:",
    ' 1. export EVOLINK_API_KEY="your-key-here"',
    " Get a free key: https://evolink.ai/signup",
    " 2. bash scripts/seo.sh audit index.html",
    " 3. bash scripts/seo.sh help",
    "",
  ];
  for (const line of nextSteps) {
    console.log(line);
  }
}

main();
package/package.json ADDED
@@ -0,0 +1,32 @@
1
+ {
2
+ "name": "evolinkai-seo-assistant",
3
+ "version": "1.0.0",
4
+ "description": "AI-powered SEO analysis and optimization. Audit HTML, rewrite meta tags, research keywords, generate schema markup, create sitemaps. OpenClaw skill installer. Powered by evolink.ai",
5
+ "bin": {
6
+ "evolinkai-seo-assistant": "bin/install.js"
7
+ },
8
+ "files": [
9
+ "bin/",
10
+ "skill-files/"
11
+ ],
12
+ "keywords": [
13
+ "openclaw",
14
+ "clawhub",
15
+ "seo",
16
+ "search-engine-optimization",
17
+ "meta-tags",
18
+ "schema-markup",
19
+ "sitemap",
20
+ "ai",
21
+ "claude",
22
+ "evolink",
23
+ "skill"
24
+ ],
25
+ "author": "EvoLinkAI <support@evolink.ai>",
26
+ "license": "MIT",
27
+ "repository": {
28
+ "type": "git",
29
+ "url": "https://github.com/EvoLinkAI/seo-skill-for-openclaw"
30
+ },
31
+ "homepage": "https://clawhub.ai/evolinkai/ai-seo-assistant"
32
+ }
@@ -0,0 +1,146 @@
1
+ ---
2
+ name: SEO Assistant
3
+ description: AI-powered SEO analysis and optimization. Audit HTML pages, rewrite meta tags, research keywords, generate schema markup, and create sitemaps. Powered by evolink.ai
4
+ version: 1.0.0
5
+ homepage: https://github.com/EvoLinkAI/seo-skill-for-openclaw
6
+ metadata: {"openclaw":{"homepage":"https://github.com/EvoLinkAI/seo-skill-for-openclaw","requires":{"bins":["python3","curl"],"env":["EVOLINK_API_KEY"]},"primaryEnv":"EVOLINK_API_KEY"}}
7
+ ---
8
+
9
+ # SEO Assistant
10
+
11
+ AI-powered SEO analysis and optimization from your terminal. Audit HTML pages locally with scoring, fetch and analyze live URLs, rewrite meta tags, research keywords, generate JSON-LD schema markup, and create XML sitemaps.
12
+
13
+ Powered by [Evolink.ai](https://evolink.ai?utm_source=clawhub&utm_medium=skill&utm_campaign=seo)
14
+
15
+ ## When to Use
16
+
17
+ - User wants to audit HTML files for SEO issues
18
+ - User asks "how's my SEO?" or "check my page"
19
+ - User needs optimized title/meta/description tags
20
+ - User wants keyword research for a topic
21
+ - User needs schema markup (Article, Product, FAQ, etc.)
22
+ - User wants to generate a sitemap
23
+
24
+ ## Quick Start
25
+
26
+ ### 1. Set your EvoLink API key
27
+
28
+ export EVOLINK_API_KEY="your-key-here"
29
+
30
+ Get a free key: [evolink.ai/signup](https://evolink.ai/signup?utm_source=clawhub&utm_medium=skill&utm_campaign=seo)
31
+
32
+ ### 2. Audit your HTML
33
+
34
+ bash scripts/seo.sh audit index.html
35
+
36
+ ### 3. AI-powered analysis
37
+
38
+ bash scripts/seo.sh check https://example.com
39
+
40
+ ## Capabilities
41
+
42
+ ### Local Commands (no API key needed)
43
+
44
+ | Command | Description |
45
+ |---------|-------------|
46
+ | `audit <file\|dir>` | Local HTML SEO audit with 0-100 scoring |
47
+ | `sitemap <dir> --base <url>` | Generate XML sitemap from HTML files |
48
+
49
+ ### AI Commands (require EVOLINK_API_KEY)
50
+
51
+ | Command | Description |
52
+ |---------|-------------|
53
+ | `check <url>` | Fetch live URL + AI deep SEO analysis |
54
+ | `rewrite <file>` | AI rewrite title, meta, description, OG, Twitter tags |
55
+ | `keywords <topic>` | AI keyword research with content strategy |
56
+ | `schema <file> --type <type>` | AI generate JSON-LD schema markup |
57
+
58
+ ### Schema Types
59
+
60
+ | Type | Key Properties |
61
+ |------|---------------|
62
+ | `Article` | headline, author, datePublished, image, publisher |
63
+ | `Product` | name, description, price, availability, review |
64
+ | `FAQ` | mainEntity with Question/Answer pairs |
65
+ | `HowTo` | name, step, totalTime, tool, supply |
66
+ | `LocalBusiness` | name, address, telephone, openingHours |
67
+ | `Event` | name, startDate, location, performer |
68
+
69
+ ## Examples
70
+
71
+ ### Local audit with scoring
72
+
73
+ bash scripts/seo.sh audit ./public
74
+
75
+ Output:
76
+
77
+ === ./public/index.html ===
78
+ [ISSUE] Missing meta description
79
+ [ISSUE] 2/5 images missing alt text
80
+ [WARN] Title too short (12 chars, aim for 50-60)
81
+ [WARN] Missing Open Graph tags
82
+ [OK] H1 OK: Welcome to Our Site
83
+ [OK] HTML lang attribute present
84
+
85
+ SEO Score: 67/100 (2 issues, 2 warnings, 1 files)
86
+
87
+ ### AI check a live URL
88
+
89
+ bash scripts/seo.sh check https://example.com
90
+
91
+ ### AI rewrite meta tags
92
+
93
+ bash scripts/seo.sh rewrite index.html
94
+
95
+ Output:
96
+
97
+ **Title Tag**
98
+ Before: <title>Home</title>
99
+ After: <title>Cloud Computing Solutions for Small Business | YourBrand</title>
100
+
101
+ **Meta Description**
102
+ Before: (missing)
103
+ After: <meta name="description" content="Scalable cloud solutions...">
104
+
105
+ ### AI keyword research
106
+
107
+ bash scripts/seo.sh keywords "cloud computing SaaS"
108
+
109
+ ### Generate schema markup
110
+
111
+ bash scripts/seo.sh schema blog-post.html --type Article
112
+
113
+ ### Generate sitemap
114
+
115
+ bash scripts/seo.sh sitemap ./public --base https://example.com
116
+
117
+ ## Configuration
118
+
119
+ | Variable | Default | Required | Description |
120
+ |---|---|---|---|
121
+ | `EVOLINK_API_KEY` | — | Yes (AI commands) | Your EvoLink API key. [Get one free](https://evolink.ai/signup?utm_source=clawhub&utm_medium=skill&utm_campaign=seo) |
122
+ | `EVOLINK_MODEL` | `claude-opus-4-6` | No | Model for AI analysis |
123
+
124
+ Required binaries: `python3`, `curl`
125
+
126
+ ## Security
127
+
128
+ **Data Transmission**
129
+
130
+ AI commands send HTML content or topic descriptions to `api.evolink.ai` for analysis by Claude. By setting `EVOLINK_API_KEY` and using these commands, you consent to this transmission. Data is not stored after the response is returned. The `audit` and `sitemap` commands run entirely locally and never transmit data.
131
+
132
+ **Network Access**
133
+
134
+ - Target URL (via curl) — `check` command fetches the page
135
+ - `api.evolink.ai` — AI analysis (AI commands only)
136
+
137
+ **Persistence & Privilege**
138
+
139
+ The `sitemap` command writes a `sitemap.xml` file to the specified directory. Temporary files for API payloads are cleaned up automatically. No credentials or persistent data are stored.
140
+
141
+ ## Links
142
+
143
+ - [GitHub](https://github.com/EvoLinkAI/seo-skill-for-openclaw)
144
+ - [EvoLink API](https://docs.evolink.ai/en/api-manual/language-series/claude/claude-messages-api?utm_source=clawhub&utm_medium=skill&utm_campaign=seo)
145
+ - [Community](https://discord.com/invite/5mGHfA24kn)
146
+ - [Support](mailto:support@evolink.ai)
@@ -0,0 +1,18 @@
1
+ {
2
+ "name": "SEO Assistant",
3
+ "version": "1.0.0",
4
+ "description": "AI-powered SEO analysis and optimization. Audit HTML pages, rewrite meta tags, research keywords, generate schema markup, and create sitemaps. Powered by evolink.ai",
5
+ "author": "EvoLinkAI",
6
+ "license": "MIT",
7
+ "type": "tool",
8
+ "requiredFiles": [],
9
+ "requiredEnvVars": ["EVOLINK_API_KEY"],
10
+ "optionalEnvVars": ["EVOLINK_MODEL"],
11
+ "requiredBinaries": ["python3", "curl"],
12
+ "externalServices": [
13
+ "api.evolink.ai"
14
+ ],
15
+ "dataTransmission": {
16
+ "api.evolink.ai": "AI commands (check, rewrite, keywords, schema) send HTML content or topic descriptions to api.evolink.ai for analysis by Claude. No data is stored after the response is returned. Local commands (audit, sitemap) run entirely locally and never transmit data."
17
+ }
18
+ }
@@ -0,0 +1,492 @@
#!/usr/bin/env bash
set -euo pipefail

# SEO Assistant — AI-powered SEO analysis and optimization
# Usage: bash seo.sh <command> [options]
#
# Commands:
#   audit <file|dir>            — Local HTML SEO audit
#   check <url>                 — Fetch + AI analyze a live URL
#   rewrite <file>              — AI rewrite title/meta/description
#   keywords <topic>            — AI keyword research
#   schema <file> --type <type> — AI generate schema markup
#   sitemap <dir> --base <url>  — Generate XML sitemap

# Directory containing this script.
# NOTE(review): SCRIPT_DIR is never referenced below — confirm before removing.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
# Messages endpoint used by all AI commands.
EVOLINK_API="https://api.evolink.ai/v1/messages"
17
+
# --- Helpers ---

# Print an error message to stderr and abort the whole script.
err() {
  echo "Error: $*" >&2
  exit 1
}
20
+
# Convert an MSYS/Git-Bash drive path (/c/Users/...) into the native
# Windows form (c:/Users/...); any other path passes through unchanged.
to_native_path() {
  local p="$1"
  if [[ "$p" =~ ^/([a-zA-Z])/ ]]; then
    echo "${BASH_REMATCH[1]}:/${p:3}"
  else
    echo "$p"
  fi
}
28
+
# Abort unless the external tools the AI commands rely on are present.
check_deps() {
  local bin
  for bin in python3 curl; do
    command -v "$bin" &>/dev/null || err "$bin not found."
  done
}
33
+
# Emit the contents of a file on stdout, aborting if it does not exist.
read_file() {
  local file="$1"
  if [ ! -f "$file" ]; then
    err "File not found: $file"
  fi
  cat "$file"
}
39
+
# Send a prompt plus content document to the EvoLink messages API and
# print the text blocks of the response.
#
#   $1 — instruction prompt
#   $2 — document/content to analyze
#
# Requires EVOLINK_API_KEY; honors EVOLINK_MODEL (default claude-opus-4-6).
#
# FIX: temp files are now removed when the function finishes. The old
# code relied solely on `trap ... EXIT`, which (a) only fires when the
# whole shell exits, not on function return, and (b) is overwritten by
# the next call, so every call before the last leaked its three mktemp
# files. The trap is kept as a safety net for early exits (e.g. a curl
# failure under `set -e`).
evolink_ai() {
  local prompt="$1"
  local content="$2"

  local api_key="${EVOLINK_API_KEY:?Set EVOLINK_API_KEY for AI features. Get one at https://evolink.ai/signup}"
  local model="${EVOLINK_MODEL:-claude-opus-4-6}"

  local tmp_prompt tmp_content tmp_payload
  tmp_prompt=$(mktemp)
  tmp_content=$(mktemp)
  tmp_payload=$(mktemp)
  # Safety net only: normal cleanup happens at the end of this function.
  trap "rm -f '$tmp_prompt' '$tmp_content' '$tmp_payload'" EXIT

  # Pass prompt/content through files so arbitrary quoting survives intact.
  printf '%s' "$prompt" > "$tmp_prompt"
  printf '%s' "$content" > "$tmp_content"

  local native_prompt native_content native_payload
  native_prompt=$(to_native_path "$tmp_prompt")
  native_content=$(to_native_path "$tmp_content")
  native_payload=$(to_native_path "$tmp_payload")

  # Build the JSON payload with python to get correct string escaping.
  python3 -c "
import json, sys

with open(sys.argv[1], 'r', encoding='utf-8') as f:
    prompt = f.read()
with open(sys.argv[2], 'r', encoding='utf-8') as f:
    content = f.read()

data = {
    'model': sys.argv[4],
    'max_tokens': 4096,
    'messages': [
        {
            'role': 'user',
            'content': prompt + '\n\n' + content
        }
    ]
}
with open(sys.argv[3], 'w', encoding='utf-8') as f:
    json.dump(data, f)
" "$native_prompt" "$native_content" "$native_payload" "$model"

  local response
  response=$(curl -s -X POST "$EVOLINK_API" \
    -H "Authorization: Bearer $api_key" \
    -H "Content-Type: application/json" \
    -d "@$tmp_payload")

  # Print text blocks on success, surface API errors on stderr,
  # and dump anything unrecognized verbatim for debugging.
  echo "$response" | python3 -c "
import json, sys
data = json.load(sys.stdin)
if 'content' in data:
    for block in data['content']:
        if block.get('type') == 'text':
            print(block['text'])
elif 'error' in data:
    print(f\"AI Error: {data['error'].get('message', str(data['error']))}\", file=sys.stderr)
else:
    print(json.dumps(data, indent=2))
"

  # Clean up this call's temp files now rather than at shell exit.
  rm -f "$tmp_prompt" "$tmp_content" "$tmp_payload"
}
102
+
# --- Local HTML audit (no API needed) ---

# Audit one HTML file, or every .html/.htm under a directory, and print
# per-file findings plus an aggregate 0-100 score. Runs entirely locally
# using only the Python standard library; nothing is transmitted.
local_audit() {
  local target="$1"

  python3 -c "
import sys, os, re
from html.parser import HTMLParser

# Collects the SEO-relevant features of a single HTML document.
class SEOParser(HTMLParser):
    def __init__(self):
        super().__init__()
        self.title = ''
        self.meta_desc = ''
        self.h1s = []
        self.h2s = []
        self.imgs_no_alt = 0
        self.imgs_total = 0
        self.has_lang = False
        self.has_viewport = False
        self.has_charset = False
        self.has_canonical = False
        self.og_tags = []
        self.twitter_tags = []
        self.links_count = 0
        self._tag = None  # tag whose text content is currently being captured

    def handle_starttag(self, tag, attrs):
        d = dict(attrs)
        if tag == 'html' and 'lang' in d:
            self.has_lang = True
        if tag == 'title':
            self._tag = 'title'
        if tag in ('h1','h2'):
            self._tag = tag
        if tag == 'meta':
            name = d.get('name','').lower()
            prop = d.get('property','').lower()
            content = d.get('content','')
            if name == 'description': self.meta_desc = content
            if name == 'viewport': self.has_viewport = True
            if d.get('charset'): self.has_charset = True
            if prop.startswith('og:'): self.og_tags.append(prop)
            if name.startswith('twitter:'): self.twitter_tags.append(name)
        if tag == 'link' and d.get('rel','') == 'canonical':
            self.has_canonical = True
        if tag == 'img':
            self.imgs_total += 1
            # Missing OR whitespace-only alt counts as missing.
            if not d.get('alt','').strip(): self.imgs_no_alt += 1
        if tag == 'a': self.links_count += 1

    def handle_data(self, data):
        # NOTE(review): data may arrive in chunks, so an h1/h2 containing
        # inline markup can append multiple partial entries — confirm.
        if self._tag == 'title': self.title += data
        elif self._tag == 'h1': self.h1s.append(data.strip())
        elif self._tag == 'h2': self.h2s.append(data.strip())

    def handle_endtag(self, tag):
        if tag in ('title','h1','h2'): self._tag = None

# Audit a single file; returns (issues, warnings, good) message lists.
def audit_file(path):
    with open(path, 'r', encoding='utf-8', errors='replace') as f:
        html = f.read()
    p = SEOParser()
    p.feed(html)

    issues = []
    warnings = []
    good = []

    # Title
    if not p.title.strip():
        issues.append('Missing <title> tag')
    elif len(p.title.strip()) < 30:
        warnings.append(f'Title too short ({len(p.title.strip())} chars, aim for 50-60)')
    elif len(p.title.strip()) > 60:
        warnings.append(f'Title too long ({len(p.title.strip())} chars, aim for 50-60)')
    else:
        good.append(f'Title OK ({len(p.title.strip())} chars): {p.title.strip()[:60]}')

    # Meta description
    if not p.meta_desc:
        issues.append('Missing meta description')
    elif len(p.meta_desc) < 120:
        warnings.append(f'Meta description short ({len(p.meta_desc)} chars, aim for 150-160)')
    elif len(p.meta_desc) > 160:
        warnings.append(f'Meta description long ({len(p.meta_desc)} chars, aim for 150-160)')
    else:
        good.append(f'Meta description OK ({len(p.meta_desc)} chars)')

    # H1
    if not p.h1s:
        issues.append('Missing H1 heading')
    elif len(p.h1s) > 1:
        warnings.append(f'Multiple H1 tags ({len(p.h1s)}), use only one')
    else:
        good.append(f'H1 OK: {p.h1s[0][:50]}')

    # Images
    if p.imgs_no_alt > 0:
        issues.append(f'{p.imgs_no_alt}/{p.imgs_total} images missing alt text')
    elif p.imgs_total > 0:
        good.append(f'All {p.imgs_total} images have alt text')

    # Technical
    if not p.has_lang: issues.append('Missing lang attribute on <html>')
    else: good.append('HTML lang attribute present')
    if not p.has_viewport: warnings.append('Missing viewport meta tag')
    else: good.append('Viewport meta tag present')
    if not p.has_charset: warnings.append('Missing charset meta tag')
    if not p.has_canonical: warnings.append('Missing canonical URL')
    if not p.og_tags: warnings.append('Missing Open Graph tags')
    else: good.append(f'Open Graph tags: {len(p.og_tags)}')
    if not p.twitter_tags: warnings.append('Missing Twitter Card tags')

    # Content — rough word count after stripping tags with a regex.
    text = re.sub(r'<[^>]+>', '', html)
    word_count = len(text.split())
    if word_count < 300:
        warnings.append(f'Low content ({word_count} words, aim for 300+)')
    else:
        good.append(f'Content length OK ({word_count} words)')

    return issues, warnings, good

# Expand the target into a list of HTML files (recursing directories,
# skipping hidden/vendor dirs); exits 1 if nothing is found.
def scan(target):
    files = []
    if os.path.isfile(target):
        files = [target]
    elif os.path.isdir(target):
        for root, dirs, fnames in os.walk(target):
            dirs[:] = [d for d in dirs if not d.startswith('.') and d not in ('node_modules','vendor')]
            for f in fnames:
                if f.endswith(('.html','.htm')):
                    files.append(os.path.join(root, f))
    if not files:
        print(f'No HTML files found in {target}')
        sys.exit(1)
    return files

target = sys.argv[1]
files = scan(target)
total_issues = 0
total_warnings = 0

for f in files:
    issues, warnings, good = audit_file(f)
    total_issues += len(issues)
    total_warnings += len(warnings)
    print(f'=== {f} ===')
    for i in issues: print(f'  [ISSUE] {i}')
    for w in warnings: print(f'  [WARN] {w}')
    for g in good: print(f'  [OK] {g}')
    print()

# Each issue costs 10 points, each warning 3, floored at 0.
score = max(0, 100 - total_issues * 10 - total_warnings * 3)
print(f'SEO Score: {score}/100 ({total_issues} issues, {total_warnings} warnings, {len(files)} files)')
" "$target"
}
261
+
# --- Commands ---

# audit <file|dir> — run the fully-local HTML audit (no API key needed).
cmd_audit() {
  local target="${1:?Usage: seo.sh audit <html-file-or-directory>}"
  echo "Running local SEO audit..." >&2
  local_audit "$target"
}
269
+
# check <url> — fetch a live page and run an AI SEO audit on it.
cmd_check() {
  local url="${1:?Usage: seo.sh check <url>}"
  check_deps

  echo "Fetching $url ..." >&2
  local html
  # Cap the fetched page at 15 KB. FIX: `|| true` guards the capture —
  # when head(1) closes the pipe early, curl can die of SIGPIPE, and
  # under `set -euo pipefail` that 141 status would silently kill the
  # whole script. The empty-check below still reports real failures.
  html=$(curl -sL -A "Mozilla/5.0 (compatible; SEOBot/1.0)" --max-time 15 "$url" | head -c 15000) || true

  [ -z "$html" ] && err "Failed to fetch URL or empty response."

  echo "Analyzing..." >&2
  evolink_ai "You are a senior SEO consultant. Analyze this HTML page and provide a comprehensive SEO audit:

1. **SEO Score** — Rate 0-100 with brief justification.
2. **Critical Issues** — Must-fix problems (missing title, meta, H1, alt text, lang, etc).
3. **Warnings** — Should-fix items (title length, description length, OG tags, schema, canonical).
4. **Content Analysis** — Keyword density, readability, content length assessment.
5. **Technical SEO** — Mobile-friendliness, structured data, canonical, robots directives.
6. **Recommendations** — Top 5 prioritized actions to improve rankings.

Be specific — reference actual content from the page. Show exact HTML fixes where applicable." "URL: $url

HTML SOURCE:
$html"
}
295
+
# rewrite <file> — AI rewrite of the page's SEO-critical tags.
cmd_rewrite() {
  local file="${1:?Usage: seo.sh rewrite <html-file>}"
  check_deps

  echo "Reading HTML..." >&2
  local content
  content=$(read_file "$file")
  local truncated
  # Bound the prompt at 12 KB. FIX: `|| true` keeps a SIGPIPE from
  # head(1) closing the pipe early from aborting the script under
  # `set -euo pipefail`; the truncated output is still captured.
  truncated=$(echo "$content" | head -c 12000) || true

  echo "Generating optimized SEO tags..." >&2
  evolink_ai "You are a senior SEO copywriter. Analyze this HTML page and rewrite its SEO elements for maximum search visibility:

1. **Title Tag** — Optimized title (50-60 chars). Show before/after.
2. **Meta Description** — Compelling description (150-160 chars) with CTA. Show before/after.
3. **H1 Tag** — Optimized H1 if needed. Show before/after.
4. **Open Graph Tags** — Complete og:title, og:description, og:type, og:image tags.
5. **Twitter Card Tags** — Complete twitter:card, twitter:title, twitter:description tags.
6. **Suggested Keywords** — 5-10 target keywords based on page content.

For each rewrite, show the exact HTML to copy-paste. Explain why each change improves SEO." "HTML SOURCE:
$truncated"
}
319
+
# keywords <topic> — AI keyword research; all remaining args form the topic.
cmd_keywords() {
  local topic="$*"
  if [ -z "$topic" ]; then
    err "Usage: seo.sh keywords <topic>"
  fi
  check_deps

  echo "Researching keywords..." >&2
  evolink_ai "You are a senior SEO strategist. Perform keyword research for the given topic:

1. **Primary Keywords** (5-8) — High-volume, directly relevant terms.
2. **Long-tail Keywords** (10-15) — Lower competition, specific phrases.
3. **LSI Keywords** (8-10) — Semantically related terms for content depth.
4. **Question Keywords** (5-8) — Questions people ask (for FAQ/featured snippets).
5. **Content Strategy** — Suggest 3-5 article titles targeting these keywords.
6. **Keyword Grouping** — Group keywords by search intent (informational, transactional, navigational).

For each keyword, estimate:
- Search intent (informational / transactional / navigational)
- Competition level (low / medium / high)
- Priority (must-target / should-target / nice-to-have)" "TOPIC: $topic"
}
340
+
# schema <file> --type <type> — AI-generated JSON-LD markup for the page.
cmd_schema() {
  local file=""
  local schema_type="Article"  # default when --type is omitted

  # Parse: one positional HTML file plus optional --type <SchemaType>.
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --type) schema_type="${2:?Missing schema type}"; shift 2 ;;
      -*) err "Unknown option: $1" ;;
      *) file="$1"; shift ;;
    esac
  done

  [ -z "$file" ] && err "Usage: seo.sh schema <html-file> --type <Article|Product|FAQ|HowTo|LocalBusiness|Event>"
  check_deps

  echo "Reading HTML..." >&2
  local content
  content=$(read_file "$file")
  local truncated
  # Bound the prompt at 12 KB. FIX: `|| true` keeps a SIGPIPE from
  # head(1) closing the pipe early from aborting the script under
  # `set -euo pipefail`; the truncated output is still captured.
  truncated=$(echo "$content" | head -c 12000) || true

  echo "Generating schema markup..." >&2
  evolink_ai "You are a structured data expert. Generate JSON-LD schema markup for this HTML page.

Schema type requested: $schema_type

Rules:
- Output valid JSON-LD wrapped in <script type=\"application/ld+json\"> tags.
- Extract real data from the page content (titles, descriptions, dates, authors, etc).
- Follow schema.org specifications exactly.
- Include all recommended properties for the $schema_type type.
- If the page content doesn't match the requested type well, suggest a better type.
- Show where to insert the script tag in the HTML.

Common types and their key properties:
- Article: headline, author, datePublished, image, publisher
- Product: name, description, price, availability, review
- FAQ: mainEntity with Question/Answer pairs
- HowTo: name, step, totalTime, tool, supply
- LocalBusiness: name, address, telephone, openingHours
- Event: name, startDate, location, performer" "HTML SOURCE:
$truncated"
}
+
# sitemap <dir> --base <url> — generate <dir>/sitemap.xml from the HTML
# files under <dir>. Runs entirely locally; nothing is transmitted.
cmd_sitemap() {
  local dir=""
  local base_url=""

  # Parse: one positional directory plus required --base <url>.
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --base) base_url="${2:?Missing base URL}"; shift 2 ;;
      -*) err "Unknown option: $1" ;;
      *) dir="$1"; shift ;;
    esac
  done

  [ -z "$dir" ] && err "Usage: seo.sh sitemap <directory> --base <https://example.com>"
  [ -z "$base_url" ] && err "Missing --base. Provide your site URL (e.g., --base https://example.com)"

  echo "Generating sitemap..." >&2

  python3 -c "
import os, sys
from datetime import datetime
from xml.dom.minidom import getDOMImplementation

directory = sys.argv[1]
base_url = sys.argv[2].rstrip('/')  # normalize: no trailing slash

# Build the <urlset> document via DOM so all text is properly escaped.
impl = getDOMImplementation()
doc = impl.createDocument('http://www.sitemaps.org/schemas/sitemap/0.9', 'urlset', None)
root = doc.documentElement
root.setAttribute('xmlns', 'http://www.sitemaps.org/schemas/sitemap/0.9')

count = 0
for dirpath, dirs, files in os.walk(directory):
    # Skip hidden and vendored directories.
    dirs[:] = [d for d in dirs if not d.startswith('.') and d not in ('node_modules','vendor','__pycache__')]
    for f in sorted(files):
        if not f.endswith(('.html','.htm')): continue
        fpath = os.path.join(dirpath, f)
        rel = os.path.relpath(fpath, directory).replace(os.sep, '/')
        # index files map to their directory URL; others drop the extension.
        if f.lower() in ('index.html','index.htm'):
            parent = os.path.dirname(rel)
            url_path = '' if parent == '.' else parent
        else:
            url_path = rel.rsplit('.', 1)[0]
        full_url = f'{base_url}/{url_path}'.rstrip('/')
        # NOTE(review): when url_path is '' the rstrip above already makes
        # full_url equal base_url, so this branch looks unreachable —
        # confirm before removing.
        if not full_url.endswith(base_url.split('/')[-1]):
            full_url = full_url if url_path else base_url + '/'

        # <lastmod> taken from the file's modification time (local date).
        mtime = datetime.fromtimestamp(os.path.getmtime(fpath)).strftime('%Y-%m-%d')

        url_el = doc.createElement('url')
        loc = doc.createElement('loc')
        loc.appendChild(doc.createTextNode(full_url))
        url_el.appendChild(loc)
        lm = doc.createElement('lastmod')
        lm.appendChild(doc.createTextNode(mtime))
        url_el.appendChild(lm)
        root.appendChild(url_el)
        count += 1

if count == 0:
    print('No HTML files found in', directory, file=sys.stderr)
    sys.exit(1)

xml_str = doc.toprettyxml(indent='  ', encoding='UTF-8').decode('utf-8')
outfile = os.path.join(directory, 'sitemap.xml')
with open(outfile, 'w', encoding='utf-8') as out:
    out.write(xml_str)

print(f'Sitemap generated: {outfile}')
print(f'URLs included: {count}')
print()
print('Next steps:')
print(f'1. Upload {outfile} to your website root')
print(f'2. Add to robots.txt: Sitemap: {base_url}/sitemap.xml')
print(f'3. Submit to Google Search Console and Bing Webmaster Tools')
" "$dir" "$base_url"
}
461
+
# --- Main ---
# First argument selects the subcommand; remaining args pass through.
COMMAND="${1:-help}"
shift || true  # tolerate zero arguments under `set -e`

case "$COMMAND" in
  audit) cmd_audit "$@" ;;
  check) cmd_check "$@" ;;
  rewrite) cmd_rewrite "$@" ;;
  keywords) cmd_keywords "$@" ;;
  schema) cmd_schema "$@" ;;
  sitemap) cmd_sitemap "$@" ;;
  # Unknown commands fall through to the help text.
  help|*)
    echo "SEO Assistant — AI-powered SEO analysis and optimization"
    echo ""
    echo "Usage: bash seo.sh <command> [options]"
    echo ""
    echo "Local Commands (no API key needed):"
    echo " audit <file|dir> Local HTML SEO audit with scoring"
    echo " sitemap <dir> --base <url> Generate XML sitemap"
    echo ""
    echo "AI Commands (requires EVOLINK_API_KEY):"
    echo " check <url> Fetch + AI deep SEO analysis"
    echo " rewrite <file> AI rewrite title/meta/description/OG"
    echo " keywords <topic> AI keyword research + content strategy"
    echo " schema <file> --type <type> AI generate JSON-LD schema markup"
    echo ""
    echo "Schema types: Article, Product, FAQ, HowTo, LocalBusiness, Event"
    echo ""
    echo "Get a free EvoLink API key: https://evolink.ai/signup"
    ;;
esac