@xyleapp/cli 0.4.2 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -40,22 +40,35 @@ xyle sync --site https://example.com
40
40
 
41
41
  ## Commands
42
42
 
43
- | Command | Description |
44
- | ------------- | ---------------------------------------- |
45
- | `status` | Check if the SEO API is reachable |
46
- | `queries` | List top search queries for a site |
47
- | `competitors` | Show competitor pages for a query |
48
- | `gaps` | Show content gaps for a page |
49
- | `analyze` | Analyze page content against competitors |
50
- | `rewrite` | Get AI rewrite suggestions |
51
- | `crawl` | Crawl a URL and extract SEO metadata |
52
- | `sync` | Sync Google Search Console data |
53
- | `login` | Authenticate with Google OAuth |
54
- | `logout` | Remove stored credentials |
55
- | `whoami` | Show current authentication status |
43
+ | Command | Description |
44
+ | ------------- | ---------------------------------------------- |
45
+ | `status` | Check if the SEO API is reachable |
46
+ | `setup` | Check backend requirements and Playwright info |
47
+ | `queries` | List top search queries for a site |
48
+ | `competitors` | Show competitor pages for a query |
49
+ | `gaps` | Show content gaps for a page |
50
+ | `analyze` | Analyze page content against competitors |
51
+ | `rewrite` | Get AI rewrite suggestions |
52
+ | `crawl` | Crawl a URL and extract SEO metadata |
53
+ | `sync` | Sync Google Search Console data |
54
+ | `login` | Authenticate with Google OAuth |
55
+ | `logout` | Remove stored credentials |
56
+ | `whoami` | Show current authentication status |
57
+ | `seed` | Add xyle agent instructions to your project |
56
58
 
57
59
  All data commands accept `--json` for machine-readable output.
58
60
 
61
+ ## Playwright (Server-Side)
62
+
63
+ The Xyle API uses Playwright to crawl JS-rendered pages (SPAs). If you self-host the API backend, install Chromium once:
64
+
65
+ ```bash
66
+ pip install playwright
67
+ playwright install chromium
68
+ ```
69
+
70
+ Docker deployments handle this automatically. Skip this if using the hosted API.
71
+
59
72
  ## Configuration
60
73
 
61
74
  | Environment Variable | Default | Description |
package/bin/xyle.mjs CHANGED
@@ -8,7 +8,7 @@ const program = new Command();
8
8
  program
9
9
  .name("xyle")
10
10
  .description("SEO Intelligence Engine CLI")
11
- .version("0.4.2");
11
+ .version("0.5.0");
12
12
 
13
13
  registerCommands(program);
14
14
 
package/package.json CHANGED
@@ -1,17 +1,21 @@
1
1
  {
2
2
  "name": "@xyleapp/cli",
3
- "version": "0.4.2",
3
+ "version": "0.5.0",
4
4
  "description": "CLI for the Xyle SEO Intelligence Engine",
5
5
  "type": "module",
6
6
  "bin": {
7
7
  "xyle": "bin/xyle.mjs"
8
8
  },
9
+ "scripts": {
10
+ "postinstall": "node scripts/postinstall.mjs"
11
+ },
9
12
  "engines": {
10
13
  "node": ">=18"
11
14
  },
12
15
  "files": [
13
16
  "bin/",
14
17
  "src/",
18
+ "scripts/",
15
19
  "README.md"
16
20
  ],
17
21
  "keywords": [
package/scripts/postinstall.mjs ADDED
@@ -0,0 +1,31 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * Post-install message shown after `npm install -g @xyleapp/cli`.
5
+ * Tells backend operators about Playwright/Chromium for JS-rendered crawling.
6
+ */
7
+
8
+ const CYAN = "\x1b[36m";
9
+ const GREEN = "\x1b[32m";
10
+ const YELLOW = "\x1b[33m";
11
+ const DIM = "\x1b[2m";
12
+ const RESET = "\x1b[0m";
13
+
14
+ console.log(`
15
+ ${GREEN}xyle CLI installed successfully!${RESET}
16
+
17
+ ${CYAN}Quick start:${RESET}
18
+ xyle status ${DIM}Check API connectivity${RESET}
19
+ xyle login ${DIM}Authenticate with Google${RESET}
20
+ xyle crawl --url <u> ${DIM}Crawl a page for SEO data${RESET}
21
+
22
+ ${YELLOW}Server-side setup (for self-hosted API):${RESET}
23
+ The Xyle API uses Playwright to crawl JS-rendered pages (SPAs).
24
+ If you're running the API backend locally, install Chromium:
25
+
26
+ pip install playwright
27
+ playwright install chromium
28
+
29
+ ${DIM}Docker deployments handle this automatically.${RESET}
30
+ ${DIM}Skip this if you're using the hosted API at https://api.xyle.app${RESET}
31
+ `);
package/src/commands.mjs CHANGED
@@ -194,6 +194,15 @@ export function registerCommands(program) {
194
194
  console.log(` ${h}`);
195
195
  }
196
196
  }
197
+ const wc = data.word_count || 0;
198
+ if (wc > 0 && wc < 50) {
199
+ console.log(
200
+ `\n\x1b[33mLow word count — this may be a JS-rendered page.\x1b[0m`
201
+ );
202
+ console.log(
203
+ `\x1b[2mEnsure Playwright + Chromium are installed on the API server: xyle setup\x1b[0m`
204
+ );
205
+ }
197
206
  }
198
207
  } catch (e) {
199
208
  handleError(e);
@@ -308,6 +317,42 @@ export function registerCommands(program) {
308
317
  }
309
318
  });
310
319
 
320
+ // --- setup ---
321
+ program
322
+ .command("setup")
323
+ .description("Check backend requirements and show setup instructions")
324
+ .action(async () => {
325
+ console.log("\x1b[36mXyle Backend Setup\x1b[0m\n");
326
+
327
+ // Check API connectivity
328
+ try {
329
+ const data = await checkHealth();
330
+ console.log(`\x1b[32m API: reachable (${SEO_BASE})\x1b[0m`);
331
+ } catch {
332
+ console.log(`\x1b[31m API: unreachable (${SEO_BASE})\x1b[0m`);
333
+ console.log(`\x1b[2m Set SEO_BASE env var or start the API server.\x1b[0m`);
334
+ }
335
+
336
+ // Auth check
337
+ const creds = getCredentials();
338
+ if (creds && creds.authenticated) {
339
+ console.log(`\x1b[32m Auth: logged in${creds.email ? ` (${creds.email})` : ""}\x1b[0m`);
340
+ } else {
341
+ console.log(`\x1b[33m Auth: not logged in\x1b[0m`);
342
+ console.log(`\x1b[2m Run: xyle login\x1b[0m`);
343
+ }
344
+
345
+ console.log(`\n\x1b[36mPlaywright Setup (server-side, for JS-rendered crawling)\x1b[0m\n`);
346
+ console.log(` The API uses Playwright to crawl JavaScript-rendered pages`);
347
+ console.log(` (SPAs like React, Next.js, Angular). Without it, crawling`);
348
+ console.log(` falls back to static HTML only.\n`);
349
+ console.log(` \x1b[33mInstall (one-time, on the machine running the API):\x1b[0m\n`);
350
+ console.log(` pip install playwright`);
351
+ console.log(` playwright install chromium\n`);
352
+ console.log(` \x1b[2mDocker deployments handle this automatically.\x1b[0m`);
353
+ console.log(` \x1b[2mSkip this if using the hosted API at https://api.xyle.app\x1b[0m\n`);
354
+ });
355
+
311
356
  // --- seed ---
312
357
  program
313
358
  .command("seed")
@@ -350,7 +395,7 @@ export function registerCommands(program) {
350
395
  toolNames = selected;
351
396
  }
352
397
 
353
- const { created, skipped } = seedInstructions(opts.dir, toolNames);
398
+ const { created, appended, skipped } = seedInstructions(opts.dir, toolNames);
354
399
 
355
400
  if (created.length) {
356
401
  console.log("\x1b[32mCreated:\x1b[0m");
@@ -358,16 +403,22 @@ export function registerCommands(program) {
358
403
  console.log(` + ${f}`);
359
404
  }
360
405
  }
406
+ if (appended.length) {
407
+ console.log("\x1b[32mAppended:\x1b[0m");
408
+ for (const f of appended) {
409
+ console.log(` ~ ${f}`);
410
+ }
411
+ }
361
412
  if (skipped.length) {
362
413
  console.log("\x1b[33mSkipped:\x1b[0m");
363
414
  for (const f of skipped) {
364
415
  console.log(` - ${f}`);
365
416
  }
366
417
  }
367
- if (!created.length && !skipped.length) {
418
+ if (!created.length && !appended.length && !skipped.length) {
368
419
  console.log("Nothing to do.");
369
420
  }
370
- if (created.length) {
421
+ if (created.length || appended.length) {
371
422
  console.log(
372
423
  `\n\x1b[32mDone!\x1b[0m Your AI coding tools will now know about xyle.`
373
424
  );
package/src/seed.mjs CHANGED
@@ -3,7 +3,7 @@
3
3
  * Each tool reads from a specific file path to learn about xyle.
4
4
  */
5
5
 
6
- import { writeFileSync, mkdirSync, existsSync } from "node:fs";
6
+ import { writeFileSync, readFileSync, appendFileSync, mkdirSync, existsSync } from "node:fs";
7
7
  import { join, dirname } from "node:path";
8
8
 
9
9
  const INSTRUCTIONS = `# Xyle — SEO Intelligence Engine
@@ -80,7 +80,7 @@ npx @xyleapp/cli sync --site <url> [--json]
80
80
 
81
81
  | Variable | Default | Description |
82
82
  |----------|---------|-------------|
83
- | \`SEO_BASE\` | \`http://localhost:8765\` | API base URL |
83
+ | \`SEO_BASE\` | \`https://api.xyle.app\` | API base URL |
84
84
  | \`AGENT_API_KEY\` | — | Fallback API key (when not using Google OAuth) |
85
85
 
86
86
  ## Workflows
@@ -185,24 +185,33 @@ export function seedInstructions(targetDir, toolNames) {
185
185
  const tools = toolNames
186
186
  ? Object.fromEntries(toolNames.map((n) => [n, TOOLS[n]]))
187
187
  : TOOLS;
188
+ const MARKER = "# Xyle — SEO Intelligence Engine";
188
189
  const created = [];
190
+ const appended = [];
189
191
  const skipped = [];
190
192
 
191
193
  for (const [name, tool] of Object.entries(tools)) {
192
194
  const filePath = join(targetDir, tool.path);
193
- if (existsSync(filePath)) {
194
- skipped.push(`${tool.label} (${tool.path}) — already exists`);
195
- continue;
196
- }
197
195
  const dir = dirname(filePath);
198
196
  if (!existsSync(dir)) {
199
197
  mkdirSync(dir, { recursive: true });
200
198
  }
201
- writeFileSync(filePath, INSTRUCTIONS, "utf-8");
202
- created.push(`${tool.label} (${tool.path})`);
199
+
200
+ if (existsSync(filePath)) {
201
+ const existing = readFileSync(filePath, "utf-8");
202
+ if (existing.includes(MARKER)) {
203
+ skipped.push(`${tool.label} (${tool.path}) — xyle instructions already present`);
204
+ continue;
205
+ }
206
+ appendFileSync(filePath, "\n\n" + INSTRUCTIONS, "utf-8");
207
+ appended.push(`${tool.label} (${tool.path})`);
208
+ } else {
209
+ writeFileSync(filePath, INSTRUCTIONS, "utf-8");
210
+ created.push(`${tool.label} (${tool.path})`);
211
+ }
203
212
  }
204
213
 
205
- return { created, skipped };
214
+ return { created, appended, skipped };
206
215
  }
207
216
 
208
217
  /**