@arabold/docs-mcp-server 1.11.0 → 1.12.1

This diff shows the content of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -1,131 +1,37 @@
  #!/usr/bin/env node
- import {
- DEFAULT_MAX_CONCURRENCY,
- DEFAULT_MAX_DEPTH,
- DEFAULT_MAX_PAGES,
- DocumentManagementService,
- FetchUrlTool,
- FileFetcher,
- FindVersionTool,
- HttpFetcher,
- ListLibrariesTool,
- PipelineManager,
- ScrapeMode,
- ScrapeTool,
- SearchTool,
- setLogLevel
- } from "./chunk-VF2RUEVV.js";
- import "./chunk-YCXNASA6.js";
-
- // src/cli.ts
  import "dotenv/config";
  import { Command } from "commander";
-
- // package.json
- var package_default = {
- name: "@arabold/docs-mcp-server",
- version: "1.10.0",
- description: "MCP server for fetching and searching documentation",
- type: "module",
- bin: {
- "docs-server": "dist/server.js",
- "docs-cli": "dist/cli.js"
- },
- license: "MIT",
- repository: {
- type: "git",
- url: "git+https://github.com/arabold/docs-mcp-server.git"
- },
- files: [
- "dist",
- "README.md",
- "LICENSE",
- "package.json"
- ],
- scripts: {
- prepare: "husky || true",
- build: "tsup",
- cli: "node --enable-source-maps dist/cli.js",
- start: "node --enable-source-maps dist/server.js",
- "dev:cli": "npm run build && node --enable-source-maps dist/cli.js",
- server: "node --enable-source-maps --watch dist/server.js",
- "dev:server": 'run-p "build -- --watch" "server"',
- test: "vitest run",
- "test:watch": "vitest",
- "test:coverage": "vitest run --coverage",
- lint: "biome check .",
- format: "biome format . --write",
- postinstall: "npx playwright install --no-shell --with-deps chromium"
- },
- dependencies: {
- "@joplin/turndown-plugin-gfm": "^1.0.61",
- "@langchain/aws": "^0.1.8",
- "@langchain/community": "^0.3.34",
- "@langchain/google-genai": "^0.2.3",
- "@langchain/google-vertexai": "^0.2.4",
- "@langchain/openai": "^0.5.0",
- "@modelcontextprotocol/sdk": "^1.10.2",
- axios: "^1.8.3",
- "axios-retry": "^4.5.0",
- "better-sqlite3": "^11.9.1",
- cheerio: "^1.0.0",
- commander: "^13.1.0",
- dotenv: "^16.4.7",
- "env-paths": "^3.0.0",
- "fuse.js": "^7.1.0",
- jsdom: "^26.0.0",
- langchain: "0.3.19",
- pg: "^8.14.0",
- playwright: "^1.52.0",
- psl: "^1.15.0",
- remark: "^15.0.1",
- "remark-gfm": "^4.0.1",
- "remark-html": "^16.0.1",
- semver: "^7.7.1",
- "sqlite-vec": "^0.1.7-alpha.2",
- turndown: "^7.2.0",
- zod: "^3.24.2"
- },
- devDependencies: {
- "@biomejs/biome": "1.9.4",
- "@commitlint/cli": "^19.8.0",
- "@commitlint/config-conventional": "^19.8.0",
- "@semantic-release/changelog": "^6.0.3",
- "@semantic-release/git": "^10.0.1",
- "@semantic-release/github": "^11.0.1",
- "@semantic-release/npm": "^12.0.1",
- "@types/better-sqlite3": "^7.6.12",
- "@types/jsdom": "~21.1.7",
- "@types/lint-staged": "~13.3.0",
- "@types/node": "^20.17.23",
- "@types/node-fetch": "^2.6.12",
- "@types/pg": "~8.11.11",
- "@types/psl": "^1.1.3",
- "@types/semver": "^7.5.8",
- "@types/turndown": "^5.0.5",
- husky: "^9.1.7",
- "lint-staged": "^15.5.0",
- memfs: "^4.17.0",
- "npm-run-all": "^4.1.5",
- "semantic-release": "^24.2.3",
- tsup: "^8.4.0",
- typescript: "^5.8.2",
- vite: "^6.2.1",
- vitest: "^3.0.8"
- },
- engines: {
- node: ">=20.0.0"
- },
- "lint-staged": {
- "*.{js,ts,jsx,tsx,json,md}": [
- "biome check --apply --no-errors-on-unmatched",
- "biome format --write --no-errors-on-unmatched"
- ]
- }
+ import { D as DocumentManagementService, a as PipelineManager, H as HttpFetcher, F as FileFetcher, S as SearchTool, e as ScrapeTool, f as ListLibrariesTool, c as DEFAULT_MAX_PAGES, b as DEFAULT_MAX_DEPTH, i as DEFAULT_MAX_CONCURRENCY, j as ScrapeMode, s as setLogLevel, d as LogLevel } from "./DocumentManagementService-_qCZ1Hi2.js";
+ import "semver";
+ import { F as FetchUrlTool, a as FindVersionTool } from "./FindVersionTool-CH1c3Tyu.js";
+ const name = "@arabold/docs-mcp-server";
+ const version = "1.12.0";
+ const description = "MCP server for fetching and searching documentation";
+ const type = "module";
+ const bin = { "docs-server": "dist/server.js", "docs-cli": "dist/cli.js", "docs-web": "dist/web.js" };
+ const license = "MIT";
+ const repository = { "type": "git", "url": "git+https://github.com/arabold/docs-mcp-server.git" };
+ const files = ["dist", "public", "db", "README.md", "LICENSE", "package.json"];
+ const scripts = { "prepare": "husky || true", "build": "vite build --config vite.config.web.ts && vite build", "start": "node --enable-source-maps dist/server.js", "dev:cli": "vite build && node --enable-source-maps dist/cli.js", "dev:server": "vite-node --watch src/server.ts", "dev:server:stdio": "vite-node --watch src/server.ts -- --protocol stdio", "dev:server:http": "vite-node --watch src/server.ts -- --protocol http", "dev:web": "npm-run-all --parallel watch:web web", "watch:web": "vite build --config vite.config.web.ts --watch", "cli": "node --enable-source-maps dist/cli.js", "server": "vite-node --watch src/server.ts", "web": "vite-node --watch src/web.ts", "test": "vitest run", "test:watch": "vitest", "test:coverage": "vitest run --coverage", "lint": "biome check .", "format": "biome format . --write", "postinstall": "npx playwright install --no-shell --with-deps chromium" };
+ const dependencies = { "@fastify/formbody": "^8.0.2", "@fastify/static": "^8.1.1", "@joplin/turndown-plugin-gfm": "^1.0.61", "@kitajs/html": "^4.2.7", "@kitajs/ts-html-plugin": "^4.1.1", "@langchain/aws": "^0.1.8", "@langchain/community": "^0.3.34", "@langchain/google-genai": "^0.2.3", "@langchain/google-vertexai": "^0.2.4", "@langchain/openai": "^0.5.0", "@modelcontextprotocol/sdk": "^1.10.2", "alpinejs": "^3.14.9", "axios": "^1.8.3", "axios-retry": "^4.5.0", "better-sqlite3": "^11.9.1", "cheerio": "^1.0.0", "commander": "^13.1.0", "dompurify": "^3.2.5", "dotenv": "^16.4.7", "env-paths": "^3.0.0", "fastify": "^5.3.0", "flowbite": "^3.1.2", "fuse.js": "^7.1.0", "header-generator": "^2.1.66", "htmx.org": "^1.9.12", "jsdom": "^26.0.0", "langchain": "0.3.19", "playwright": "^1.52.0", "psl": "^1.15.0", "remark": "^15.0.1", "remark-gfm": "^4.0.1", "remark-html": "^16.0.1", "semver": "^7.7.1", "sqlite-vec": "^0.1.7-alpha.2", "turndown": "^7.2.0", "zod": "^3.24.2" };
+ const devDependencies = { "@biomejs/biome": "1.9.4", "@commitlint/cli": "^19.8.0", "@commitlint/config-conventional": "^19.8.0", "@semantic-release/changelog": "^6.0.3", "@semantic-release/git": "^10.0.1", "@semantic-release/github": "^11.0.1", "@semantic-release/npm": "^12.0.1", "@tailwindcss/postcss": "^4.1.4", "@tailwindcss/vite": "^4.1.4", "@types/alpinejs": "^3.13.11", "@types/better-sqlite3": "^7.6.12", "@types/jsdom": "~21.1.7", "@types/lint-staged": "~13.3.0", "@types/node": "^20.17.23", "@types/node-fetch": "^2.6.12", "@types/psl": "^1.1.3", "@types/semver": "^7.5.8", "@types/turndown": "^5.0.5", "autoprefixer": "^10.4.21", "flowbite-typography": "^1.0.5", "husky": "^9.1.7", "lint-staged": "^15.5.0", "memfs": "^4.17.0", "npm-run-all": "^4.1.5", "postcss": "^8.5.3", "semantic-release": "^24.2.3", "tailwindcss": "^4.1.4", "typescript": "^5.8.2", "vite": "^6.2.1", "vite-node": "^3.1.2", "vite-plugin-dts": "^4.5.3", "vitest": "^3.0.8" };
+ const engines = { "node": ">=20.0.0" };
+ const packageJson = {
+ name,
+ version,
+ description,
+ type,
+ bin,
+ license,
+ repository,
+ files,
+ scripts,
+ dependencies,
+ devDependencies,
+ engines,
+ "lint-staged": { "*.{js,ts,jsx,tsx,json,md}": ["biome check --apply --no-errors-on-unmatched", "biome format --write --no-errors-on-unmatched"] }
  };
-
- // src/cli.ts
- var formatOutput = (data) => JSON.stringify(data, null, 2);
+ const formatOutput = (data) => JSON.stringify(data, null, 2);
  async function main() {
  let docService;
  let pipelineManager;
@@ -148,7 +54,7 @@ async function main() {
  if (docService) await docService.shutdown();
  process.exit(0);
  });
- program.name("docs-mcp").description("CLI for managing documentation vector store").version(package_default.version).option("--verbose", "Enable verbose (debug) logging", false).option("--silent", "Disable all logging except errors", false);
+ program.name("docs-mcp").description("CLI for managing documentation vector store").version(packageJson.version).option("--verbose", "Enable verbose (debug) logging", false).option("--silent", "Disable all logging except errors", false);
  program.command("scrape <library> <url>").description("Scrape and index documentation from a URL").option("-v, --version <string>", "Version of the library (optional)").option(
  "-p, --max-pages <number>",
  "Maximum pages to scrape",
@@ -178,18 +84,18 @@ async function main() {
  "Disable following HTTP redirects (default: follow redirects)"
  ).option(
  "--scrape-mode <mode>",
- `HTML processing strategy: '${"fetch" /* Fetch */}', '${"playwright" /* Playwright */}', '${"auto" /* Auto */}' (default)`,
+ `HTML processing strategy: '${ScrapeMode.Fetch}', '${ScrapeMode.Playwright}', '${ScrapeMode.Auto}' (default)`,
  (value) => {
  const validModes = Object.values(ScrapeMode);
  if (!validModes.includes(value)) {
  console.warn(
- `Warning: Invalid scrape mode '${value}'. Using default '${"auto" /* Auto */}'.`
+ `Warning: Invalid scrape mode '${value}'. Using default '${ScrapeMode.Auto}'.`
  );
- return "auto" /* Auto */;
+ return ScrapeMode.Auto;
  }
  return value;
  },
- "auto" /* Auto */
+ ScrapeMode.Auto
  // Use enum default
  ).action(async (library, url, options) => {
  const result = await tools.scrape.execute({
@@ -211,9 +117,9 @@ async function main() {
  // CLI always waits for completion (default behavior)
  });
  if ("pagesScraped" in result) {
- console.log(`\u2705 Successfully scraped ${result.pagesScraped} pages`);
+ console.log(`✅ Successfully scraped ${result.pagesScraped} pages`);
  } else {
- console.log(`\u{1F680} Scraping job started with ID: ${result.jobId}`);
+ console.log(`🚀 Scraping job started with ID: ${result.jobId}`);
  }
  });
  program.command("search <library> <query>").description(
@@ -263,15 +169,15 @@ async function main() {
  if (!docService) {
  throw new Error("Document service not initialized.");
  }
- const { version } = options;
+ const { version: version2 } = options;
  try {
- await docService.removeAllDocuments(library, version);
+ await docService.removeAllDocuments(library, version2);
  console.log(
- `\u2705 Successfully removed documents for ${library}${version ? `@${version}` : " (unversioned)"}.`
+ `✅ Successfully removed documents for ${library}${version2 ? `@${version2}` : " (unversioned)"}.`
  );
  } catch (error) {
  console.error(
- `\u274C Failed to remove documents for ${library}${version ? `@${version}` : " (unversioned)"}:`
+ `❌ Failed to remove documents for ${library}${version2 ? `@${version2}` : " (unversioned)"}:`
  error instanceof Error ? error.message : String(error)
  );
  throw error;
@@ -282,18 +188,18 @@ async function main() {
  "Disable following HTTP redirects (default: follow redirects)"
  ).option(
  "--scrape-mode <mode>",
- `HTML processing strategy: '${"fetch" /* Fetch */}', '${"playwright" /* Playwright */}', '${"auto" /* Auto */}' (default)`,
+ `HTML processing strategy: '${ScrapeMode.Fetch}', '${ScrapeMode.Playwright}', '${ScrapeMode.Auto}' (default)`,
  (value) => {
  const validModes = Object.values(ScrapeMode);
  if (!validModes.includes(value)) {
  console.warn(
- `Warning: Invalid scrape mode '${value}'. Using default '${"auto" /* Auto */}'.`
+ `Warning: Invalid scrape mode '${value}'. Using default '${ScrapeMode.Auto}'.`
  );
- return "auto" /* Auto */;
+ return ScrapeMode.Auto;
  }
  return value;
  },
- "auto" /* Auto */
+ ScrapeMode.Auto
  // Use enum default
  ).action(async (url, options) => {
  const content = await tools.fetchUrl.execute({
@@ -307,9 +213,9 @@ async function main() {
  program.hook("preAction", (thisCommand) => {
  const options = thisCommand.opts();
  if (options.silent) {
- setLogLevel(0 /* ERROR */);
+ setLogLevel(LogLevel.ERROR);
  } else if (options.verbose) {
- setLogLevel(3 /* DEBUG */);
+ setLogLevel(LogLevel.DEBUG);
  }
  });
  await program.parseAsync();
@@ -327,4 +233,4 @@ main().catch((error) => {
  console.error("Fatal error:", error);
  process.exit(1);
  });
- //# sourceMappingURL=cli.js.map
+ //# sourceMappingURL=cli.js.map
package/dist/cli.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/cli.ts","../package.json"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { Command } from \"commander\";\nimport packageJson from \"../package.json\";\nimport { PipelineManager } from \"./pipeline/PipelineManager\";\nimport { FileFetcher, HttpFetcher } from \"./scraper/fetcher\";\nimport { ScrapeMode } from \"./scraper/types\"; // Import ScrapeMode enum\nimport { DocumentManagementService } from \"./store/DocumentManagementService\";\nimport {\n FetchUrlTool,\n FindVersionTool,\n ListLibrariesTool,\n ScrapeTool,\n SearchTool,\n} from \"./tools\";\nimport {\n DEFAULT_MAX_CONCURRENCY,\n DEFAULT_MAX_DEPTH,\n DEFAULT_MAX_PAGES,\n} from \"./utils/config\";\nimport { LogLevel, setLogLevel } from \"./utils/logger\";\n\nconst formatOutput = (data: unknown) => JSON.stringify(data, null, 2);\n\nasync function main() {\n let docService: DocumentManagementService | undefined;\n let pipelineManager: PipelineManager | undefined;\n\n try {\n docService = new DocumentManagementService();\n await docService.initialize();\n\n // Instantiate PipelineManager for CLI use\n pipelineManager = new PipelineManager(docService); // Assign inside try\n // Start the manager for the CLI session\n await pipelineManager.start();\n\n const tools = {\n listLibraries: new ListLibrariesTool(docService),\n findVersion: new FindVersionTool(docService),\n scrape: new ScrapeTool(docService, pipelineManager), // Pass manager\n search: new SearchTool(docService),\n fetchUrl: new FetchUrlTool(new HttpFetcher(), new FileFetcher()),\n };\n\n const program = new Command();\n\n // Handle cleanup on SIGINT\n process.on(\"SIGINT\", async () => {\n if (pipelineManager) await pipelineManager.stop(); // Check before stopping\n if (docService) await docService.shutdown(); // Check before stopping\n process.exit(0);\n });\n\n program\n .name(\"docs-mcp\")\n .description(\"CLI for managing documentation vector store\")\n .version(packageJson.version)\n // Add global options for logging level\n .option(\"--verbose\", \"Enable verbose (debug) logging\", false)\n .option(\"--silent\", \"Disable all logging except errors\", false);\n\n program\n .command(\"scrape <library> <url>\") // Remove <version> as positional\n .description(\"Scrape and index documentation from a URL\")\n .option(\"-v, --version <string>\", \"Version of the library (optional)\") // Add optional version flag\n .option(\n \"-p, --max-pages <number>\",\n \"Maximum pages to scrape\",\n DEFAULT_MAX_PAGES.toString(),\n )\n .option(\n \"-d, --max-depth <number>\",\n \"Maximum navigation depth\",\n DEFAULT_MAX_DEPTH.toString(),\n )\n .option(\n \"-c, --max-concurrency <number>\",\n \"Maximum concurrent page requests\",\n DEFAULT_MAX_CONCURRENCY.toString(),\n )\n .option(\"--ignore-errors\", \"Ignore errors during scraping\", true)\n .option(\n \"--scope <scope>\",\n \"Crawling boundary: 'subpages' (default), 'hostname', or 'domain'\",\n (value) => {\n const validScopes = [\"subpages\", \"hostname\", \"domain\"];\n if (!validScopes.includes(value)) {\n console.warn(`Warning: Invalid scope '${value}'. 
Using default 'subpages'.`);\n return \"subpages\";\n }\n return value;\n },\n \"subpages\",\n )\n .option(\n \"--no-follow-redirects\",\n \"Disable following HTTP redirects (default: follow redirects)\",\n )\n .option(\n \"--scrape-mode <mode>\",\n `HTML processing strategy: '${ScrapeMode.Fetch}', '${ScrapeMode.Playwright}', '${ScrapeMode.Auto}' (default)`,\n (value: string): ScrapeMode => {\n const validModes = Object.values(ScrapeMode);\n if (!validModes.includes(value as ScrapeMode)) {\n console.warn(\n `Warning: Invalid scrape mode '${value}'. Using default '${ScrapeMode.Auto}'.`,\n );\n return ScrapeMode.Auto;\n }\n return value as ScrapeMode; // Cast to enum type\n },\n ScrapeMode.Auto, // Use enum default\n )\n .action(async (library, url, options) => {\n // Update action parameters\n const result = await tools.scrape.execute({\n url,\n library,\n version: options.version, // Get version from options\n options: {\n maxPages: Number.parseInt(options.maxPages),\n maxDepth: Number.parseInt(options.maxDepth),\n maxConcurrency: Number.parseInt(options.maxConcurrency),\n ignoreErrors: options.ignoreErrors,\n scope: options.scope,\n followRedirects: options.followRedirects, // This will be `true` by default, or `false` if --no-follow-redirects is used\n scrapeMode: options.scrapeMode, // Pass the new scrapeMode option\n },\n // CLI always waits for completion (default behavior)\n });\n // Type guard to satisfy TypeScript\n if (\"pagesScraped\" in result) {\n console.log(`✅ Successfully scraped ${result.pagesScraped} pages`);\n } else {\n // This branch should not be hit by the CLI\n console.log(`🚀 Scraping job started with ID: ${result.jobId}`);\n }\n });\n\n program\n .command(\"search <library> <query>\") // Remove <version> as positional\n .description(\n \"Search documents in a library. 
Version matching examples:\\n\" +\n \" - search react --version 18.0.0 'hooks' -> matches docs for React 18.0.0 or earlier versions\\n\" +\n \" - search react --version 18.0.0 'hooks' --exact-match -> only matches React 18.0.0\\n\" +\n \" - search typescript --version 5.x 'types' -> matches any TypeScript 5.x.x version\\n\" +\n \" - search typescript --version 5.2.x 'types' -> matches any TypeScript 5.2.x version\",\n )\n .option(\n \"-v, --version <string>\", // Add optional version flag\n \"Version of the library (optional, supports ranges)\",\n )\n .option(\"-l, --limit <number>\", \"Maximum number of results\", \"5\")\n .option(\n \"-e, --exact-match\",\n \"Only use exact version match (e.g., '18.0.0' matches only 18.0.0, not 17.x.x) (default: false)\",\n false,\n )\n .action(async (library, query, options) => {\n // Update action parameters\n const result = await tools.search.execute({\n library,\n version: options.version, // Get version from options\n query,\n limit: Number.parseInt(options.limit),\n exactMatch: options.exactMatch,\n });\n console.log(formatOutput(result.results));\n });\n\n program\n .command(\"list\")\n .description(\"List all available libraries and their versions\")\n .action(async () => {\n const result = await tools.listLibraries.execute();\n console.log(formatOutput(result.libraries));\n });\n\n program\n .command(\"find-version <library>\") // Remove [targetVersion] positional\n .description(\"Find the best matching version for a library\")\n .option(\n \"-v, --version <string>\", // Add optional version flag\n \"Pattern to match (optional, supports ranges)\",\n )\n .action(async (library, options) => {\n // Update action parameters\n const versionInfo = await tools.findVersion.execute({\n library,\n targetVersion: options.version, // Get version from options\n });\n // findVersion.execute now returns a string, handle potential error messages within it\n if (!versionInfo) {\n // Should not happen with current tool logic, but good practice\n throw new Error(\"Failed to get version information\");\n }\n console.log(versionInfo); // Log the descriptive string from the tool\n });\n\n program\n .command(\"remove <library>\") // Library as positional argument\n .description(\"Remove documents for a specific library and version\")\n .option(\n \"-v, --version <string>\",\n \"Version to remove (optional, removes unversioned if omitted)\",\n )\n .action(async (library, options) => {\n // library is now the first arg\n if (!docService) {\n throw new Error(\"Document service not initialized.\");\n }\n const { version } = options; // Get version from options\n try {\n await docService.removeAllDocuments(library, version);\n console.log(\n `✅ Successfully removed documents for ${library}${version ? `@${version}` : \" (unversioned)\"}.`,\n );\n } catch (error) {\n console.error(\n `❌ Failed to remove documents for ${library}${version ? `@${version}` : \" (unversioned)\"}:`,\n error instanceof Error ? 
error.message : String(error),\n );\n // Re-throw to trigger the main catch block for shutdown\n throw error;\n }\n });\n\n program\n .command(\"fetch-url <url>\")\n .description(\"Fetch a URL and convert its content to Markdown\")\n .option(\n \"--no-follow-redirects\",\n \"Disable following HTTP redirects (default: follow redirects)\",\n )\n .option(\n \"--scrape-mode <mode>\",\n `HTML processing strategy: '${ScrapeMode.Fetch}', '${ScrapeMode.Playwright}', '${ScrapeMode.Auto}' (default)`,\n (value: string): ScrapeMode => {\n const validModes = Object.values(ScrapeMode);\n if (!validModes.includes(value as ScrapeMode)) {\n console.warn(\n `Warning: Invalid scrape mode '${value}'. Using default '${ScrapeMode.Auto}'.`,\n );\n return ScrapeMode.Auto;\n }\n return value as ScrapeMode; // Cast to enum type\n },\n ScrapeMode.Auto, // Use enum default\n )\n .action(async (url, options) => {\n const content = await tools.fetchUrl.execute({\n url,\n followRedirects: options.followRedirects,\n scrapeMode: options.scrapeMode, // Pass the scrapeMode option\n });\n console.log(content);\n });\n\n // Hook to set log level after parsing global options but before executing command action\n program.hook(\"preAction\", (thisCommand) => {\n // Global options are attached to the program (thisCommand)\n const options = thisCommand.opts();\n if (options.silent) {\n // If silent is true, it overrides verbose\n setLogLevel(LogLevel.ERROR);\n } else if (options.verbose) {\n setLogLevel(LogLevel.DEBUG);\n }\n // Otherwise, the default LogLevel.INFO remains set from logger.ts\n });\n\n await program.parseAsync();\n } catch (error) {\n console.error(\"Error:\", error instanceof Error ? error.message : String(error));\n if (pipelineManager) await pipelineManager.stop(); // Check before stopping\n if (docService) await docService.shutdown();\n process.exit(1);\n }\n\n // Clean shutdown after successful execution\n if (pipelineManager) await pipelineManager.stop(); // Check before stopping\n await docService.shutdown();\n process.exit(0);\n}\n\nmain().catch((error) => {\n console.error(\"Fatal error:\", error);\n process.exit(1);\n});\n","{\n \"name\": \"@arabold/docs-mcp-server\",\n \"version\": \"1.10.0\",\n \"description\": \"MCP server for fetching and searching documentation\",\n \"type\": \"module\",\n \"bin\": {\n \"docs-server\": \"dist/server.js\",\n \"docs-cli\": \"dist/cli.js\"\n },\n \"license\": \"MIT\",\n \"repository\": {\n \"type\": \"git\",\n \"url\": \"git+https://github.com/arabold/docs-mcp-server.git\"\n },\n \"files\": [\n \"dist\",\n \"README.md\",\n \"LICENSE\",\n \"package.json\"\n ],\n \"scripts\": {\n \"prepare\": \"husky || true\",\n \"build\": \"tsup\",\n \"cli\": \"node --enable-source-maps dist/cli.js\",\n \"start\": \"node --enable-source-maps dist/server.js\",\n \"dev:cli\": \"npm run build && node --enable-source-maps dist/cli.js\",\n \"server\": \"node --enable-source-maps --watch dist/server.js\",\n \"dev:server\": \"run-p \\\"build -- --watch\\\" \\\"server\\\"\",\n \"test\": \"vitest run\",\n \"test:watch\": \"vitest\",\n \"test:coverage\": \"vitest run --coverage\",\n \"lint\": \"biome check .\",\n \"format\": \"biome format . 
--write\",\n \"postinstall\": \"npx playwright install --no-shell --with-deps chromium\"\n },\n \"dependencies\": {\n \"@joplin/turndown-plugin-gfm\": \"^1.0.61\",\n \"@langchain/aws\": \"^0.1.8\",\n \"@langchain/community\": \"^0.3.34\",\n \"@langchain/google-genai\": \"^0.2.3\",\n \"@langchain/google-vertexai\": \"^0.2.4\",\n \"@langchain/openai\": \"^0.5.0\",\n \"@modelcontextprotocol/sdk\": \"^1.10.2\",\n \"axios\": \"^1.8.3\",\n \"axios-retry\": \"^4.5.0\",\n \"better-sqlite3\": \"^11.9.1\",\n \"cheerio\": \"^1.0.0\",\n \"commander\": \"^13.1.0\",\n \"dotenv\": \"^16.4.7\",\n \"env-paths\": \"^3.0.0\",\n \"fuse.js\": \"^7.1.0\",\n \"jsdom\": \"^26.0.0\",\n \"langchain\": \"0.3.19\",\n \"pg\": \"^8.14.0\",\n \"playwright\": \"^1.52.0\",\n \"psl\": \"^1.15.0\",\n \"remark\": \"^15.0.1\",\n \"remark-gfm\": \"^4.0.1\",\n \"remark-html\": \"^16.0.1\",\n \"semver\": \"^7.7.1\",\n \"sqlite-vec\": \"^0.1.7-alpha.2\",\n \"turndown\": \"^7.2.0\",\n \"zod\": \"^3.24.2\"\n },\n \"devDependencies\": {\n \"@biomejs/biome\": \"1.9.4\",\n \"@commitlint/cli\": \"^19.8.0\",\n \"@commitlint/config-conventional\": \"^19.8.0\",\n \"@semantic-release/changelog\": \"^6.0.3\",\n \"@semantic-release/git\": \"^10.0.1\",\n \"@semantic-release/github\": \"^11.0.1\",\n \"@semantic-release/npm\": \"^12.0.1\",\n \"@types/better-sqlite3\": \"^7.6.12\",\n \"@types/jsdom\": \"~21.1.7\",\n \"@types/lint-staged\": \"~13.3.0\",\n \"@types/node\": \"^20.17.23\",\n \"@types/node-fetch\": \"^2.6.12\",\n \"@types/pg\": \"~8.11.11\",\n \"@types/psl\": \"^1.1.3\",\n \"@types/semver\": \"^7.5.8\",\n \"@types/turndown\": \"^5.0.5\",\n \"husky\": \"^9.1.7\",\n \"lint-staged\": \"^15.5.0\",\n \"memfs\": \"^4.17.0\",\n \"npm-run-all\": \"^4.1.5\",\n \"semantic-release\": \"^24.2.3\",\n \"tsup\": \"^8.4.0\",\n \"typescript\": \"^5.8.2\",\n \"vite\": \"^6.2.1\",\n \"vitest\": \"^3.0.8\"\n },\n \"engines\": {\n \"node\": \">=20.0.0\"\n },\n \"lint-staged\": {\n \"*.{js,ts,jsx,tsx,json,md}\": [\n \"biome check --apply --no-errors-on-unmatched\",\n \"biome format --write --no-errors-on-unmatched\"\n ]\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AACA,OAAO;AACP,SAAS,eAAe;;;ACFxB;AAAA,EACE,MAAQ;AAAA,EACR,SAAW;AAAA,EACX,aAAe;AAAA,EACf,MAAQ;AAAA,EACR,KAAO;AAAA,IACL,eAAe;AAAA,IACf,YAAY;AAAA,EACd;AAAA,EACA,SAAW;AAAA,EACX,YAAc;AAAA,IACZ,MAAQ;AAAA,IACR,KAAO;AAAA,EACT;AAAA,EACA,OAAS;AAAA,IACP;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA,SAAW;AAAA,IACT,SAAW;AAAA,IACX,OAAS;AAAA,IACT,KAAO;AAAA,IACP,OAAS;AAAA,IACT,WAAW;AAAA,IACX,QAAU;AAAA,IACV,cAAc;AAAA,IACd,MAAQ;AAAA,IACR,cAAc;AAAA,IACd,iBAAiB;AAAA,IACjB,MAAQ;AAAA,IACR,QAAU;AAAA,IACV,aAAe;AAAA,EACjB;AAAA,EACA,cAAgB;AAAA,IACd,+BAA+B;AAAA,IAC/B,kBAAkB;AAAA,IAClB,wBAAwB;AAAA,IACxB,2BAA2B;AAAA,IAC3B,8BAA8B;AAAA,IAC9B,qBAAqB;AAAA,IACrB,6BAA6B;AAAA,IAC7B,OAAS;AAAA,IACT,eAAe;AAAA,IACf,kBAAkB;AAAA,IAClB,SAAW;AAAA,IACX,WAAa;AAAA,IACb,QAAU;AAAA,IACV,aAAa;AAAA,IACb,WAAW;AAAA,IACX,OAAS;AAAA,IACT,WAAa;AAAA,IACb,IAAM;AAAA,IACN,YAAc;AAAA,IACd,KAAO;AAAA,IACP,QAAU;AAAA,IACV,cAAc;AAAA,IACd,eAAe;AAAA,IACf,QAAU;AAAA,IACV,cAAc;AAAA,IACd,UAAY;AAAA,IACZ,KAAO;AAAA,EACT;AAAA,EACA,iBAAmB;AAAA,IACjB,kBAAkB;AAAA,IAClB,mBAAmB;AAAA,IACnB,mCAAmC;AAAA,IACnC,+BAA+B;AAAA,IAC/B,yBAAyB;AAAA,IACzB,4BAA4B;AAAA,IAC5B,yBAAyB;AAAA,IACzB,yBAAyB;AAAA,IACzB,gBAAgB;AAAA,IAChB,sBAAsB;AAAA,IACtB,eAAe;AAAA,IACf,qBAAqB;AAAA,IACrB,aAAa;AAAA,IACb,cAAc;AAAA,IACd,iBAAiB;AAAA,IACjB,mBAAmB;AAAA,IACnB,OAAS;AAAA,IACT,eAAe;AAAA,IACf,OAAS;AAAA,IACT,eAAe;AAAA,IACf,oBAAoB;AAAA,IACpB,MAAQ;AAAA,IACR,YAAc;AAAA,IACd,MAAQ;AAAA,IACR,QAAU;AAAA,EACZ;AAAA,EACA,SAAW;AAAA,IACT,MAAQ;AAAA,EACV;AAAA,EACA,eAAe;AAAA,IACb,6BAA6B;AAAA,MAC3B;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;;;AD9EA,IAAM,eAAe,CAAC,SAAkB,KAAK,UAAU,MAAM,MAAM,CAAC;AAEpE,eAAe,OAAO;AACpB,MAAI;AACJ,MAAI;AAEJ,MAAI;AACF,iBAAa,IAAI,0BAA0B;AAC3C,UAAM,WAAW,WAAW;AAG5B,sBAAkB,IAAI,gBAAgB,UAAU;AAEhD,UAAM,gBAAgB,MAAM;AAE5B,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,kBAAkB,UAAU;AAAA,MAC/C,aAAa,IAAI,gBAAgB,UAAU;AAAA,MAC3C,QAAQ,IAAI,WAAW,YAAY,eAAe;AAAA;AAAA,MAClD,QAAQ,IAAI,WAAW,UAAU;AAAA,MACjC,UAAU,IAAI,aAAa,IAAI,YAAY,GAAG,IAAI,YAAY,CAAC;AAAA,IACjE;AAEA,UAAM,UAAU,IAAI,QAAQ;AAG5B,YAAQ,GAAG,UAAU,YAAY;AAC/B,UAAI,gBAAiB,OAAM,gBAAgB,KAAK;AAChD,UAAI,WAAY,OAAM,WAAW,SAAS;AAC1C,cAAQ,KAAK,CAAC;AAAA,IAChB,CAAC;AAED,YACG,KAAK,UAAU,EACf,YAAY,6CAA6C,EACzD,QAAQ,gBAAY,OAAO,EAE3B,OAAO,aAAa,kCAAkC,KAAK,EAC3D,OAAO,YAAY,qCAAqC,KAAK;AAEhE,YACG,QAAQ,wBAAwB,EAChC,YAAY,2CAA2C,EACvD,OAAO,0BAA0B,mCAAmC,EACpE;AAAA,MACC;AAAA,MACA;AAAA,MACA,kBAAkB,SAAS;AAAA,IAC7B,EACC;AAAA,MACC;AAAA,MACA;AAAA,MACA,kBAAkB,SAAS;AAAA,IAC7B,EACC;AAAA,MACC;AAAA,MACA;AAAA,MACA,wBAAwB,SAAS;AAAA,IACnC,EACC,OAAO,mBAAmB,iCAAiC,IAAI,EAC/D;AAAA,MACC;AAAA,MACA;AAAA,MACA,CAAC,UAAU;AACT,cAAM,cAAc,CAAC,YAAY,YAAY,QAAQ;AACrD,YAAI,CAAC,YAAY,SAAS,KAAK,GAAG;AAChC,kBAAQ,KAAK,2BAA2B,KAAK,8BAA8B;AAC3E,iBAAO;AAAA,QACT;AACA,eAAO;AAAA,MACT;AAAA,MACA;AAAA,IACF,EACC;AAAA,MACC;AAAA,MACA;AAAA,IACF,EACC;AAAA,MACC;AAAA,MACA,iDAA8C,oCAA4B,wBAAsB;AAAA,MAChG,CAAC,UAA8B;AAC7B,cAAM,aAAa,OAAO,OAAO,UAAU;AAC3C,YAAI,CAAC,WAAW,SAAS,KAAmB,GAAG;AAC7C,kBAAQ;AAAA,YACN,iCAAiC,KAAK,sCAAoC;AAAA,UAC5E;AACA;AAAA,QACF;AACA,eAAO;AAAA,MACT;AAAA;AAAA;AAAA,IAEF,EACC,OAAO,OAAO,SAAS,KAAK,YAAY;AAEvC,YAAM,SAAS,MAAM,MAAM,OAAO,QAAQ;AAAA,QACxC;AAAA,QACA;AAAA,QACA,SAAS,QAAQ;AAAA;AAAA,QACjB,SAAS;AAAA,UACP,UAAU,OAAO,SAAS,QAAQ,QAAQ;AAAA,UAC1C,UAAU,OAAO,SAAS,QAAQ,QAAQ;AAAA,UAC1C,gBAAgB,OAAO,SAAS,QAAQ,cAAc;AAAA,UACtD,cAAc,QAAQ;AAAA,UACtB,OAAO,QAAQ;AAAA,UACf,iBAAiB,QAAQ;AAAA;AAAA,UACzB,YAAY,QAAQ;AAAA;AAAA,QACtB;AAAA;AAAA,MAEF,CAAC;AAED,UAAI,kBAAkB,QAAQ;AAC5B,gBAAQ,IAAI,+BAA0B,OAAO,YAAY,QAAQ;AAAA,MACnE,OAAO;AAEL,gBAAQ,IAAI,2CAAoC,OAAO,KAAK,EAAE;AAAA,MAChE;AAAA,IACF,CAAC;AAEH,YACG,QAAQ,0BAA0B,EAClC;AAAA,MA
CC;AAAA,IAKF,EACC;AAAA,MACC;AAAA;AAAA,MACA;AAAA,IACF,EACC,OAAO,wBAAwB,6BAA6B,GAAG,EAC/D;AAAA,MACC;AAAA,MACA;AAAA,MACA;AAAA,IACF,EACC,OAAO,OAAO,SAAS,OAAO,YAAY;AAEzC,YAAM,SAAS,MAAM,MAAM,OAAO,QAAQ;AAAA,QACxC;AAAA,QACA,SAAS,QAAQ;AAAA;AAAA,QACjB;AAAA,QACA,OAAO,OAAO,SAAS,QAAQ,KAAK;AAAA,QACpC,YAAY,QAAQ;AAAA,MACtB,CAAC;AACD,cAAQ,IAAI,aAAa,OAAO,OAAO,CAAC;AAAA,IAC1C,CAAC;AAEH,YACG,QAAQ,MAAM,EACd,YAAY,iDAAiD,EAC7D,OAAO,YAAY;AAClB,YAAM,SAAS,MAAM,MAAM,cAAc,QAAQ;AACjD,cAAQ,IAAI,aAAa,OAAO,SAAS,CAAC;AAAA,IAC5C,CAAC;AAEH,YACG,QAAQ,wBAAwB,EAChC,YAAY,8CAA8C,EAC1D;AAAA,MACC;AAAA;AAAA,MACA;AAAA,IACF,EACC,OAAO,OAAO,SAAS,YAAY;AAElC,YAAM,cAAc,MAAM,MAAM,YAAY,QAAQ;AAAA,QAClD;AAAA,QACA,eAAe,QAAQ;AAAA;AAAA,MACzB,CAAC;AAED,UAAI,CAAC,aAAa;AAEhB,cAAM,IAAI,MAAM,mCAAmC;AAAA,MACrD;AACA,cAAQ,IAAI,WAAW;AAAA,IACzB,CAAC;AAEH,YACG,QAAQ,kBAAkB,EAC1B,YAAY,qDAAqD,EACjE;AAAA,MACC;AAAA,MACA;AAAA,IACF,EACC,OAAO,OAAO,SAAS,YAAY;AAElC,UAAI,CAAC,YAAY;AACf,cAAM,IAAI,MAAM,mCAAmC;AAAA,MACrD;AACA,YAAM,EAAE,QAAQ,IAAI;AACpB,UAAI;AACF,cAAM,WAAW,mBAAmB,SAAS,OAAO;AACpD,gBAAQ;AAAA,UACN,6CAAwC,OAAO,GAAG,UAAU,IAAI,OAAO,KAAK,gBAAgB;AAAA,QAC9F;AAAA,MACF,SAAS,OAAO;AACd,gBAAQ;AAAA,UACN,yCAAoC,OAAO,GAAG,UAAU,IAAI,OAAO,KAAK,gBAAgB;AAAA,UACxF,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACvD;AAEA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAEH,YACG,QAAQ,iBAAiB,EACzB,YAAY,iDAAiD,EAC7D;AAAA,MACC;AAAA,MACA;AAAA,IACF,EACC;AAAA,MACC;AAAA,MACA,iDAA8C,oCAA4B,wBAAsB;AAAA,MAChG,CAAC,UAA8B;AAC7B,cAAM,aAAa,OAAO,OAAO,UAAU;AAC3C,YAAI,CAAC,WAAW,SAAS,KAAmB,GAAG;AAC7C,kBAAQ;AAAA,YACN,iCAAiC,KAAK,sCAAoC;AAAA,UAC5E;AACA;AAAA,QACF;AACA,eAAO;AAAA,MACT;AAAA;AAAA;AAAA,IAEF,EACC,OAAO,OAAO,KAAK,YAAY;AAC9B,YAAM,UAAU,MAAM,MAAM,SAAS,QAAQ;AAAA,QAC3C;AAAA,QACA,iBAAiB,QAAQ;AAAA,QACzB,YAAY,QAAQ;AAAA;AAAA,MACtB,CAAC;AACD,cAAQ,IAAI,OAAO;AAAA,IACrB,CAAC;AAGH,YAAQ,KAAK,aAAa,CAAC,gBAAgB;AAEzC,YAAM,UAAU,YAAY,KAAK;AACjC,UAAI,QAAQ,QAAQ;AAElB,iCAA0B;AAAA,MAC5B,WAAW,QAAQ,SAAS;AAC1B,iCAA0B;AAAA,MAC5B;AAAA,IAEF,CAAC;AAED,UAAM,QAAQ,WAAW;AAAA,EAC3B,SAAS,OAAO;AACd,YAAQ,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAC9E,QAAI,gBAAiB,OAAM,gBAAgB,KAAK;AAChD,QAAI,WAAY,OAAM,WAAW,SAAS;AAC1C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAGA,MAAI,gBAAiB,OAAM,gBAAgB,KAAK;AAChD,QAAM,WAAW,SAAS;AAC1B,UAAQ,KAAK,CAAC;AAChB;AAEA,KAAK,EAAE,MAAM,CAAC,UAAU;AACtB,UAAQ,MAAM,gBAAgB,KAAK;AACnC,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":[]}
+ {"version":3,"file":"cli.js","sources":["../src/cli.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { Command } from \"commander\";\nimport packageJson from \"../package.json\";\nimport { PipelineManager } from \"./pipeline/PipelineManager\";\nimport { FileFetcher, HttpFetcher } from \"./scraper/fetcher\";\nimport { ScrapeMode } from \"./scraper/types\"; // Import ScrapeMode enum\nimport { DocumentManagementService } from \"./store/DocumentManagementService\";\nimport {\n FetchUrlTool,\n FindVersionTool,\n ListLibrariesTool,\n ScrapeTool,\n SearchTool,\n} from \"./tools\";\nimport {\n DEFAULT_MAX_CONCURRENCY,\n DEFAULT_MAX_DEPTH,\n DEFAULT_MAX_PAGES,\n} from \"./utils/config\";\nimport { LogLevel, setLogLevel } from \"./utils/logger\";\n\nconst formatOutput = (data: unknown) => JSON.stringify(data, null, 2);\n\nasync function main() {\n let docService: DocumentManagementService | undefined;\n let pipelineManager: PipelineManager | undefined;\n\n try {\n docService = new DocumentManagementService();\n await docService.initialize();\n\n // Instantiate PipelineManager for CLI use\n pipelineManager = new PipelineManager(docService); // Assign inside try\n // Start the manager for the CLI session\n await pipelineManager.start();\n\n const tools = {\n listLibraries: new ListLibrariesTool(docService),\n findVersion: new FindVersionTool(docService),\n scrape: new ScrapeTool(docService, pipelineManager), // Pass manager\n search: new SearchTool(docService),\n fetchUrl: new FetchUrlTool(new HttpFetcher(), new FileFetcher()),\n };\n\n const program = new Command();\n\n // Handle cleanup on SIGINT\n process.on(\"SIGINT\", async () => {\n if (pipelineManager) await pipelineManager.stop(); // Check before stopping\n if (docService) await docService.shutdown(); // Check before stopping\n process.exit(0);\n });\n\n program\n .name(\"docs-mcp\")\n .description(\"CLI for managing documentation vector store\")\n .version(packageJson.version)\n // Add global options for logging level\n .option(\"--verbose\", \"Enable verbose (debug) logging\", false)\n .option(\"--silent\", \"Disable all logging except errors\", false);\n\n program\n .command(\"scrape <library> <url>\") // Remove <version> as positional\n .description(\"Scrape and index documentation from a URL\")\n .option(\"-v, --version <string>\", \"Version of the library (optional)\") // Add optional version flag\n .option(\n \"-p, --max-pages <number>\",\n \"Maximum pages to scrape\",\n DEFAULT_MAX_PAGES.toString(),\n )\n .option(\n \"-d, --max-depth <number>\",\n \"Maximum navigation depth\",\n DEFAULT_MAX_DEPTH.toString(),\n )\n .option(\n \"-c, --max-concurrency <number>\",\n \"Maximum concurrent page requests\",\n DEFAULT_MAX_CONCURRENCY.toString(),\n )\n .option(\"--ignore-errors\", \"Ignore errors during scraping\", true)\n .option(\n \"--scope <scope>\",\n \"Crawling boundary: 'subpages' (default), 'hostname', or 'domain'\",\n (value) => {\n const validScopes = [\"subpages\", \"hostname\", \"domain\"];\n if (!validScopes.includes(value)) {\n console.warn(`Warning: Invalid scope '${value}'. 
Using default 'subpages'.`);\n return \"subpages\";\n }\n return value;\n },\n \"subpages\",\n )\n .option(\n \"--no-follow-redirects\",\n \"Disable following HTTP redirects (default: follow redirects)\",\n )\n .option(\n \"--scrape-mode <mode>\",\n `HTML processing strategy: '${ScrapeMode.Fetch}', '${ScrapeMode.Playwright}', '${ScrapeMode.Auto}' (default)`,\n (value: string): ScrapeMode => {\n const validModes = Object.values(ScrapeMode);\n if (!validModes.includes(value as ScrapeMode)) {\n console.warn(\n `Warning: Invalid scrape mode '${value}'. Using default '${ScrapeMode.Auto}'.`,\n );\n return ScrapeMode.Auto;\n }\n return value as ScrapeMode; // Cast to enum type\n },\n ScrapeMode.Auto, // Use enum default\n )\n .action(async (library, url, options) => {\n // Update action parameters\n const result = await tools.scrape.execute({\n url,\n library,\n version: options.version, // Get version from options\n options: {\n maxPages: Number.parseInt(options.maxPages),\n maxDepth: Number.parseInt(options.maxDepth),\n maxConcurrency: Number.parseInt(options.maxConcurrency),\n ignoreErrors: options.ignoreErrors,\n scope: options.scope,\n followRedirects: options.followRedirects, // This will be `true` by default, or `false` if --no-follow-redirects is used\n scrapeMode: options.scrapeMode, // Pass the new scrapeMode option\n },\n // CLI always waits for completion (default behavior)\n });\n // Type guard to satisfy TypeScript\n if (\"pagesScraped\" in result) {\n console.log(`✅ Successfully scraped ${result.pagesScraped} pages`);\n } else {\n // This branch should not be hit by the CLI\n console.log(`🚀 Scraping job started with ID: ${result.jobId}`);\n }\n });\n\n program\n .command(\"search <library> <query>\") // Remove <version> as positional\n .description(\n \"Search documents in a library. 
Version matching examples:\\n\" +\n \" - search react --version 18.0.0 'hooks' -> matches docs for React 18.0.0 or earlier versions\\n\" +\n \" - search react --version 18.0.0 'hooks' --exact-match -> only matches React 18.0.0\\n\" +\n \" - search typescript --version 5.x 'types' -> matches any TypeScript 5.x.x version\\n\" +\n \" - search typescript --version 5.2.x 'types' -> matches any TypeScript 5.2.x version\",\n )\n .option(\n \"-v, --version <string>\", // Add optional version flag\n \"Version of the library (optional, supports ranges)\",\n )\n .option(\"-l, --limit <number>\", \"Maximum number of results\", \"5\")\n .option(\n \"-e, --exact-match\",\n \"Only use exact version match (e.g., '18.0.0' matches only 18.0.0, not 17.x.x) (default: false)\",\n false,\n )\n .action(async (library, query, options) => {\n // Update action parameters\n const result = await tools.search.execute({\n library,\n version: options.version, // Get version from options\n query,\n limit: Number.parseInt(options.limit),\n exactMatch: options.exactMatch,\n });\n console.log(formatOutput(result.results));\n });\n\n program\n .command(\"list\")\n .description(\"List all available libraries and their versions\")\n .action(async () => {\n const result = await tools.listLibraries.execute();\n console.log(formatOutput(result.libraries));\n });\n\n program\n .command(\"find-version <library>\") // Remove [targetVersion] positional\n .description(\"Find the best matching version for a library\")\n .option(\n \"-v, --version <string>\", // Add optional version flag\n \"Pattern to match (optional, supports ranges)\",\n )\n .action(async (library, options) => {\n // Update action parameters\n const versionInfo = await tools.findVersion.execute({\n library,\n targetVersion: options.version, // Get version from options\n });\n // findVersion.execute now returns a string, handle potential error messages within it\n if (!versionInfo) {\n // Should not happen with current tool logic, but good practice\n throw new Error(\"Failed to get version information\");\n }\n console.log(versionInfo); // Log the descriptive string from the tool\n });\n\n program\n .command(\"remove <library>\") // Library as positional argument\n .description(\"Remove documents for a specific library and version\")\n .option(\n \"-v, --version <string>\",\n \"Version to remove (optional, removes unversioned if omitted)\",\n )\n .action(async (library, options) => {\n // library is now the first arg\n if (!docService) {\n throw new Error(\"Document service not initialized.\");\n }\n const { version } = options; // Get version from options\n try {\n await docService.removeAllDocuments(library, version);\n console.log(\n `✅ Successfully removed documents for ${library}${version ? `@${version}` : \" (unversioned)\"}.`,\n );\n } catch (error) {\n console.error(\n `❌ Failed to remove documents for ${library}${version ? `@${version}` : \" (unversioned)\"}:`,\n error instanceof Error ? 
error.message : String(error),\n );\n // Re-throw to trigger the main catch block for shutdown\n throw error;\n }\n });\n\n program\n .command(\"fetch-url <url>\")\n .description(\"Fetch a URL and convert its content to Markdown\")\n .option(\n \"--no-follow-redirects\",\n \"Disable following HTTP redirects (default: follow redirects)\",\n )\n .option(\n \"--scrape-mode <mode>\",\n `HTML processing strategy: '${ScrapeMode.Fetch}', '${ScrapeMode.Playwright}', '${ScrapeMode.Auto}' (default)`,\n (value: string): ScrapeMode => {\n const validModes = Object.values(ScrapeMode);\n if (!validModes.includes(value as ScrapeMode)) {\n console.warn(\n `Warning: Invalid scrape mode '${value}'. Using default '${ScrapeMode.Auto}'.`,\n );\n return ScrapeMode.Auto;\n }\n return value as ScrapeMode; // Cast to enum type\n },\n ScrapeMode.Auto, // Use enum default\n )\n .action(async (url, options) => {\n const content = await tools.fetchUrl.execute({\n url,\n followRedirects: options.followRedirects,\n scrapeMode: options.scrapeMode, // Pass the scrapeMode option\n });\n console.log(content);\n });\n\n // Hook to set log level after parsing global options but before executing command action\n program.hook(\"preAction\", (thisCommand) => {\n // Global options are attached to the program (thisCommand)\n const options = thisCommand.opts();\n if (options.silent) {\n // If silent is true, it overrides verbose\n setLogLevel(LogLevel.ERROR);\n } else if (options.verbose) {\n setLogLevel(LogLevel.DEBUG);\n }\n // Otherwise, the default LogLevel.INFO remains set from logger.ts\n });\n\n await program.parseAsync();\n } catch (error) {\n console.error(\"Error:\", error instanceof Error ? error.message : String(error));\n if (pipelineManager) await pipelineManager.stop(); // Check before stopping\n if (docService) await docService.shutdown();\n process.exit(1);\n }\n\n // Clean shutdown after successful execution\n if (pipelineManager) await pipelineManager.stop(); // Check before stopping\n await docService.shutdown();\n process.exit(0);\n}\n\nmain().catch((error) => {\n console.error(\"Fatal error:\", error);\n 
process.exit(1);\n});\n"],"names":["version"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsBA,MAAM,eAAe,CAAC,SAAkB,KAAK,UAAU,MAAM,MAAM,CAAC;AAEpE,eAAe,OAAO;AAChB,MAAA;AACA,MAAA;AAEA,MAAA;AACF,iBAAa,IAAI,0BAA0B;AAC3C,UAAM,WAAW,WAAW;AAGV,sBAAA,IAAI,gBAAgB,UAAU;AAEhD,UAAM,gBAAgB,MAAM;AAE5B,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,kBAAkB,UAAU;AAAA,MAC/C,aAAa,IAAI,gBAAgB,UAAU;AAAA,MAC3C,QAAQ,IAAI,WAAW,YAAY,eAAe;AAAA;AAAA,MAClD,QAAQ,IAAI,WAAW,UAAU;AAAA,MACjC,UAAU,IAAI,aAAa,IAAI,YAAe,GAAA,IAAI,YAAa,CAAA;AAAA,IACjE;AAEM,UAAA,UAAU,IAAI,QAAQ;AAGpB,YAAA,GAAG,UAAU,YAAY;AAC3B,UAAA,gBAAuB,OAAA,gBAAgB,KAAK;AAC5C,UAAA,WAAkB,OAAA,WAAW,SAAS;AAC1C,cAAQ,KAAK,CAAC;AAAA,IAAA,CACf;AAED,YACG,KAAK,UAAU,EACf,YAAY,6CAA6C,EACzD,QAAQ,YAAY,OAAO,EAE3B,OAAO,aAAa,kCAAkC,KAAK,EAC3D,OAAO,YAAY,qCAAqC,KAAK;AAG7D,YAAA,QAAQ,wBAAwB,EAChC,YAAY,2CAA2C,EACvD,OAAO,0BAA0B,mCAAmC,EACpE;AAAA,MACC;AAAA,MACA;AAAA,MACA,kBAAkB,SAAS;AAAA,IAAA,EAE5B;AAAA,MACC;AAAA,MACA;AAAA,MACA,kBAAkB,SAAS;AAAA,IAAA,EAE5B;AAAA,MACC;AAAA,MACA;AAAA,MACA,wBAAwB,SAAS;AAAA,IAElC,EAAA,OAAO,mBAAmB,iCAAiC,IAAI,EAC/D;AAAA,MACC;AAAA,MACA;AAAA,MACA,CAAC,UAAU;AACT,cAAM,cAAc,CAAC,YAAY,YAAY,QAAQ;AACrD,YAAI,CAAC,YAAY,SAAS,KAAK,GAAG;AACxB,kBAAA,KAAK,2BAA2B,KAAK,8BAA8B;AACpE,iBAAA;AAAA,QAAA;AAEF,eAAA;AAAA,MACT;AAAA,MACA;AAAA,IAAA,EAED;AAAA,MACC;AAAA,MACA;AAAA,IAAA,EAED;AAAA,MACC;AAAA,MACA,8BAA8B,WAAW,KAAK,OAAO,WAAW,UAAU,OAAO,WAAW,IAAI;AAAA,MAChG,CAAC,UAA8B;AACvB,cAAA,aAAa,OAAO,OAAO,UAAU;AAC3C,YAAI,CAAC,WAAW,SAAS,KAAmB,GAAG;AACrC,kBAAA;AAAA,YACN,iCAAiC,KAAK,qBAAqB,WAAW,IAAI;AAAA,UAC5E;AACA,iBAAO,WAAW;AAAA,QAAA;AAEb,eAAA;AAAA,MACT;AAAA,MACA,WAAW;AAAA;AAAA,IAEZ,EAAA,OAAO,OAAO,SAAS,KAAK,YAAY;AAEvC,YAAM,SAAS,MAAM,MAAM,OAAO,QAAQ;AAAA,QACxC;AAAA,QACA;AAAA,QACA,SAAS,QAAQ;AAAA;AAAA,QACjB,SAAS;AAAA,UACP,UAAU,OAAO,SAAS,QAAQ,QAAQ;AAAA,UAC1C,UAAU,OAAO,SAAS,QAAQ,QAAQ;AAAA,UAC1C,gBAAgB,OAAO,SAAS,QAAQ,cAAc;AAAA,UACtD,cAAc,QAAQ;AAAA,UACtB,OAAO,QAAQ;AAAA,UACf,iBAAiB,QAAQ;AAAA;AAAA,UACzB,YAAY,QAAQ;AAAA;AAAA,QAAA;AAAA;AAAA,MACtB,CAED;AAED,UAAI,kBAAkB,QAAQ;AAC5B,gBAAQ,IAAI,0BAA0B,OAAO,YAAY,QAAQ;AAAA,MAAA,OAC5D;AAEL,gBAAQ,IAAI,oCAAoC,OAAO,KAAK,EAAE;AAAA,MAAA;AAAA,IAChE,CACD;AAGA,YAAA,QAAQ,0BAA0B,EAClC;AAAA,MACC;AAAA,IAAA,EAMD;AAAA,MACC;AAAA;AAAA,MACA;AAAA,IAED,EAAA,OAAO,wBAAwB,6BAA6B,GAAG,EAC/D;AAAA,MACC;AAAA,MACA;AAAA,MACA;AAAA,IAED,EAAA,OAAO,OAAO,SAAS,OAAO,YAAY;AAEzC,YAAM,SAAS,MAAM,MAAM,OAAO,QAAQ;AAAA,QACxC;AAAA,QACA,SAAS,QAAQ;AAAA;AAAA,QACjB;AAAA,QACA,OAAO,OAAO,SAAS,QAAQ,KAAK;AAAA,QACpC,YAAY,QAAQ;AAAA,MAAA,CACrB;AACD,cAAQ,IAAI,aAAa,OAAO,OAAO,CAAC;AAAA,IAAA,CACzC;AAEH,YACG,QAAQ,MAAM,EACd,YAAY,iDAAiD,EAC7D,OAAO,YAAY;AAClB,YAAM,SAAS,MAAM,MAAM,cAAc,QAAQ;AACjD,cAAQ,IAAI,aAAa,OAAO,SAAS,CAAC;AAAA,IAAA,CAC3C;AAEH,YACG,QAAQ,wBAAwB,EAChC,YAAY,8CAA8C,EAC1D;AAAA,MACC;AAAA;AAAA,MACA;AAAA,IAAA,EAED,OAAO,OAAO,SAAS,YAAY;AAElC,YAAM,cAAc,MAAM,MAAM,YAAY,QAAQ;AAAA,QAClD;AAAA,QACA,eAAe,QAAQ;AAAA;AAAA,MAAA,CACxB;AAED,UAAI,CAAC,aAAa;AAEV,cAAA,IAAI,MAAM,mCAAmC;AAAA,MAAA;AAErD,cAAQ,IAAI,WAAW;AAAA,IAAA,CACxB;AAEH,YACG,QAAQ,kBAAkB,EAC1B,YAAY,qDAAqD,EACjE;AAAA,MACC;AAAA,MACA;AAAA,IAAA,EAED,OAAO,OAAO,SAAS,YAAY;AAElC,UAAI,CAAC,YAAY;AACT,cAAA,IAAI,MAAM,mCAAmC;AAAA,MAAA;AAE/C,YAAA,EAAE,SAAAA,aAAY;AAChB,UAAA;AACI,cAAA,WAAW,mBAAmB,SAASA,QAAO;AAC5C,gBAAA;AAAA,UACN,wCAAwC,OAAO,GAAGA,WAAU,IAAIA,QAAO,KAAK,gBAAgB;AAAA,QAC9F;AAAA,eACO,OAAO;AACN,gBAAA;AAAA,UACN,oCAAoC,OAAO,GAAGA,WAAU,IAAIA,QAAO,KAAK,gBAAgB;AAAA,UACxF,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACvD;AAEM,cAAA;AAAA,MAAA;AAAA,IACR,CACD;AAEH,YACG,QAAQ,iBAAiB,EACzB,YAAY,iDAAiD,EAC7D;AAAA,MACC;AAAA,MACA;AAAA,IAAA,EAED;AAAA,MACC;AAAA,MACA,8BAA8B,WAAW,KAAK,OAAO,WAAW,UAAU,OAAO,WAAW,IAAI;AA
AA,MAChG,CAAC,UAA8B;AACvB,cAAA,aAAa,OAAO,OAAO,UAAU;AAC3C,YAAI,CAAC,WAAW,SAAS,KAAmB,GAAG;AACrC,kBAAA;AAAA,YACN,iCAAiC,KAAK,qBAAqB,WAAW,IAAI;AAAA,UAC5E;AACA,iBAAO,WAAW;AAAA,QAAA;AAEb,eAAA;AAAA,MACT;AAAA,MACA,WAAW;AAAA;AAAA,IAAA,EAEZ,OAAO,OAAO,KAAK,YAAY;AAC9B,YAAM,UAAU,MAAM,MAAM,SAAS,QAAQ;AAAA,QAC3C;AAAA,QACA,iBAAiB,QAAQ;AAAA,QACzB,YAAY,QAAQ;AAAA;AAAA,MAAA,CACrB;AACD,cAAQ,IAAI,OAAO;AAAA,IAAA,CACpB;AAGK,YAAA,KAAK,aAAa,CAAC,gBAAgB;AAEnC,YAAA,UAAU,YAAY,KAAK;AACjC,UAAI,QAAQ,QAAQ;AAElB,oBAAY,SAAS,KAAK;AAAA,MAAA,WACjB,QAAQ,SAAS;AAC1B,oBAAY,SAAS,KAAK;AAAA,MAAA;AAAA,IAC5B,CAED;AAED,UAAM,QAAQ,WAAW;AAAA,WAClB,OAAO;AACN,YAAA,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAC1E,QAAA,gBAAuB,OAAA,gBAAgB,KAAK;AAC5C,QAAA,WAAkB,OAAA,WAAW,SAAS;AAC1C,YAAQ,KAAK,CAAC;AAAA,EAAA;AAIZ,MAAA,gBAAuB,OAAA,gBAAgB,KAAK;AAChD,QAAM,WAAW,SAAS;AAC1B,UAAQ,KAAK,CAAC;AAChB;AAEA,OAAO,MAAM,CAAC,UAAU;AACd,UAAA,MAAM,gBAAgB,KAAK;AACnC,UAAQ,KAAK,CAAC;AAChB,CAAC;"}