docshark 0.1.6 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +4 -0
- package/README.md +6 -6
- package/dist/cli.d.ts +1 -1
- package/dist/cli.js +38 -38
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/package.json +6 -2
package/CHANGELOG.md
CHANGED
package/README.md
CHANGED
|
@@ -49,14 +49,14 @@ We are actively polishing the integration between the core engine and external M
|
|
|
49
49
|
|
|
50
50
|
### Quick Start (from npm)
|
|
51
51
|
|
|
52
|
-
You can run DocShark directly without installing it globally using `
|
|
52
|
+
You can run DocShark directly without installing it globally using `bunx`:
|
|
53
53
|
|
|
54
54
|
```bash
|
|
55
55
|
# Add a documentation library to the index
|
|
56
|
-
|
|
56
|
+
bunx docshark add https://valibot.dev/guides/ --depth 2
|
|
57
57
|
|
|
58
58
|
# Search your indexed docs
|
|
59
|
-
|
|
59
|
+
bunx docshark search "schema validation"
|
|
60
60
|
```
|
|
61
61
|
|
|
62
62
|
### Installation
|
|
@@ -87,7 +87,7 @@ Add DocShark to your `.vscode/settings.json` or global MCP configuration:
|
|
|
87
87
|
{
|
|
88
88
|
"mcpServers": {
|
|
89
89
|
"docshark": {
|
|
90
|
-
"command": "
|
|
90
|
+
"command": "bunx",
|
|
91
91
|
"args": ["-y", "docshark", "start", "--stdio"]
|
|
92
92
|
}
|
|
93
93
|
}
|
|
@@ -100,7 +100,7 @@ Add DocShark to your `.vscode/settings.json` or global MCP configuration:
|
|
|
100
100
|
2. Click **+ Add New MCP Server**.
|
|
101
101
|
3. Name: `docshark`
|
|
102
102
|
4. Type: `command`
|
|
103
|
-
5. Command: `
|
|
103
|
+
5. Command: `bunx -y docshark start --stdio`
|
|
104
104
|
|
|
105
105
|
### Claude Desktop
|
|
106
106
|
|
|
@@ -113,7 +113,7 @@ Edit your Claude Desktop configuration file:
|
|
|
113
113
|
{
|
|
114
114
|
"mcpServers": {
|
|
115
115
|
"docshark": {
|
|
116
|
-
"command": "
|
|
116
|
+
"command": "bunx",
|
|
117
117
|
"args": ["-y", "docshark", "start", "--stdio"]
|
|
118
118
|
}
|
|
119
119
|
}
|
package/dist/cli.d.ts
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
#!/usr/bin/env
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
2
|
export {};
|
package/dist/cli.js
CHANGED
|
@@ -1,20 +1,20 @@
|
|
|
1
|
-
#!/usr/bin/env
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
2
|
// src/cli.ts — DocShark CLI entry point
|
|
3
|
-
import { Command } from
|
|
4
|
-
import { startHttpServer } from
|
|
5
|
-
import { StdioTransport } from
|
|
6
|
-
import { server, db, searchEngine, libraryService } from
|
|
7
|
-
import { VERSION } from
|
|
3
|
+
import { Command } from "commander";
|
|
4
|
+
import { startHttpServer } from "./http.js";
|
|
5
|
+
import { StdioTransport } from "@tmcp/transport-stdio";
|
|
6
|
+
import { server, db, searchEngine, libraryService } from "./server.js";
|
|
7
|
+
import { VERSION } from "./version.js";
|
|
8
8
|
const program = new Command()
|
|
9
|
-
.name(
|
|
10
|
-
.description(
|
|
11
|
-
.version(VERSION,
|
|
9
|
+
.name("docshark")
|
|
10
|
+
.description("🦈 Documentation MCP Server — scrape, index, and search any doc website")
|
|
11
|
+
.version(VERSION, "-v, --version", "output the current version");
|
|
12
12
|
program
|
|
13
|
-
.command(
|
|
14
|
-
.description(
|
|
15
|
-
.option(
|
|
16
|
-
.option(
|
|
17
|
-
.option(
|
|
13
|
+
.command("start", { isDefault: true })
|
|
14
|
+
.description("Start the MCP server")
|
|
15
|
+
.option("-p, --port <port>", "HTTP server port", "6380")
|
|
16
|
+
.option("--stdio", "Run in STDIO mode (for Claude Desktop, Cursor, etc.)")
|
|
17
|
+
.option("--data-dir <path>", "Data directory", "")
|
|
18
18
|
.action(async (opts) => {
|
|
19
19
|
if (opts.dataDir) {
|
|
20
20
|
process.env.DOCSHARK_DATA_DIR = opts.dataDir;
|
|
@@ -30,11 +30,11 @@ program
|
|
|
30
30
|
}
|
|
31
31
|
});
|
|
32
32
|
program
|
|
33
|
-
.command(
|
|
34
|
-
.description(
|
|
35
|
-
.option(
|
|
36
|
-
.option(
|
|
37
|
-
.option(
|
|
33
|
+
.command("add <url>")
|
|
34
|
+
.description("Add a documentation library and start crawling")
|
|
35
|
+
.option("-n, --name <name>", "Library name (auto-generated from URL if omitted)")
|
|
36
|
+
.option("-d, --depth <n>", "Max crawl depth", "3")
|
|
37
|
+
.option("--lib-version <version>", "Library version")
|
|
38
38
|
.action(async (url, opts) => {
|
|
39
39
|
db.init();
|
|
40
40
|
try {
|
|
@@ -56,10 +56,10 @@ program
|
|
|
56
56
|
}
|
|
57
57
|
});
|
|
58
58
|
program
|
|
59
|
-
.command(
|
|
60
|
-
.description(
|
|
61
|
-
.option(
|
|
62
|
-
.option(
|
|
59
|
+
.command("search <query>")
|
|
60
|
+
.description("Search indexed documentation")
|
|
61
|
+
.option("-l, --library <name>", "Filter by library")
|
|
62
|
+
.option("--limit <n>", "Max results", "5")
|
|
63
63
|
.action(async (query, opts) => {
|
|
64
64
|
db.init();
|
|
65
65
|
const results = searchEngine.search(query, {
|
|
@@ -78,8 +78,8 @@ program
|
|
|
78
78
|
}
|
|
79
79
|
});
|
|
80
80
|
program
|
|
81
|
-
.command(
|
|
82
|
-
.description(
|
|
81
|
+
.command("list")
|
|
82
|
+
.description("List indexed libraries")
|
|
83
83
|
.action(() => {
|
|
84
84
|
db.init();
|
|
85
85
|
const libs = db.listLibraries();
|
|
@@ -93,19 +93,19 @@ program
|
|
|
93
93
|
Pages: l.page_count,
|
|
94
94
|
Chunks: l.chunk_count,
|
|
95
95
|
Status: l.status,
|
|
96
|
-
|
|
96
|
+
"Last Crawled": l.last_crawled_at || "never",
|
|
97
97
|
})));
|
|
98
98
|
});
|
|
99
99
|
program
|
|
100
|
-
.command(
|
|
101
|
-
.description(
|
|
100
|
+
.command("refresh <name>")
|
|
101
|
+
.description("Refresh an existing documentation library")
|
|
102
102
|
.action(async (name) => {
|
|
103
103
|
db.init();
|
|
104
104
|
try {
|
|
105
105
|
const lib = db.getLibraryByName(name);
|
|
106
106
|
if (!lib)
|
|
107
107
|
throw new Error(`Library "${name}" not found.`);
|
|
108
|
-
const { jobManager } = await import(
|
|
108
|
+
const { jobManager } = await import("./server.js");
|
|
109
109
|
const job = jobManager.startCrawl(lib.id, { incremental: true });
|
|
110
110
|
console.log(`\n🔄 Refreshing "${lib.display_name}" — crawling ${lib.url}...`);
|
|
111
111
|
console.log(` Job ID: ${job.id}`);
|
|
@@ -117,8 +117,8 @@ program
|
|
|
117
117
|
}
|
|
118
118
|
});
|
|
119
119
|
program
|
|
120
|
-
.command(
|
|
121
|
-
.description(
|
|
120
|
+
.command("remove <name>")
|
|
121
|
+
.description("Remove a documentation library and its index")
|
|
122
122
|
.action((name) => {
|
|
123
123
|
db.init();
|
|
124
124
|
try {
|
|
@@ -134,8 +134,8 @@ program
|
|
|
134
134
|
}
|
|
135
135
|
});
|
|
136
136
|
program
|
|
137
|
-
.command(
|
|
138
|
-
.description(
|
|
137
|
+
.command("get <url>")
|
|
138
|
+
.description("Get the full markdown content of a specific indexed page")
|
|
139
139
|
.action((url) => {
|
|
140
140
|
db.init();
|
|
141
141
|
const page = db.getPage({ url });
|
|
@@ -146,23 +146,23 @@ program
|
|
|
146
146
|
console.log(`\n--- ${page.title} ---`);
|
|
147
147
|
console.log(`Source: ${page.url}\n\n`);
|
|
148
148
|
console.log(page.content_markdown);
|
|
149
|
-
console.log(
|
|
149
|
+
console.log("\n");
|
|
150
150
|
});
|
|
151
151
|
program.parse();
|
|
152
152
|
/** Helper to wait for a crawl job to finish (CLI blocking mode) */
|
|
153
153
|
async function waitForCrawl(jobId) {
|
|
154
|
-
const { jobManager } = await import(
|
|
154
|
+
const { jobManager } = await import("./server.js");
|
|
155
155
|
return new Promise((resolve) => {
|
|
156
156
|
const check = () => {
|
|
157
157
|
const job = jobManager.getJob(jobId);
|
|
158
|
-
if (!job || job.status ===
|
|
159
|
-
if (job?.status ===
|
|
158
|
+
if (!job || job.status === "completed" || job.status === "failed") {
|
|
159
|
+
if (job?.status === "completed") {
|
|
160
160
|
console.log(`\n🦈 Crawl complete: ${job.pages_crawled} pages, ${job.chunks_created} chunks indexed.`);
|
|
161
161
|
if (job.pages_failed > 0) {
|
|
162
162
|
console.log(`  ⚠️ ${job.pages_failed} pages failed.`);
|
|
163
163
|
}
|
|
164
164
|
}
|
|
165
|
-
else if (job?.status ===
|
|
165
|
+
else if (job?.status === "failed") {
|
|
166
166
|
console.error(`\n❌ Crawl failed: ${job.error_message}`);
|
|
167
167
|
}
|
|
168
168
|
resolve();
|
package/dist/version.d.ts
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
export declare const VERSION = "0.1.
|
|
1
|
+
export declare const VERSION = "0.1.7";
|
package/dist/version.js
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
1
|
// This file is automatically updated by release-please
|
|
2
|
-
export const VERSION = '0.1.
|
|
2
|
+
export const VERSION = '0.1.7'; // x-release-please-version
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "docshark",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.7",
|
|
4
4
|
"description": "š¦ Documentation MCP Server ā scrape, index, and search any doc website",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "./dist/index.js",
|
|
@@ -26,10 +26,14 @@
|
|
|
26
26
|
"dev": "bun run --watch src/cli.ts start",
|
|
27
27
|
"cli": "bun run src/cli.ts",
|
|
28
28
|
"check": "tsc --noEmit",
|
|
29
|
-
"build": "rm -rf dist && tsc",
|
|
29
|
+
"build": "rm -rf dist && tsc && chmod +x dist/cli.js",
|
|
30
30
|
"prepublishOnly": "bun run build",
|
|
31
31
|
"test:crawl": "bun run src/cli.ts add https://svelte.dev/docs/svelte/overview"
|
|
32
32
|
},
|
|
33
|
+
"engines": {
|
|
34
|
+
"node": ">=20",
|
|
35
|
+
"bun": ">=1.1.0"
|
|
36
|
+
},
|
|
33
37
|
"keywords": [
|
|
34
38
|
"tmcp",
|
|
35
39
|
"mcp",
|