feathersjs-docs-mcp 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +102 -0
- package/dist/config.js +72 -0
- package/dist/docs/discover.js +28 -0
- package/dist/docs/parse.js +38 -0
- package/dist/docs/sync.js +49 -0
- package/dist/index.js +218 -0
- package/dist/indexer.js +63 -0
- package/dist/search/embed.js +19 -0
- package/dist/search/vector-store.js +67 -0
- package/dist/transport.js +16 -0
- package/dist/types.js +1 -0
- package/dist/utils.js +25 -0
- package/package.json +64 -0
package/README.md
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
# feathersjs-docs-mcp
|
|
2
|
+
|
|
3
|
+
Read-only MCP server for discovering and reading **FeathersJS documentation** from the official Feathers repository.
|
|
4
|
+
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
## Features
|
|
8
|
+
|
|
9
|
+
- Syncs the Feathers docs repository into a local cache
|
|
10
|
+
- Indexes markdown pages from `docs/`
|
|
11
|
+
- Exposes MCP tools for listing, reading, refreshing, and status checks
|
|
12
|
+
- Exposes `feathers-doc://docs/{path}` as a markdown resource template
|
|
13
|
+
|
|
14
|
+
## MCP Tools
|
|
15
|
+
|
|
16
|
+
- `list_docs(query?, limit?, offset?)`
|
|
17
|
+
- `read_doc(uri)`
|
|
18
|
+
- `refresh_docs_index(forceRebuild?)`
|
|
19
|
+
- `get_docs_status()`
|
|
20
|
+
|
|
21
|
+
---
|
|
22
|
+
|
|
23
|
+
## Installation
|
|
24
|
+
|
|
25
|
+
```bash
|
|
26
|
+
npm install
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
## Build
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
npm run build
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
## Run
|
|
36
|
+
|
|
37
|
+
```bash
|
|
38
|
+
npm start
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
### Run with explicit transport
|
|
42
|
+
|
|
43
|
+
`feathersjs-docs-mcp` does not auto-detect transport at runtime. Set transport explicitly:
|
|
44
|
+
|
|
45
|
+
```bash
|
|
46
|
+
# Default standalone streamable HTTP transport for shared localhost access
|
|
47
|
+
npm start
|
|
48
|
+
|
|
49
|
+
# Explicitly force stdio transport
|
|
50
|
+
FEATHERS_MCP_TRANSPORT=stdio npm start
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
When running with `httpStream`:
|
|
54
|
+
|
|
55
|
+
- MCP streamable HTTP endpoint: `http://127.0.0.1:8123/mcp` (customizable)
|
|
56
|
+
- SSE compatibility endpoint: `http://127.0.0.1:8123/sse` (provided by FastMCP)
|
|
57
|
+
|
|
58
|
+
## Test
|
|
59
|
+
|
|
60
|
+
```bash
|
|
61
|
+
npm run test:e2e
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
---
|
|
65
|
+
|
|
66
|
+
## Configuration
|
|
67
|
+
|
|
68
|
+
- `FEATHERS_MCP_TRANSPORT` (default: `httpStream`, values: `stdio` | `httpStream`)
|
|
69
|
+
- `FEATHERS_MCP_HTTP_HOST` (default: `127.0.0.1`)
|
|
70
|
+
- `FEATHERS_MCP_HTTP_PORT` (default: `8123`)
|
|
71
|
+
- `FEATHERS_MCP_HTTP_ENDPOINT` (default: `/mcp`)
|
|
72
|
+
- `FEATHERS_MCP_HTTP_STATELESS` (default: `false`)
|
|
73
|
+
- `FEATHERS_REPO_URL` (default: `https://github.com/feathersjs/feathers.git`)
|
|
74
|
+
- `FEATHERS_REPO_BRANCH` (default: `dove`)
|
|
75
|
+
- `FEATHERS_MCP_CACHE_DIR` (default: `./.cache/feathersjs-docs-mcp`)
|
|
76
|
+
- `TOP_K` (default: `6`)
|
|
77
|
+
|
|
78
|
+
---
|
|
79
|
+
|
|
80
|
+
## Example MCP client config
|
|
81
|
+
|
|
82
|
+
```json
|
|
83
|
+
{
|
|
84
|
+
"mcpServers": {
|
|
85
|
+
"feathers-docs": {
|
|
86
|
+
"command": "node",
|
|
87
|
+
"args": ["/home/bob/feathersjs-docs-mcp/dist/index.js"],
|
|
88
|
+
"disabled": false,
|
|
89
|
+
"autoApprove": [],
|
|
90
|
+
"env": {
|
|
91
|
+
"FEATHERS_REPO_BRANCH": "dove"
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
```
|
|
97
|
+
|
|
98
|
+
---
|
|
99
|
+
|
|
100
|
+
## License
|
|
101
|
+
|
|
102
|
+
MIT
|
package/dist/config.js
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
/**
 * Parse an integer from an environment variable.
 *
 * @param {string} name - environment variable to read.
 * @param {number} fallback - value used when the variable is unset, empty,
 *   or does not parse to a finite integer.
 * @returns {number}
 */
function intFromEnv(name, fallback) {
    const raw = process.env[name];
    if (!raw) {
        return fallback;
    }
    const parsed = Number.parseInt(raw, 10);
    if (Number.isFinite(parsed)) {
        return parsed;
    }
    return fallback;
}
|
|
12
|
+
/**
 * Parse a boolean from an environment variable.
 *
 * Accepts true/1/yes and false/0/no (case-insensitive, trimmed); anything
 * else — including an unset or empty variable — yields the fallback.
 *
 * @param {string} name - environment variable to read.
 * @param {boolean} fallback
 * @returns {boolean}
 */
function boolFromEnv(name, fallback) {
    const raw = process.env[name];
    if (!raw) {
        return fallback;
    }
    switch (raw.trim().toLowerCase()) {
        case 'true':
        case '1':
        case 'yes':
            return true;
        case 'false':
        case '0':
        case 'no':
            return false;
        default:
            return fallback;
    }
}
|
|
26
|
+
/**
 * Parse the MCP transport mode from FEATHERS_MCP_TRANSPORT.
 *
 * @returns {'stdio'|'httpStream'} 'httpStream' when the variable is unset/empty.
 * @throws {Error} when the variable holds any other value.
 */
function transportFromEnv() {
    const raw = process.env.FEATHERS_MCP_TRANSPORT;
    if (!raw) {
        // Unset or empty: default to the standalone streamable HTTP transport.
        return 'httpStream';
    }
    switch (raw) {
        case 'stdio':
        case 'httpStream':
            return raw;
        default:
            throw new Error(`Invalid FEATHERS_MCP_TRANSPORT: "${raw}". Expected "stdio" or "httpStream".`);
    }
}
|
|
37
|
+
/**
 * Parse and normalize the MCP HTTP endpoint path from FEATHERS_MCP_HTTP_ENDPOINT.
 *
 * @returns {string} trimmed endpoint path; '/mcp' when the variable is unset/empty.
 * @throws {Error} when the trimmed value does not start with '/'.
 */
function endpointFromEnv() {
    const raw = process.env.FEATHERS_MCP_HTTP_ENDPOINT;
    if (!raw) {
        return '/mcp';
    }
    const endpoint = raw.trim();
    if (endpoint.startsWith('/')) {
        return endpoint;
    }
    throw new Error(`Invalid FEATHERS_MCP_HTTP_ENDPOINT: "${raw}". Expected path starting with "/".`);
}
|
|
50
|
+
/**
 * Build the application configuration from environment variables.
 *
 * Derived paths: `repoDir` = <cacheDir>/feathers-repo, `docsDir` = <repoDir>/docs.
 *
 * @returns {object} frozen-shape config consumed by the server and sync layer.
 * @throws {Error} when FEATHERS_MCP_TRANSPORT or FEATHERS_MCP_HTTP_ENDPOINT is invalid.
 */
export function getConfig() {
    const cacheOverride = process.env.FEATHERS_MCP_CACHE_DIR;
    // Cache defaults to ./.cache/feathersjs-docs-mcp relative to the CWD.
    const cacheDir = cacheOverride
        ? path.resolve(cacheOverride)
        : path.resolve(process.cwd(), '.cache', 'feathersjs-docs-mcp');
    const repoDir = path.join(cacheDir, 'feathers-repo');
    return {
        transport: transportFromEnv(),
        repoUrl: process.env.FEATHERS_REPO_URL ?? 'https://github.com/feathersjs/feathers.git',
        repoBranch: process.env.FEATHERS_REPO_BRANCH ?? 'dove',
        cacheDir,
        repoDir,
        docsDir: path.join(repoDir, 'docs'),
        topK: intFromEnv('TOP_K', 6),
        httpHost: process.env.FEATHERS_MCP_HTTP_HOST ?? '127.0.0.1',
        httpPort: intFromEnv('FEATHERS_MCP_HTTP_PORT', 8123),
        httpEndpoint: endpointFromEnv(),
        httpStateless: boolFromEnv('FEATHERS_MCP_HTTP_STATELESS', false)
    };
}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
/**
 * Recursively walk a directory, appending absolute paths of `.md` files to `out`.
 */
function walk(dir, out) {
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
        const fullPath = path.join(dir, entry.name);
        if (entry.isDirectory()) {
            walk(fullPath, out);
        } else if (entry.isFile() && entry.name.endsWith('.md')) {
            out.push(fullPath);
        }
    }
}
/**
 * Discover all markdown pages under the provided docs directory.
 *
 * @param {string} docsDir - root to scan; a missing directory yields [].
 * @returns {string[]} absolute file paths, lexicographically sorted.
 */
export function discoverMarkdownFiles(docsDir) {
    if (!fs.existsSync(docsDir)) {
        return [];
    }
    const collected = [];
    walk(docsDir, collected);
    return collected.sort();
}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import matter from 'gray-matter';
|
|
4
|
+
import { sha256, toUri } from '../utils.js';
|
|
5
|
+
/**
 * Extract ATX markdown headings (`#` through `######`, followed by whitespace)
 * from body content, with the hash prefix stripped and text trimmed.
 *
 * @param {string} content - markdown body.
 * @returns {string[]} heading texts in document order.
 */
function extractHeadings(content) {
    const headings = [];
    for (const line of content.split('\n')) {
        const match = /^#{1,6}\s+(.*)$/.exec(line);
        if (match) {
            headings.push(match[1].trim());
        }
    }
    return headings;
}
|
|
14
|
+
/**
 * Parse a markdown file into normalized page metadata used by the index.
 *
 * Title preference: front-matter `title` string, then the first heading,
 * then the file name without its `.md` extension.
 *
 * @param {string} filePath - absolute path of the markdown file.
 * @param {string} docsDir - docs root used to compute the relative path/URI.
 * @returns {{uri: string, title: string, relativePath: string, headings: string[], checksum: string, lastModified: number}}
 */
export function parseMarkdownFile(filePath, docsDir) {
    const raw = fs.readFileSync(filePath, 'utf8');
    const stat = fs.statSync(filePath);
    const relativePath = path.relative(docsDir, filePath);
    const { data, content } = matter(raw);
    const body = content.trim();
    const headings = extractHeadings(body);
    // Only accept a string front-matter title; other types fall through.
    const frontMatterTitle = typeof data.title === 'string' ? data.title : undefined;
    const title = frontMatterTitle ?? headings[0] ?? path.basename(filePath, '.md');
    return {
        uri: toUri(relativePath),
        title,
        relativePath,
        headings,
        // Checksum covers the raw file (front matter included) for change detection.
        checksum: sha256(raw),
        lastModified: stat.mtimeMs
    };
}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import git from 'isomorphic-git';
|
|
4
|
+
import http from 'isomorphic-git/http/node';
|
|
5
|
+
/**
 * Ensure the Feathers docs repository exists locally and is updated to the
 * target branch.
 *
 * If the repository does not exist yet, performs a shallow, single-branch
 * clone. Otherwise it shallow-fetches the branch and force-checks-out the
 * fetched remote commit (equivalent to a hard reset to origin/<branch>).
 *
 * @param {object} config - needs cacheDir, repoDir, repoUrl, repoBranch.
 * @returns {Promise<{commit: string, changed: boolean}>} resolved HEAD commit
 *   and whether HEAD moved during this sync (always true on first clone).
 */
export async function syncDocsRepo(config) {
    // The cache root must exist before isomorphic-git touches the repo path.
    fs.mkdirSync(config.cacheDir, { recursive: true });
    const gitDir = path.join(config.repoDir, '.git');
    if (!fs.existsSync(gitDir)) {
        // First run: shallow (depth 1), single-branch clone keeps the cache small.
        await git.clone({
            fs,
            http,
            dir: config.repoDir,
            url: config.repoUrl,
            ref: config.repoBranch,
            singleBranch: true,
            depth: 1
        });
        const commit = await git.resolveRef({ fs, dir: config.repoDir, ref: 'HEAD' });
        return { commit, changed: true };
    }
    // Record the current HEAD to detect change; '' when HEAD is unresolvable
    // (e.g. a corrupted cache), which forces `changed` to be true afterwards.
    const before = await git.resolveRef({ fs, dir: config.repoDir, ref: 'HEAD' }).catch(() => '');
    await git.fetch({
        fs,
        http,
        dir: config.repoDir,
        url: config.repoUrl,
        ref: config.repoBranch,
        singleBranch: true,
        depth: 1,
        prune: true,
        tags: false
    });
    // Force-checkout the fetched tip; `force: true` discards any local drift.
    const remoteRef = `refs/remotes/origin/${config.repoBranch}`;
    const remoteOid = await git.resolveRef({ fs, dir: config.repoDir, ref: remoteRef });
    await git.checkout({
        fs,
        dir: config.repoDir,
        ref: remoteOid,
        force: true
    });
    const after = await git.resolveRef({ fs, dir: config.repoDir, ref: 'HEAD' });
    return { commit: after, changed: before !== after };
}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import fs from 'node:fs';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import { FastMCP } from 'fastmcp';
|
|
5
|
+
import { z } from 'zod';
|
|
6
|
+
import { getConfig } from './config.js';
|
|
7
|
+
import { syncDocsRepo } from './docs/sync.js';
|
|
8
|
+
import { discoverMarkdownFiles } from './docs/discover.js';
|
|
9
|
+
import { parseMarkdownFile } from './docs/parse.js';
|
|
10
|
+
// Input schema for `list_docs`: optional case-insensitive substring query plus
// cursor-style pagination (limit is capped at 20 by the schema itself).
const listDocsParams = z.object({
    query: z.string().optional(),
    limit: z.number().int().min(1).max(20).optional(),
    offset: z.number().int().min(0).optional()
});
// Input schema for `read_doc`: only `feathers-doc://docs/...` URIs are accepted.
const readDocParams = z.object({
    uri: z.string().startsWith('feathers-doc://docs/')
});
// Input schema for `refresh_docs_index`.
const refreshDocsParams = z.object({
    forceRebuild: z.boolean().optional()
});
|
|
21
|
+
/** Global runtime configuration loaded from environment variables (see config.js). */
const config = getConfig();
/** Timestamp of the last successful refresh cycle; re-set by refreshDocs(). */
let lastSyncAt = new Date().toISOString();
/** Latest commit hash from the synchronized docs repository ('' until the first sync completes). */
let commit = '';
/** In-memory docs page index used by MCP tools; fully rebuilt on each refresh. */
let pages = [];
/** Effective docs directory resolved at runtime. */
let docsDirResolved = config.docsDir;
/** Non-fatal issues discovered while resolving/discovering docs; surfaced via get_docs_status. */
let discoveryWarnings = [];
|
|
33
|
+
/**
 * Resolve the docs directory, resetting `discoveryWarnings` as a side effect:
 * a single "not found" warning when the directory is missing, otherwise empty.
 *
 * @returns {string} always `config.docsDir` (the warning state is the signal).
 */
function resolveDocsDir() {
    const exists = fs.existsSync(config.docsDir);
    discoveryWarnings = exists
        ? []
        : [`Docs directory not found: ${config.docsDir}`];
    return config.docsDir;
}
|
|
44
|
+
/**
 * Convert a docs URI into an absolute file path with traversal protection.
 *
 * @param {string} docsDir - docs root directory.
 * @param {string} uri - `feathers-doc://docs/<relative-path>` (percent-encoded allowed).
 * @returns {string} absolute path inside `docsDir`.
 * @throws {Error} on a malformed URI or a path escaping the docs root.
 */
function resolveDocFilePath(docsDir, uri) {
    const prefix = 'feathers-doc://docs/';
    if (!uri.startsWith(prefix)) {
        throw new Error(`Invalid URI: ${uri}`);
    }
    const relativePath = decodeURIComponent(uri.slice(prefix.length));
    const resolved = path.resolve(docsDir, relativePath);
    const root = path.resolve(docsDir);
    // FIX: a bare `startsWith(root)` check accepted sibling directories such as
    // `<root>-evil`. Require the root itself or a path strictly inside it.
    if (resolved !== root && !resolved.startsWith(root + path.sep)) {
        throw new Error('Path traversal detected');
    }
    return resolved;
}
|
|
60
|
+
/**
 * Synchronize the docs repository and rebuild the in-memory metadata index.
 * Updates module state: commit, docsDirResolved, pages, discoveryWarnings, lastSyncAt.
 */
async function refreshDocs() {
    const syncResult = await syncDocsRepo(config);
    commit = syncResult.commit;
    docsDirResolved = resolveDocsDir();
    const markdownFiles = discoverMarkdownFiles(docsDirResolved);
    pages = markdownFiles.map((file) => parseMarkdownFile(file, docsDirResolved));
    if (markdownFiles.length === 0) {
        discoveryWarnings.push(`No markdown pages found under: ${docsDirResolved}`);
    }
    lastSyncAt = new Date().toISOString();
}
// Build the initial index before the server starts accepting requests.
await refreshDocs();
|
|
75
|
+
/** FastMCP server instance; the transport (stdio or httpStream) is selected at start(). */
const server = new FastMCP({
    name: 'feathersjs-docs-mcp',
    version: '0.1.0'
});
// Expose each markdown page as a read-only MCP resource under feathers-doc://docs/{path}.
server.addResourceTemplate({
    uriTemplate: 'feathers-doc://docs/{path}',
    name: 'Feathers Documentation Page',
    mimeType: 'text/markdown',
    arguments: [
        {
            name: 'path',
            required: true
        }
    ],
    // Resolve {path} under the docs root (with traversal protection) and return raw markdown.
    async load({ path: docPath }) {
        const filePath = resolveDocFilePath(docsDirResolved, `feathers-doc://docs/${docPath}`);
        const text = fs.readFileSync(filePath, 'utf8');
        return {
            text
        };
    }
});
|
|
98
|
+
server.addTool({
    name: 'list_docs',
    description: 'Lists available FeathersJS docs pages with optional text filtering',
    parameters: listDocsParams,
    async execute(args) {
        const { query, limit, offset } = args;
        // Filter pages by title/path/headings when a query is provided.
        // Matching is case-insensitive substring containment; an absent or
        // empty query keeps every page.
        const q = query?.toLowerCase().trim();
        const filtered = pages.filter((page) => {
            if (!q)
                return true;
            return (page.title.toLowerCase().includes(q) ||
                page.relativePath.toLowerCase().includes(q) ||
                page.headings.some((h) => h.toLowerCase().includes(q)));
        });
        // Apply cursor-style pagination over filtered results for the `results` field.
        // `limit` falls back to config.topK when the caller does not specify one.
        const start = offset ?? 0;
        const size = limit ?? config.topK;
        const results = filtered.slice(start, start + size).map((page) => ({
            uri: page.uri,
            title: page.title,
            relativePath: page.relativePath,
            headings: page.headings.slice(0, 8) // cap headings to keep payloads small
        }));
        // Build `groups` from the full filtered set (not only paged results)
        // so clients can render complete section navigation independently of page size.
        // Grouping key is the POSIX folder of the page's relative path ('/' for root pages).
        const groupMap = new Map();
        for (const page of filtered) {
            const folder = path.posix.dirname(page.relativePath.replaceAll('\\', '/'));
            const key = folder === '.' ? '/' : folder;
            const existing = groupMap.get(key) ?? [];
            existing.push({
                uri: page.uri,
                title: page.title,
                relativePath: page.relativePath,
                headings: page.headings.slice(0, 8)
            });
            groupMap.set(key, existing);
        }
        // Sort groups by folder name and each group's pages by relative path.
        const groups = Array.from(groupMap.entries())
            .sort((a, b) => a[0].localeCompare(b[0]))
            .map(([folder, pagesInFolder]) => ({
                folder,
                count: pagesInFolder.length,
                pages: pagesInFolder.sort((a, b) => a.relativePath.localeCompare(b.relativePath))
            }));
        // Return a JSON string payload as text content for broad MCP client compatibility.
        return JSON.stringify({
            query: query ?? null,
            total: filtered.length,
            offset: start,
            limit: size,
            count: results.length,
            results,
            groups
        }, null, 2);
    }
});
|
|
156
|
+
server.addTool({
    name: 'read_doc',
    description: 'Reads a markdown page by feathers-doc URI',
    parameters: readDocParams,
    async execute({ uri }) {
        // Map the URI onto the docs tree (with traversal protection) and
        // return the raw markdown wrapped in a JSON payload.
        const filePath = resolveDocFilePath(docsDirResolved, uri);
        const content = fs.readFileSync(filePath, 'utf8');
        return JSON.stringify({ uri, content }, null, 2);
    }
});
|
|
167
|
+
server.addTool({
    name: 'refresh_docs_index',
    description: 'Pull latest Feathers docs and refresh in-memory catalog',
    parameters: refreshDocsParams,
    async execute({ forceRebuild }) {
        // Re-sync repository and fully rebuild in-memory page index.
        // NOTE: `forceRebuild` is only echoed back in the response; refreshDocs()
        // always performs a full re-sync and rebuild regardless of this flag.
        await refreshDocs();
        return JSON.stringify({
            ok: true,
            forceRebuild: forceRebuild ?? false,
            commit,
            lastSyncAt,
            pages: pages.length
        }, null, 2);
    }
});
|
|
183
|
+
server.addTool({
    name: 'get_docs_status',
    description: 'Returns repository/index health and metadata',
    // No parameters: this tool is a pure read of module-level state.
    async execute() {
        // Report current repository/index state for diagnostics.
        return JSON.stringify({
            repoUrl: config.repoUrl,
            branch: config.repoBranch,
            commit,
            lastSyncAt,
            pages: pages.length,
            docsDirResolved,
            // Re-checked live rather than cached, so a deleted cache shows up here.
            docsDirExists: fs.existsSync(docsDirResolved),
            discoveryWarnings,
            cacheDir: config.cacheDir
        }, null, 2);
    }
});
|
|
201
|
+
if (config.transport === 'stdio') {
    // stdio transport: stdout carries the MCP protocol, so all logging goes to stderr.
    await server.start({ transportType: 'stdio' });
    console.error('feathersjs-docs-mcp running on stdio');
}
else {
    // Default: standalone streamable-HTTP server. FastMCP also exposes an
    // /sse compatibility endpoint alongside the configured MCP endpoint.
    await server.start({
        transportType: 'httpStream',
        httpStream: {
            host: config.httpHost,
            port: config.httpPort,
            endpoint: config.httpEndpoint,
            stateless: config.httpStateless
        }
    });
    const protocol = 'http';
    console.error(`feathersjs-docs-mcp running on ${protocol}://${config.httpHost}:${config.httpPort}${config.httpEndpoint}`);
    console.error(`feathersjs-docs-mcp SSE compatibility endpoint: ${protocol}://${config.httpHost}:${config.httpPort}/sse`);
}
|
package/dist/indexer.js
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
import fs from 'node:fs';
|
|
3
|
+
import { discoverMarkdownFiles } from './docs/discover.js';
|
|
4
|
+
import { parseMarkdownFile, toChunkRecord } from './docs/parse.js';
|
|
5
|
+
import { syncDocsRepo } from './docs/sync.js';
|
|
6
|
+
import { createEmbedder } from './search/embed.js';
|
|
7
|
+
import { loadIndexedData, saveIndexedData, VectorStore } from './search/vector-store.js';
|
|
8
|
+
/**
 * Decide whether the persisted index must be rebuilt: true when no index
 * exists or when its manifest no longer matches the current commit,
 * embedding model, or chunking parameters.
 */
function needsRebuild(existing, commit, config) {
    if (!existing) {
        return true;
    }
    const { manifest } = existing;
    const upToDate = manifest.commit === commit &&
        manifest.embeddingModel === config.embeddingModel &&
        manifest.chunkSize === config.chunkSize &&
        manifest.chunkOverlap === config.chunkOverlap;
    return !upToDate;
}
|
|
16
|
+
/**
 * Sync the docs repo and either reuse the persisted vector index or rebuild it.
 *
 * The persisted index is reused only when not forced, the repo did not change,
 * and the stored manifest matches the current commit/model/chunking settings.
 *
 * @param {object} config - resolved configuration (repoUrl, docsDir, indexDir,
 *   embeddingModel, chunkSize, chunkOverlap, ...).
 * @param {boolean} [forceRebuild=false] - skip reuse and rebuild unconditionally.
 * @returns {Promise<{store: VectorStore, embedder: object, commit: string, lastSyncAt: string}>}
 */
export async function buildOrLoadIndex(config, forceRebuild = false) {
    // FIX: syncDocsRepo is async; without `await` here, `sync` was a pending
    // Promise and `sync.changed` / `sync.commit` were always undefined, so the
    // reuse check and the persisted manifest commit were silently wrong.
    const sync = await syncDocsRepo(config);
    fs.mkdirSync(config.indexDir, { recursive: true });
    const embedder = createEmbedder(config.embeddingModel);
    const store = new VectorStore();
    const existing = loadIndexedData(config.indexDir);
    if (!forceRebuild && !sync.changed && existing && !needsRebuild(existing, sync.commit, config)) {
        store.setData(existing);
        return { store, embedder, commit: sync.commit, lastSyncAt: new Date().toISOString() };
    }
    const files = discoverMarkdownFiles(config.docsDir);
    const pages = [];
    const chunkInputs = [];
    for (const file of files) {
        // NOTE(review): parse.js's parseMarkdownFile takes (filePath, docsDir) and
        // returns a flat page object — not { page, chunks } — and parse.js does not
        // export toChunkRecord. This module appears out of sync with parse.js;
        // confirm before relying on this path.
        const parsed = parseMarkdownFile(file, config.docsDir, config.chunkSize, config.chunkOverlap);
        pages.push(parsed.page);
        chunkInputs.push(...parsed.chunks);
    }
    const embeddings = await embedder.embed(chunkInputs.map((c) => c.content));
    const chunks = chunkInputs.map((input, i) => toChunkRecord(input, embeddings[i], config.repoUrl, config.repoBranch, sync.commit));
    const indexed = {
        pages,
        chunks,
        manifest: {
            embeddingModel: config.embeddingModel,
            commit: sync.commit,
            builtAt: new Date().toISOString(),
            chunkSize: config.chunkSize,
            chunkOverlap: config.chunkOverlap
        }
    };
    saveIndexedData(config.indexDir, indexed);
    store.setData(indexed);
    return { store, embedder, commit: sync.commit, lastSyncAt: new Date().toISOString() };
}
|
|
51
|
+
/**
 * Convert a docs URI into an absolute file path with traversal protection.
 *
 * @param {string} docsDir - docs root directory.
 * @param {string} uri - `feathers-doc://docs/<relative-path>` (percent-encoded allowed).
 * @returns {string} absolute path inside `docsDir`.
 * @throws {Error} on a malformed URI or a path escaping the docs root.
 */
export function resolveDocFilePath(docsDir, uri) {
    const prefix = 'feathers-doc://docs/';
    if (!uri.startsWith(prefix)) {
        throw new Error(`Invalid URI: ${uri}`);
    }
    const relativePath = decodeURIComponent(uri.slice(prefix.length));
    const resolved = path.resolve(docsDir, relativePath);
    const root = path.resolve(docsDir);
    // FIX: a bare `startsWith(root)` check accepted sibling directories such as
    // `<root>-evil`. Require the root itself or a path strictly inside it.
    if (resolved !== root && !resolved.startsWith(root + path.sep)) {
        throw new Error('Path traversal detected');
    }
    return resolved;
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { pipeline } from '@xenova/transformers';
|
|
2
|
+
/**
 * Embedder backed by a @xenova/transformers feature-extraction pipeline.
 */
class TransformersEmbedder {
    extractorPromise;
    /**
     * @param {string} model - model id handed to the feature-extraction pipeline.
     */
    constructor(model) {
        // Kick off model loading eagerly; the promise is awaited lazily in embed().
        this.extractorPromise = pipeline('feature-extraction', model);
    }
    /**
     * Embed texts one at a time with mean pooling and L2 normalization.
     * @param {string[]} texts
     * @returns {Promise<number[][]>} one dense vector per input text.
     */
    async embed(texts) {
        const extractor = await this.extractorPromise;
        const results = [];
        for (const text of texts) {
            const tensor = await extractor(text, { pooling: 'mean', normalize: true });
            results.push(Array.from(tensor.data));
        }
        return results;
    }
}
/**
 * Create the embedder implementation for the given model id.
 */
export function createEmbedder(model) {
    return new TransformersEmbedder(model);
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { cosineSimilarity, snippet } from '../utils.js';
|
|
4
|
+
/**
 * In-memory store of indexed docs pages and embedded chunks, with
 * cosine-similarity search over the chunk embeddings.
 */
export class VectorStore {
    pages = [];
    chunks = [];
    manifest = null;

    /** Replace the store contents with a loaded or freshly built index. */
    setData(data) {
        this.pages = data.pages;
        this.chunks = data.chunks;
        this.manifest = data.manifest;
    }

    /** Snapshot of the current index; throws when nothing has been loaded yet. */
    getData() {
        if (this.manifest === null) {
            throw new Error('Index is not initialized');
        }
        const { pages, chunks, manifest } = this;
        return { pages, chunks, manifest };
    }

    getPages() {
        return this.pages;
    }

    getPageByUri(uri) {
        return this.pages.find((page) => page.uri === uri);
    }

    /**
     * Rank chunks by cosine similarity to `queryVector`, optionally restricted
     * to chunks whose sectionPath or title contains `sectionFilter`
     * (case-insensitive). Results are deduplicated per page URI, keeping the
     * best-scoring chunk, and capped at `limit`.
     */
    search(query, queryVector, limit, sectionFilter) {
        const needle = sectionFilter?.toLowerCase().trim();
        const matchesFilter = (chunk) => !needle ||
            chunk.sectionPath.toLowerCase().includes(needle) ||
            chunk.title.toLowerCase().includes(needle);
        const scored = [];
        for (const chunk of this.chunks) {
            if (!matchesFilter(chunk)) {
                continue;
            }
            scored.push({
                title: chunk.title,
                uri: chunk.uri,
                sectionPath: chunk.sectionPath,
                score: cosineSimilarity(queryVector, chunk.embedding),
                snippet: snippet(chunk.content, query)
            });
        }
        scored.sort((a, b) => b.score - a.score);
        // Walk hits best-first, keeping one (best) hit per URI; stop early once
        // `limit` distinct pages are collected.
        const bestByUri = new Map();
        for (const hit of scored) {
            const previous = bestByUri.get(hit.uri);
            if (previous === undefined || hit.score > previous.score) {
                bestByUri.set(hit.uri, hit);
            }
            if (bestByUri.size >= limit) {
                break;
            }
        }
        return [...bestByUri.values()].slice(0, limit);
    }
}
|
|
56
|
+
/**
 * Load the persisted index from `<indexDir>/index.json`.
 * @returns {object|null} parsed index data, or null when no file exists.
 */
export function loadIndexedData(indexDir) {
    const indexFile = path.join(indexDir, 'index.json');
    if (!fs.existsSync(indexFile)) {
        return null;
    }
    return JSON.parse(fs.readFileSync(indexFile, 'utf8'));
}
/**
 * Persist index data to `<indexDir>/index.json`, creating the directory as needed.
 */
export function saveIndexedData(indexDir, data) {
    fs.mkdirSync(indexDir, { recursive: true });
    fs.writeFileSync(path.join(indexDir, 'index.json'), JSON.stringify(data), 'utf8');
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
 * Wrap `app.setup` so that `afterSetup` runs once setup has completed.
 *
 * If the original setup returns a thenable, `afterSetup` runs after it
 * resolves and the resolved value is passed through. Otherwise `afterSetup`
 * is invoked immediately and a Promise resolving to the original (synchronous)
 * setup result is returned.
 */
export function attachSetupHook(app, afterSetup) {
    const baseSetup = app.setup.bind(app);
    app.setup = (...setupArgs) => {
        const result = baseSetup(...setupArgs);
        const isThenable = result && typeof result.then === 'function';
        if (isThenable) {
            return result.then(async (resolved) => {
                await afterSetup();
                return resolved;
            });
        }
        return Promise.resolve(afterSetup()).then(() => result);
    };
}
|
package/dist/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
package/dist/utils.js
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import crypto from 'node:crypto';
|
|
2
|
+
/**
 * Create a deterministic SHA-256 hash for a string payload.
 * @param {string} input
 * @returns {string} lowercase hex digest.
 */
export function sha256(input) {
    const hash = crypto.createHash('sha256');
    hash.update(input);
    return hash.digest('hex');
}
|
|
8
|
+
/**
 * Convert a docs-relative path to the MCP resource URI format,
 * normalizing Windows backslash separators to forward slashes.
 * @param {string} relativePath
 * @returns {string} `feathers-doc://docs/<posix-path>`
 */
export function toUri(relativePath) {
    const posixPath = relativePath.split('\\').join('/');
    return `feathers-doc://docs/${posixPath}`;
}
|
|
14
|
+
/**
 * Build a compact excerpt around a query match for display/search previews.
 *
 * When the query is found and the text is longer than `max`, the window starts
 * a third of `max` before the match; otherwise the leading `max` characters
 * are returned.
 *
 * @param {string} text - source text.
 * @param {string} query - term to center the excerpt on (case-insensitive).
 * @param {number} [max=280] - maximum excerpt length.
 * @returns {string}
 */
export function snippet(text, query, max = 280) {
    const lower = text.toLowerCase();
    const q = query.toLowerCase();
    const idx = lower.indexOf(q);
    if (idx < 0 || text.length <= max)
        return text.slice(0, max);
    const start = Math.max(0, idx - Math.floor(max / 3));
    return text.slice(start, start + max);
}
/**
 * Cosine similarity between two equal-length numeric vectors.
 *
 * FIX: search/vector-store.js imports `cosineSimilarity` from this module,
 * but it was never defined here, making that import fail at load time.
 *
 * @param {number[]} a
 * @param {number[]} b
 * @returns {number} similarity in [-1, 1]; 0 when either vector has zero norm.
 */
export function cosineSimilarity(a, b) {
    let dot = 0;
    let normA = 0;
    let normB = 0;
    for (let i = 0; i < a.length; i += 1) {
        dot += a[i] * b[i];
        normA += a[i] * a[i];
        normB += b[i] * b[i];
    }
    const denominator = Math.sqrt(normA) * Math.sqrt(normB);
    return denominator === 0 ? 0 : dot / denominator;
}
|
package/package.json
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "feathersjs-docs-mcp",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"private": false,
|
|
5
|
+
"type": "module",
|
|
6
|
+
"description": "Read-only MCP server for discovering and reading FeathersJS documentation",
|
|
7
|
+
"keywords": [
|
|
8
|
+
"mcp",
|
|
9
|
+
"model-context-protocol",
|
|
10
|
+
"feathers",
|
|
11
|
+
"feathersjs",
|
|
12
|
+
"documentation"
|
|
13
|
+
],
|
|
14
|
+
"license": "MIT",
|
|
15
|
+
"author": "fedevela",
|
|
16
|
+
"homepage": "https://github.com/fedevela/feathersjs-docs-mcp#readme",
|
|
17
|
+
"repository": {
|
|
18
|
+
"type": "git",
|
|
19
|
+
"url": "git+https://github.com/fedevela/feathersjs-docs-mcp.git"
|
|
20
|
+
},
|
|
21
|
+
"bugs": {
|
|
22
|
+
"url": "https://github.com/fedevela/feathersjs-docs-mcp/issues"
|
|
23
|
+
},
|
|
24
|
+
"files": [
|
|
25
|
+
"dist",
|
|
26
|
+
"README.md",
|
|
27
|
+
"LICENSE"
|
|
28
|
+
],
|
|
29
|
+
"main": "dist/index.js",
|
|
30
|
+
"types": "dist/index.d.ts",
|
|
31
|
+
"exports": {
|
|
32
|
+
".": {
|
|
33
|
+
"types": "./dist/index.d.ts",
|
|
34
|
+
"import": "./dist/index.js"
|
|
35
|
+
}
|
|
36
|
+
},
|
|
37
|
+
"bin": {
|
|
38
|
+
"feathersjs-docs-mcp": "dist/index.js"
|
|
39
|
+
},
|
|
40
|
+
"engines": {
|
|
41
|
+
"node": ">=20"
|
|
42
|
+
},
|
|
43
|
+
"scripts": {
|
|
44
|
+
"build": "tsc -p tsconfig.json",
|
|
45
|
+
"start": "node dist/index.js",
|
|
46
|
+
"dev": "tsx src/index.ts",
|
|
47
|
+
"test:e2e": "npm run build && vitest run --config vitest.e2e.config.ts",
|
|
48
|
+
"test:e2e:watch": "vitest --config vitest.e2e.config.ts",
|
|
49
|
+
"prepublishOnly": "npm run build && npm run test:e2e"
|
|
50
|
+
},
|
|
51
|
+
"dependencies": {
|
|
52
|
+
"fastmcp": "^3.33.0",
|
|
53
|
+
"gray-matter": "^4.0.3",
|
|
54
|
+
"isomorphic-git": "^1.37.1",
|
|
55
|
+
"zod": "^4.3.6"
|
|
56
|
+
},
|
|
57
|
+
"devDependencies": {
|
|
58
|
+
"@playwright/test": "^1.58.2",
|
|
59
|
+
"@types/node": "^25.3.0",
|
|
60
|
+
"tsx": "^4.21.0",
|
|
61
|
+
"typescript": "~5.9.3",
|
|
62
|
+
"vitest": "^4.0.18"
|
|
63
|
+
}
|
|
64
|
+
}
|