@react-spectrum/mcp 0.1.0 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,117 @@
1
#!/usr/bin/env node
/// <reference types="node" />
import { errorToString } from '../../shared/src/utils.js';
import { listIconNames, listIllustrationNames, loadIconAliases, loadIllustrationAliases, loadStyleMacroPropertyValues } from './s2-data.js';
import { startServer } from '../../shared/src/server.js';
import { z } from 'zod';

// CLI entry for S2.

/**
 * Normalizes a `terms` input (a string or an array of strings) into a
 * deduplicated list of non-empty, lowercased search terms.
 * @throws {Error} when no usable term remains after trimming.
 */
function normalizeTerms(terms) {
  const rawTerms = Array.isArray(terms) ? terms : [terms];
  const normalized = Array.from(new Set(rawTerms.map(t => String(t ?? '').trim().toLowerCase()).filter(Boolean)));
  if (normalized.length === 0) {
    throw new Error('Provide at least one non-empty search term.');
  }
  return normalized;
}

/**
 * Shared search used by the icon and illustration tools (previously two
 * verbatim copies). Matches `normalized` terms against `allNames` by
 * case-insensitive substring, then against the `aliases` map
 * (alias key -> array of target names), keeping only targets that are real
 * names. Returns a sorted array of unique matches.
 */
function searchByTerms(allNames, aliases, normalized) {
  const nameSet = new Set(allNames);
  // direct name matches
  const results = new Set(allNames.filter(name => {
    const nameLower = name.toLowerCase();
    return normalized.some(term => nameLower.includes(term));
  }));
  // alias matches (either direction: term within alias, or alias within term)
  for (const [aliasKey, targets] of Object.entries(aliases)) {
    if (!targets || targets.length === 0) {
      continue;
    }
    const aliasLower = aliasKey.toLowerCase();
    if (normalized.some(term => aliasLower.includes(term) || term.includes(aliasLower))) {
      for (const t of targets) {
        const n = String(t);
        if (nameSet.has(n)) {
          results.add(n);
        }
      }
    }
  }
  return Array.from(results).sort((a, b) => a.localeCompare(b));
}

(async () => {
  try {
    const arg = (process.argv[2] || '').trim();
    if (arg === '--help' || arg === '-h' || arg === 'help') {
      console.log('Usage: npx @react-spectrum/mcp@latest\n\nStarts the MCP server for React Spectrum (S2) documentation.');
      process.exit(0);
    }
    // NOTE(review): server version '0.1.0' is hard-coded while the package
    // itself is published as 1.0.0 — confirm this mismatch is intentional.
    await startServer('s2', '0.1.0', (server) => {
      server.registerTool('search_s2_icons', {
        title: 'Search S2 icons',
        description: 'Searches the S2 workflow icon set by one or more terms; returns matching icon names.',
        inputSchema: { terms: z.union([z.string(), z.array(z.string())]) }
      }, async ({ terms }) => {
        const normalized = normalizeTerms(terms);
        const matches = searchByTerms(listIconNames(), await loadIconAliases(), normalized);
        return { content: [{ type: 'text', text: JSON.stringify(matches, null, 2) }] };
      });
      server.registerTool('search_s2_illustrations', {
        title: 'Search S2 illustrations',
        description: 'Searches the S2 illustrations set by one or more terms; returns matching illustration names.',
        inputSchema: { terms: z.union([z.string(), z.array(z.string())]) }
      }, async ({ terms }) => {
        const normalized = normalizeTerms(terms);
        const matches = searchByTerms(listIllustrationNames(), await loadIllustrationAliases(), normalized);
        return { content: [{ type: 'text', text: JSON.stringify(matches, null, 2) }] };
      });
      server.registerTool('get_style_macro_property_values', {
        title: 'Get style macro property values',
        description: 'Returns the allowed values for a given S2 style macro property (including expanded color/spacing value lists where applicable).',
        inputSchema: { propertyName: z.string() }
      }, async ({ propertyName }) => {
        const name = String(propertyName ?? '').trim();
        if (!name) {
          throw new Error('Provide a non-empty propertyName.');
        }
        const all = loadStyleMacroPropertyValues();
        let def = all[name];
        if (!def) {
          // fallback to case-insensitive lookup
          const lower = name.toLowerCase();
          const matchKey = Object.keys(all).find(k => k.toLowerCase() === lower);
          if (matchKey) {
            def = all[matchKey];
          }
        }
        if (!def) {
          const available = Object.keys(all).sort((a, b) => a.localeCompare(b));
          throw new Error(`Unknown style macro property '${name}'. Available properties: ${available.join(', ')}`);
        }
        return { content: [{ type: 'text', text: JSON.stringify(def, null, 2) }] };
      });
    });
  }
  catch (err) {
    console.error(errorToString(err));
    process.exit(1);
  }
})();
@@ -0,0 +1,62 @@
1
import { fileURLToPath } from 'url';
import fs from 'fs';
import path from 'path';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Lazily-populated caches; each bundled data file is read and parsed at most
// once per process.
let iconIdCache = null;
let illustrationIdCache = null;
let iconAliasesCache = null;
let illustrationAliasesCache = null;
let styleMacroPropertyValuesCache = null;

/**
 * Reads and parses a JSON data file bundled alongside the compiled output.
 * Best-effort: returns null when the file is missing or unparseable.
 */
function readBundledJson(filename) {
  try {
    // Go up from s2/src/ to dist/, then to data/
    const dataPath = path.resolve(__dirname, '..', '..', 'data', filename);
    if (!fs.existsSync(dataPath)) {
      return null;
    }
    return JSON.parse(fs.readFileSync(dataPath, 'utf8'));
  }
  catch {
    return null;
  }
}

// Coerces a bundled payload into a sorted copy of a name list ([] otherwise).
function toSortedNames(bundled) {
  return Array.isArray(bundled) ? bundled.slice().sort((a, b) => a.localeCompare(b)) : [];
}

// Coerces a bundled payload into an alias map ({} for non-objects).
function toAliasMap(bundled) {
  return (bundled && typeof bundled === 'object') ? bundled : {};
}

// Coerces a bundled payload into a plain (non-array) object ({} otherwise).
function toPlainObject(bundled) {
  return (bundled && typeof bundled === 'object' && !Array.isArray(bundled)) ? bundled : {};
}

/** Returns the sorted list of bundled S2 icon names (cached). */
export function listIconNames() {
  return (iconIdCache ??= toSortedNames(readBundledJson('icons.json')));
}

/** Returns the sorted list of bundled S2 illustration names (cached). */
export function listIllustrationNames() {
  return (illustrationIdCache ??= toSortedNames(readBundledJson('illustrations.json')));
}

/** Returns the icon alias map (alias -> icon names), cached. */
export async function loadIconAliases() {
  return (iconAliasesCache ??= toAliasMap(readBundledJson('iconAliases.json')));
}

/** Returns the illustration alias map (alias -> illustration names), cached. */
export async function loadIllustrationAliases() {
  return (illustrationAliasesCache ??= toAliasMap(readBundledJson('illustrationAliases.json')));
}

/**
 * Returns the style macro property-values map (cached). Arrays and other
 * non-object payloads are rejected and replaced with an empty map.
 */
export function loadStyleMacroPropertyValues() {
  return (styleMacroPropertyValuesCache ??= toPlainObject(readBundledJson('styleMacroPropertyValues.json')));
}
@@ -0,0 +1,83 @@
1
import { DEFAULT_CDN_BASE, fetchText } from './utils.js';
import { extractNameAndDescription, parseSectionsFromMarkdown } from './parser.js';
import path from 'path';

// Cache of parsed pages, keyed by '<library>/<page>'.
const pageCache = new Map();
// Libraries whose llms.txt index has already been fetched.
const pageIndexLoaded = new Set();

function libBaseUrl(library) {
  return `${DEFAULT_CDN_BASE}/${library}`;
}

// Creates (and caches) a minimal, not-yet-parsed page record.
function makeStub(key, name) {
  const stub = { key, name, description: undefined, filePath: `${DEFAULT_CDN_BASE}/${key}.md`, sections: [] };
  pageCache.set(stub.key, stub);
  return stub;
}

/**
 * Builds an index of pages for the given library from the CDN's llms.txt.
 * Fetched once per library; later calls serve from the cache.
 * @returns {Promise<object[]>} page infos sorted by key.
 */
export async function buildPageIndex(library) {
  if (pageIndexLoaded.has(library)) {
    // BUG FIX: this cached path previously returned entries unsorted while
    // the first call returned them sorted by key; sort here too so callers
    // observe a stable order on every call.
    return Array.from(pageCache.values())
      .filter(p => p.key.startsWith(`${library}/`))
      .sort((a, b) => a.key.localeCompare(b.key));
  }
  const pages = [];
  // Read llms.txt to enumerate available pages without downloading them all.
  const txt = await fetchText(`${libBaseUrl(library)}/llms.txt`);
  // Matches markdown list entries of the form "- [Name](href): description".
  const re = /^\s*-\s*\[([^\]]+)\]\(([^)]+)\)(?:\s*:\s*(.*))?\s*$/;
  for (const line of txt.split(/\r?\n/)) {
    const m = line.match(re);
    if (!m) {
      continue;
    }
    const display = (m[1] || '').trim();
    const href = (m[2] || '').trim();
    const description = (m[3] || '').trim() || undefined;
    // Only .md links are docs pages.
    if (!href || !/\.md$/i.test(href)) {
      continue;
    }
    const key = href.replace(/\.md$/i, '').replace(/\\/g, '/');
    const name = display || path.basename(key);
    const filePath = `${DEFAULT_CDN_BASE}/${key}.md`;
    const info = { key, name, description, filePath, sections: [] };
    pages.push(info);
    pageCache.set(info.key, info);
  }
  pageIndexLoaded.add(library);
  return pages.sort((a, b) => a.key.localeCompare(b.key));
}

/**
 * Ensures a page record has been fetched and parsed (name, description,
 * sections). Records that already carry sections and a description are
 * returned unchanged; otherwise a parsed copy replaces the cached entry.
 */
export async function ensureParsedPage(info) {
  if (info.sections && info.sections.length > 0 && info.description !== undefined) {
    return info;
  }
  const text = await fetchText(info.filePath);
  const lines = text.split(/\r?\n/);
  const { name, description } = extractNameAndDescription(lines);
  const sections = parseSectionsFromMarkdown(lines);
  const updated = { ...info, name: name || info.name, description, sections };
  pageCache.set(updated.key, updated);
  return updated;
}

/**
 * Resolves a page reference (either '<library>/<page>' or a bare page name)
 * to a cached record, creating an unparsed stub for unknown pages.
 * @throws {Error} when a qualified name targets a different library.
 */
export async function resolvePageRef(library, pageName) {
  await buildPageIndex(library);
  if (pageCache.has(pageName)) {
    return pageCache.get(pageName);
  }
  if (pageName.includes('/')) {
    const normalized = pageName.replace(/\\/g, '/');
    const prefix = normalized.split('/', 1)[0];
    if (prefix !== library) {
      throw new Error(`Page '${pageName}' is not in the '${library}' library.`);
    }
    return pageCache.get(normalized) ?? makeStub(normalized, path.basename(normalized));
  }
  const key = `${library}/${pageName}`;
  return pageCache.get(key) ?? makeStub(key, pageName);
}
@@ -0,0 +1,61 @@
1
/**
 * Scans markdown lines for `## ` headings (ignoring headings inside fenced
 * code blocks) and returns sections as { name, startLine, endLine }, where
 * endLine is exclusive: the next section's start, or the end of the document.
 */
export function parseSectionsFromMarkdown(lines) {
  const sections = [];
  let insideFence = false;
  for (const [lineNo, rawLine] of lines.entries()) {
    if (/^```/.test(rawLine.trim())) {
      insideFence = !insideFence;
    }
    if (insideFence) {
      continue;
    }
    if (rawLine.startsWith('## ')) {
      // Close the previous section at this heading, then open a new one that
      // provisionally runs to the end of the document.
      if (sections.length > 0) {
        sections[sections.length - 1].endLine = lineNo;
      }
      sections.push({ name: rawLine.replace(/^##\s+/, '').trim(), startLine: lineNo, endLine: lines.length });
    }
  }
  return sections;
}
22
/**
 * Extracts the page name (first `# ` heading) and the first descriptive
 * paragraph that follows it from markdown lines. Sub-headings, lines starting
 * with `<` (raw HTML/JSX), blank lines before the paragraph, and fenced code
 * content are skipped; a blank line after the paragraph ends collection.
 * @returns {{name: string, description: string|undefined}}
 */
export function extractNameAndDescription(lines) {
  let name = '';
  let cursor = 0;
  // Locate the top-level heading; everything before it is ignored.
  while (cursor < lines.length) {
    const heading = lines[cursor];
    cursor++;
    if (heading.startsWith('# ')) {
      name = heading.replace(/^#\s+/, '').trim();
      break;
    }
  }
  const collected = [];
  let insideFence = false;
  for (; cursor < lines.length; cursor++) {
    const line = lines[cursor];
    if (/^```/.test(line.trim())) {
      insideFence = !insideFence;
    }
    if (insideFence) {
      continue;
    }
    if (line.trim() === '') {
      // Blank lines before the paragraph are skipped; afterwards they end it.
      if (collected.length > 0) {
        break;
      }
      continue;
    }
    if (/^#{1,6}\s/.test(line) || /^</.test(line.trim())) {
      continue;
    }
    collected.push(line);
  }
  const description = collected.length > 0 ? collected.join('\n').trim() : undefined;
  return { name, description };
}
@@ -0,0 +1,76 @@
1
import { buildPageIndex, ensureParsedPage, resolvePageRef } from './page-manager.js';
import { errorToString, fetchText } from './utils.js';
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import { parseSectionsFromMarkdown } from './parser.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { z } from 'zod';

/**
 * Starts an MCP server over stdio that exposes documentation tools for the
 * given library ('s2' or React Aria). When `registerAdditionalTools` is
 * provided it is awaited with the server instance before the transport
 * connects, so callers can add library-specific tools.
 */
export async function startServer(library, version, registerAdditionalTools) {
  const isS2 = library === 's2';
  const server = new McpServer({
    name: isS2 ? 's2-docs-server' : 'react-aria-docs-server',
    version
  });
  // Build page index at startup. Failure is non-fatal: the tools below
  // (re)build the index on demand.
  try {
    await buildPageIndex(library);
  }
  catch (e) {
    console.warn(`Warning: failed to load ${library} docs index (${errorToString(e)}).`);
  }
  const toolPrefix = isS2 ? 's2' : 'react_aria';

  server.registerTool(`list_${toolPrefix}_pages`, {
    title: isS2 ? 'List React Spectrum (@react-spectrum/s2) docs pages' : 'List React Aria docs pages',
    description: `Returns a list of available pages in the ${library} docs.`,
    inputSchema: { includeDescription: z.boolean().optional() }
  }, async ({ includeDescription }) => {
    const pages = await buildPageIndex(library);
    const ordered = [...pages].sort((a, b) => a.key.localeCompare(b.key));
    const items = ordered.map(p => (includeDescription
      ? { name: p.name, description: p.description ?? '' }
      : { name: p.name }));
    return {
      content: [{ type: 'text', text: JSON.stringify(items, null, 2) }]
    };
  });

  server.registerTool(`get_${toolPrefix}_page_info`, {
    title: 'Get page info',
    description: 'Returns page description and list of sections for a given page.',
    inputSchema: { page_name: z.string() }
  }, async ({ page_name }) => {
    const info = await ensureParsedPage(await resolvePageRef(library, page_name));
    const out = {
      name: info.name,
      description: info.description ?? '',
      sections: info.sections.map(s => s.name)
    };
    return { content: [{ type: 'text', text: JSON.stringify(out, null, 2) }] };
  });

  server.registerTool(`get_${toolPrefix}_page`, {
    title: 'Get page markdown',
    description: 'Returns the full markdown content for a page, or a specific section if provided.',
    inputSchema: { page_name: z.string(), section_name: z.string().optional() }
  }, async ({ page_name, section_name }) => {
    const ref = await resolvePageRef(library, page_name);
    const text = await fetchText(ref.filePath);
    if (!section_name) {
      return { content: [{ type: 'text', text }] };
    }
    const lines = text.split(/\r?\n/);
    const sections = parseSectionsFromMarkdown(lines);
    // Prefer an exact section-name match; fall back to case-insensitive.
    const section = sections.find(s => s.name === section_name)
      ?? sections.find(s => s.name.toLowerCase() === section_name.toLowerCase());
    if (!section) {
      const available = sections.map(s => s.name).join(', ');
      throw new Error(`Section '${section_name}' not found in ${ref.key}. Available: ${available}`);
    }
    const snippet = lines.slice(section.startLine, section.endLine).join('\n');
    return { content: [{ type: 'text', text: snippet }] };
  });

  if (registerAdditionalTools) {
    await registerAdditionalTools(server);
  }
  const transport = new StdioServerTransport();
  await server.connect(transport);
}
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,30 @@
1
/**
 * Converts an unknown thrown value into a human-readable string.
 * Prefers the stack trace, then the message, then a JSON rendering, and
 * finally String() for values JSON cannot represent.
 * @param {unknown} err - any thrown value.
 * @returns {string} always a string, never undefined.
 */
export function errorToString(err) {
  if (err && typeof err === 'object' && 'stack' in err && typeof err.stack === 'string') {
    return err.stack;
  }
  if (err && typeof err === 'object' && 'message' in err && typeof err.message === 'string') {
    return err.message;
  }
  try {
    // BUG FIX: JSON.stringify returns undefined (not a string) for
    // undefined, functions and symbols; fall back to String() so this
    // function always returns a string.
    const json = JSON.stringify(err);
    return json !== undefined ? json : String(err);
  }
  catch {
    return String(err);
  }
}
15
// CDN base for docs. Can be overridden via env variable.
export const DEFAULT_CDN_BASE = process.env.DOCS_CDN_BASE ?? 'https://react-spectrum.adobe.com/beta';

/**
 * Fetches a URL as text, aborting the request after `timeoutMs` milliseconds.
 * @param {string} url - the URL to fetch.
 * @param {number} [timeoutMs=15000] - abort timeout in milliseconds.
 * @returns {Promise<string>} the response body as text.
 * @throws {Error} on non-2xx responses; AbortError on timeout.
 */
export async function fetchText(url, timeoutMs = 15000) {
  const ctrl = new AbortController();
  // BUG FIX: keep a handle to the timer itself. The previous code stored the
  // result of `setTimeout(...).unref?.()`, which is undefined in runtimes
  // without unref(), so clearTimeout() became a no-op there and the timer
  // leaked. In Node unref() returns the Timeout, so behavior is unchanged.
  const timer = setTimeout(() => ctrl.abort(), timeoutMs);
  // Don't let the pending timeout keep the process alive (Node only).
  timer.unref?.();
  try {
    const res = await fetch(url, { signal: ctrl.signal, cache: 'no-store' });
    if (!res.ok) {
      throw new Error(`HTTP ${res.status} for ${url}`);
    }
    return await res.text();
  }
  finally {
    clearTimeout(timer);
  }
}
package/package.json CHANGED
@@ -1,14 +1,14 @@
1
1
  {
2
2
  "name": "@react-spectrum/mcp",
3
- "version": "0.1.0",
4
- "description": "MCP server for React Spectrum (S2) and React Aria documentation",
3
+ "version": "1.0.0",
4
+ "description": "MCP server for React Spectrum (S2) documentation",
5
5
  "type": "module",
6
- "bin": "dist/index.js",
6
+ "bin": "dist/s2/src/index.js",
7
7
  "scripts": {
8
8
  "prepublishOnly": "yarn build",
9
9
  "build": "node ./scripts/build-data.mjs && tsc -p tsconfig.json",
10
- "start": "node dist/index.js",
11
- "dev": "node --enable-source-maps dist/index.js"
10
+ "start": "node dist/s2/src/index.js",
11
+ "dev": "node --enable-source-maps dist/s2/src/index.js"
12
12
  },
13
13
  "dependencies": {
14
14
  "@modelcontextprotocol/sdk": "^1.17.3",
@@ -30,10 +30,6 @@
30
30
  "type": "git",
31
31
  "url": "https://github.com/adobe/react-spectrum"
32
32
  },
33
- "main": "dist/main.js",
34
- "module": "dist/module.js",
35
- "types": "dist/types.d.ts",
36
- "source": "src/index.ts",
37
33
  "files": [
38
34
  "dist",
39
35
  "src"
@@ -41,5 +37,9 @@
41
37
  "sideEffects": [
42
38
  "*.css"
43
39
  ],
44
- "gitHead": "0bda51183baa23306342af32a82012ea0fe0f2dc"
40
+ "main": "dist/main.js",
41
+ "module": "dist/module.js",
42
+ "types": "dist/types.d.ts",
43
+ "source": "src/index.ts",
44
+ "gitHead": "4d838da5bfe36abb35aed166995a9ef63825370f"
45
45
  }