@gemini-designer/mcp-server 0.1.39 → 0.1.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/dist/context/builder.d.ts.map +1 -1
  2. package/dist/context/builder.js +29 -28
  3. package/dist/context/builder.js.map +1 -1
  4. package/dist/output/file-bundle.d.ts +1 -1
  5. package/dist/tools/catalog-components.d.ts.map +1 -1
  6. package/dist/tools/catalog-components.js +29 -3
  7. package/dist/tools/catalog-components.js.map +1 -1
  8. package/dist/tools/detect-ui-stack.d.ts.map +1 -1
  9. package/dist/tools/detect-ui-stack.js +45 -14
  10. package/dist/tools/detect-ui-stack.js.map +1 -1
  11. package/dist/tools/repo-search.d.ts.map +1 -1
  12. package/dist/tools/repo-search.js +26 -120
  13. package/dist/tools/repo-search.js.map +1 -1
  14. package/dist/tools/repo-tree.d.ts.map +1 -1
  15. package/dist/tools/repo-tree.js +44 -19
  16. package/dist/tools/repo-tree.js.map +1 -1
  17. package/dist/utils/concurrency.d.ts +2 -0
  18. package/dist/utils/concurrency.d.ts.map +1 -0
  19. package/dist/utils/concurrency.js +16 -0
  20. package/dist/utils/concurrency.js.map +1 -0
  21. package/dist/utils/ripgrep.d.ts +45 -0
  22. package/dist/utils/ripgrep.d.ts.map +1 -0
  23. package/dist/utils/ripgrep.js +312 -0
  24. package/dist/utils/ripgrep.js.map +1 -0
  25. package/dist/utils/walk.d.ts +1 -0
  26. package/dist/utils/walk.d.ts.map +1 -1
  27. package/dist/utils/walk.js +2 -1
  28. package/dist/utils/walk.js.map +1 -1
  29. package/package.json +2 -1
  30. package/src/__tests__/ripgrep.test.ts +136 -0
  31. package/src/context/builder.ts +27 -30
  32. package/src/tools/catalog-components.ts +28 -3
  33. package/src/tools/detect-ui-stack.ts +51 -14
  34. package/src/tools/repo-search.ts +27 -121
  35. package/src/tools/repo-tree.ts +44 -17
  36. package/src/utils/concurrency.ts +21 -0
  37. package/src/utils/ripgrep.ts +360 -0
  38. package/src/utils/walk.ts +3 -3
@@ -11,6 +11,7 @@ import * as path from 'node:path';
11
11
  import { Config } from '../config/index.js';
12
12
  import { isPathAllowed, isSensitiveFile, sanitizeContent } from './filter.js';
13
13
  import { assertReadablePath } from './guards.js';
14
+ import { ripgrepListFiles } from '../utils/ripgrep.js';
14
15
 
15
16
  // Token estimation: ~4 characters per token for English text/code
16
17
  const CHARS_PER_TOKEN = 4;
@@ -190,38 +191,34 @@ export async function discoverUIFiles(directory: string, config: Config): Promis
190
191
  /tailwind\.config\./,
191
192
  ];
192
193
 
193
- const files: string[] = [];
194
-
195
- function scan(dir: string, depth: number = 0) {
196
- if (depth > 3) return; // Max depth
197
-
198
- try {
199
- const entries = fs.readdirSync(dir, { withFileTypes: true });
200
-
201
- for (const entry of entries) {
202
- const fullPath = path.join(dir, entry.name);
203
-
204
- // Skip node_modules, .git, etc.
205
- if (entry.isDirectory()) {
206
- if (['node_modules', '.git', 'dist', 'build', '.next', '.nuxt'].includes(entry.name)) {
207
- continue;
208
- }
209
- scan(fullPath, depth + 1);
210
- } else if (entry.isFile()) {
211
- // Check if matches UI patterns
212
- if (uiPatterns.some((pattern) => pattern.test(entry.name))) {
213
- if (isPathAllowed(fullPath, config.allowedPaths) && !isSensitiveFile(fullPath)) {
214
- files.push(fullPath);
215
- }
216
- }
217
- }
218
- }
219
- } catch {
220
- // Skip directories we can't read
221
- }
194
+ const excludedDirs = ['node_modules', '.git', 'dist', 'build', '.next', '.nuxt'];
195
+ const excludeGlobs = excludedDirs.map((d) => `!**/${d}/**`);
196
+
197
+ let list: Awaited<ReturnType<typeof ripgrepListFiles>>;
198
+ try {
199
+ list = await ripgrepListFiles({
200
+ cwd: directory,
201
+ includeHidden: true,
202
+ noIgnore: true,
203
+ maxFiles: 5000,
204
+ maxDepth: 3,
205
+ globs: excludeGlobs,
206
+ cacheTtlMs: 1500,
207
+ });
208
+ } catch {
209
+ // Fall back to empty on rg failures; caller can still operate without auto-discovery.
210
+ return [];
222
211
  }
223
212
 
224
- scan(directory);
213
+ const files: string[] = [];
214
+ for (const rel of list.files) {
215
+ const name = path.basename(rel);
216
+ if (!uiPatterns.some((pattern) => pattern.test(name))) continue;
217
+ const abs = path.resolve(directory, rel);
218
+ if (!isPathAllowed(abs, config.allowedPaths)) continue;
219
+ if (isSensitiveFile(abs)) continue;
220
+ files.push(abs);
221
+ }
225
222
 
226
223
  // Sort by relevance
227
224
  return sortByRelevance(files);
@@ -15,7 +15,8 @@ import * as path from 'node:path';
15
15
  import { Config } from '../config/index.js';
16
16
  import { assertReadableDir } from '../context/guards.js';
17
17
  import { getProjectRootAbs } from '../context/project-root.js';
18
- import { walkFiles, toPosixPath } from '../utils/walk.js';
18
+ import { DEFAULT_EXCLUDE_DIR_NAMES, toPosixPath } from '../utils/walk.js';
19
+ import { extensionGlob, ripgrepListFiles } from '../utils/ripgrep.js';
19
20
  import { buildComponentCatalog } from '../components/catalog.js';
20
21
  import { resolveWithinProjectRoot } from '../output/path-resolve.js';
21
22
 
@@ -71,8 +72,32 @@ export function registerCatalogComponents(server: McpServer, config: Config): vo
71
72
 
72
73
  const allFiles: string[] = [];
73
74
  for (const r of safeRoots) {
74
- const found = walkFiles(r, { includeExtensions: extensions, maxFiles });
75
- allFiles.push(...found);
75
+ const extGlob = extensionGlob(extensions);
76
+ const globs: string[] = [];
77
+ for (const d of DEFAULT_EXCLUDE_DIR_NAMES) globs.push(`!**/${d}/**`);
78
+ if (extGlob) globs.push(extGlob);
79
+
80
+ let found: Awaited<ReturnType<typeof ripgrepListFiles>>;
81
+ try {
82
+ found = await ripgrepListFiles({
83
+ cwd: r,
84
+ includeHidden: true,
85
+ noIgnore: true,
86
+ maxFiles,
87
+ globs,
88
+ cacheTtlMs: 2500,
89
+ });
90
+ } catch (error) {
91
+ const message = error instanceof Error ? error.message : 'ripgrep failed';
92
+ return {
93
+ content: [{ type: 'text' as const, text: `Error: ${message}` }],
94
+ isError: true,
95
+ };
96
+ }
97
+
98
+ for (const rel of found.files) {
99
+ allFiles.push(path.resolve(r, rel));
100
+ }
76
101
  }
77
102
 
78
103
  // Use projectRoot (when provided) so paths are stable for agents in monorepos
@@ -17,6 +17,7 @@ import * as path from 'node:path';
17
17
  import { Config } from '../config/index.js';
18
18
  import { assertReadableDir } from '../context/guards.js';
19
19
  import { detectUiStack } from '../stack/detect.js';
20
+ import { ripgrepListFiles } from '../utils/ripgrep.js';
20
21
 
21
22
  const inputSchema = {
22
23
  root: z
@@ -26,19 +27,26 @@ const inputSchema = {
26
27
  .default(process.cwd()),
27
28
  };
28
29
 
29
- function findPackageJsonRoots(rootDir: string, maxDepth: number): string[] {
30
- const deny = new Set([
31
- 'node_modules',
32
- '.git',
33
- '.next',
34
- 'dist',
35
- 'build',
36
- 'out',
37
- 'coverage',
38
- '.turbo',
39
- '.cache',
40
- 'tmp',
41
- ]);
30
+ const PACKAGE_JSON_DENY_DIRS = [
31
+ 'node_modules',
32
+ '.git',
33
+ '.next',
34
+ 'dist',
35
+ 'build',
36
+ 'out',
37
+ 'coverage',
38
+ '.turbo',
39
+ '.cache',
40
+ 'tmp',
41
+ ];
42
+
43
+ function isWithinPath(parent: string, child: string): boolean {
44
+ const rel = path.relative(parent, child);
45
+ return rel === '' || (!rel.startsWith(`..${path.sep}`) && rel !== '..' && !path.isAbsolute(rel));
46
+ }
47
+
48
+ function findPackageJsonRootsFs(rootDir: string, maxDepth: number): string[] {
49
+ const deny = new Set(PACKAGE_JSON_DENY_DIRS);
42
50
 
43
51
  const out: string[] = [];
44
52
  const q: Array<{ dir: string; depth: number }> = [{ dir: rootDir, depth: 0 }];
@@ -74,6 +82,35 @@ function findPackageJsonRoots(rootDir: string, maxDepth: number): string[] {
74
82
  return out.sort();
75
83
  }
76
84
 
85
+ async function findPackageJsonRootsRg(rootDir: string, maxDepth: number): Promise<string[]> {
86
+ const globs = PACKAGE_JSON_DENY_DIRS.map((d) => `!**/${d}/**`);
87
+ // Match previous behavior: don't descend into dot-directories.
88
+ globs.push('!**/.*/**');
89
+ globs.push('**/package.json');
90
+
91
+ const files = await ripgrepListFiles({
92
+ cwd: rootDir,
93
+ includeHidden: true,
94
+ noIgnore: true,
95
+ maxFiles: 2000,
96
+ maxDepth,
97
+ globs,
98
+ cacheTtlMs: 2500,
99
+ });
100
+
101
+ const absDirs = [...new Set(files.files.map((p) => path.dirname(p)).filter((d) => d !== '.' && d !== ''))]
102
+ .map((d) => path.resolve(rootDir, d))
103
+ .sort((a, b) => a.length - b.length);
104
+
105
+ const kept: string[] = [];
106
+ for (const d of absDirs) {
107
+ if (kept.some((k) => isWithinPath(k, d))) continue;
108
+ kept.push(d);
109
+ }
110
+
111
+ return kept.sort();
112
+ }
113
+
77
114
  export function registerDetectUIStack(server: McpServer, config: Config): void {
78
115
  server.registerTool(
79
116
  'detect_ui_stack',
@@ -104,7 +141,7 @@ export function registerDetectUIStack(server: McpServer, config: Config): void {
104
141
  // try to find subprojects and either auto-select or guide the user.
105
142
  const missingPkg = result.warnings.some((w) => w.includes('package.json not found'));
106
143
  if (missingPkg) {
107
- const candidates = findPackageJsonRoots(safeRoot, 3);
144
+ const candidates = await findPackageJsonRootsRg(safeRoot, 3).catch(() => findPackageJsonRootsFs(safeRoot, 3));
108
145
  if (candidates.length === 1) {
109
146
  effectiveRoot = candidates[0];
110
147
  result = detectUiStack(effectiveRoot);
@@ -7,12 +7,12 @@
7
7
 
8
8
  import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
9
9
  import { z } from 'zod';
10
- import * as fs from 'node:fs';
11
10
  import * as path from 'node:path';
12
11
 
13
12
  import { Config } from '../config/index.js';
14
13
  import { getProjectRootAbs } from '../context/project-root.js';
15
14
  import { isSensitiveFile } from '../context/filter.js';
15
+ import { ripgrepJsonSearch } from '../utils/ripgrep.js';
16
16
 
17
17
  const inputSchema = {
18
18
  projectRoot: z
@@ -20,7 +20,7 @@ const inputSchema = {
20
20
  .optional()
21
21
  .describe('Optional project root to search (defaults to cwd).'),
22
22
  pattern: z.string().min(1).describe('String pattern to search for.'),
23
- regex: z.boolean().default(false).describe('Treat pattern as a JavaScript RegExp source.'),
23
+ regex: z.boolean().default(false).describe('Treat pattern as a ripgrep regex (PCRE2).'),
24
24
  caseSensitive: z.boolean().default(false).describe('Case-sensitive search (default: false).'),
25
25
  maxResults: z.number().int().min(1).max(2000).default(200).describe('Maximum matches to return.'),
26
26
  maxFileBytes: z
@@ -34,7 +34,7 @@ const inputSchema = {
34
34
  includeBinary: z.boolean().default(false).describe('If true, attempt to search binary files (not recommended).'),
35
35
  };
36
36
 
37
- const DEFAULT_IGNORED_DIRS = new Set([
37
+ const DEFAULT_IGNORED_DIRS = [
38
38
  'node_modules',
39
39
  '.git',
40
40
  'dist',
@@ -46,67 +46,7 @@ const DEFAULT_IGNORED_DIRS = new Set([
46
46
  'coverage',
47
47
  '.cache',
48
48
  '.gemini-designer',
49
- ]);
50
-
51
- const TEXT_EXTENSIONS = new Set([
52
- '.ts',
53
- '.tsx',
54
- '.js',
55
- '.jsx',
56
- '.mjs',
57
- '.cjs',
58
- '.json',
59
- '.md',
60
- '.html',
61
- '.css',
62
- '.scss',
63
- '.sass',
64
- '.less',
65
- '.styl',
66
- '.vue',
67
- '.svelte',
68
- '.astro',
69
- '.yml',
70
- '.yaml',
71
- '.toml',
72
- '.txt',
73
- ]);
74
-
75
- function isBinaryBuffer(buf: Buffer): boolean {
76
- // Heuristic: if it contains a NUL byte, treat as binary.
77
- return buf.includes(0);
78
- }
79
-
80
- function normalizeRel(root: string, abs: string): string {
81
- return path.relative(root, abs).replace(/\\/g, '/');
82
- }
83
-
84
- function walkFiles(root: string, maxFiles: number = 50_000): string[] {
85
- const out: string[] = [];
86
- const stack: string[] = [root];
87
-
88
- while (stack.length) {
89
- const dir = stack.pop() as string;
90
- let entries: fs.Dirent[];
91
- try {
92
- entries = fs.readdirSync(dir, { withFileTypes: true });
93
- } catch {
94
- continue;
95
- }
96
-
97
- for (const e of entries) {
98
- if (out.length >= maxFiles) return out;
99
- const abs = path.join(dir, e.name);
100
- if (e.isDirectory()) {
101
- if (DEFAULT_IGNORED_DIRS.has(e.name)) continue;
102
- stack.push(abs);
103
- } else if (e.isFile()) {
104
- out.push(abs);
105
- }
106
- }
107
- }
108
- return out;
109
- }
49
+ ];
110
50
 
111
51
  export function registerRepoSearch(server: McpServer, _config: Config): void {
112
52
  server.registerTool(
@@ -129,63 +69,28 @@ export function registerRepoSearch(server: McpServer, _config: Config): void {
129
69
  const includePreview = args.includePreview !== false;
130
70
  const includeBinary = args.includeBinary === true;
131
71
 
132
- let re: RegExp;
72
+ const ignoredDirGlobs = DEFAULT_IGNORED_DIRS.map((d) => `!**/${d}/**`);
73
+
74
+ let res: Awaited<ReturnType<typeof ripgrepJsonSearch>>;
133
75
  try {
134
- re = regex
135
- ? new RegExp(pattern, caseSensitive ? 'g' : 'gi')
136
- : new RegExp(pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), caseSensitive ? 'g' : 'gi');
76
+ res = await ripgrepJsonSearch({
77
+ cwd: projectRootAbs,
78
+ pattern,
79
+ fixedStrings: !regex,
80
+ caseSensitive,
81
+ includeHidden: true,
82
+ includeBinary,
83
+ maxFileBytes,
84
+ maxResults,
85
+ globs: ignoredDirGlobs,
86
+ usePcre2: regex,
87
+ shouldIgnorePath: (abs) => isSensitiveFile(abs),
88
+ includePreview,
89
+ previewMaxChars: 240,
90
+ });
137
91
  } catch (e) {
138
92
  const msg = e instanceof Error ? e.message : String(e);
139
- return { content: [{ type: 'text' as const, text: `Error: invalid pattern/regex: ${msg}` }], isError: true };
140
- }
141
-
142
- const matches: Array<{ path: string; line: number; column: number; preview?: string }> = [];
143
- const scannedFiles: string[] = [];
144
- const files = walkFiles(projectRootAbs);
145
-
146
- for (const abs of files) {
147
- if (matches.length >= maxResults) break;
148
- if (isSensitiveFile(abs)) continue;
149
-
150
- const ext = path.extname(abs).toLowerCase();
151
- if (!includeBinary && !TEXT_EXTENSIONS.has(ext)) continue;
152
-
153
- let st: fs.Stats;
154
- try {
155
- st = fs.statSync(abs);
156
- } catch {
157
- continue;
158
- }
159
- if (st.size > maxFileBytes) continue;
160
-
161
- let buf: Buffer;
162
- try {
163
- buf = fs.readFileSync(abs);
164
- } catch {
165
- continue;
166
- }
167
-
168
- if (!includeBinary && isBinaryBuffer(buf)) continue;
169
-
170
- const text = buf.toString('utf-8');
171
- scannedFiles.push(normalizeRel(projectRootAbs, abs));
172
-
173
- const lines = text.split(/\r?\n/);
174
- for (let i = 0; i < lines.length; i++) {
175
- if (matches.length >= maxResults) break;
176
- const line = lines[i];
177
- re.lastIndex = 0;
178
- let m: RegExpExecArray | null;
179
- while ((m = re.exec(line)) && matches.length < maxResults) {
180
- matches.push({
181
- path: normalizeRel(projectRootAbs, abs),
182
- line: i + 1,
183
- column: (m.index || 0) + 1,
184
- preview: includePreview ? line.slice(0, 240) : undefined,
185
- });
186
- if (m.index === re.lastIndex) re.lastIndex++; // avoid infinite loops
187
- }
188
- }
93
+ return { content: [{ type: 'text' as const, text: `Error: ripgrep search failed: ${msg}` }], isError: true };
189
94
  }
190
95
 
191
96
  const out = {
@@ -193,9 +98,10 @@ export function registerRepoSearch(server: McpServer, _config: Config): void {
193
98
  pattern,
194
99
  regex,
195
100
  caseSensitive,
196
- matchCount: matches.length,
197
- matches,
198
- scannedFilesCount: scannedFiles.length,
101
+ matchCount: res.matches.length,
102
+ matches: res.matches,
103
+ scannedFilesCount: res.scannedFilesCount,
104
+ truncated: res.truncated,
199
105
  };
200
106
 
201
107
  return { content: [{ type: 'text' as const, text: JSON.stringify(out, null, 2) }] };
@@ -13,7 +13,9 @@ import { Config } from '../config/index.js';
13
13
  import { getProjectRootAbs } from '../context/project-root.js';
14
14
  import { assertReadableDir } from '../context/guards.js';
15
15
  import { isSensitiveFile } from '../context/filter.js';
16
- import { walkFiles, toPosixPath } from '../utils/walk.js';
16
+ import { toPosixPath } from '../utils/walk.js';
17
+ import { extensionGlob, ripgrepListFiles } from '../utils/ripgrep.js';
18
+ import { mapWithConcurrency } from '../utils/concurrency.js';
17
19
 
18
20
  const inputSchema = {
19
21
  projectRoot: z
@@ -59,29 +61,54 @@ export function registerRepoTree(server: McpServer, config: Config): void {
59
61
  const maxFiles = (args.maxFiles as number) || 10_000;
60
62
  const includeMeta = args.includeMeta === true;
61
63
 
62
- const absFiles = walkFiles(safeRoot, { includeExtensions, excludeDirNames, maxFiles });
64
+ const defaultExcludedDirs = ['node_modules', '.git', 'dist', 'build', '.next', '.nuxt', 'coverage', '.cache'];
65
+ const excludeGlobs = [...new Set([...defaultExcludedDirs, ...excludeDirNames])]
66
+ .map((d) => d.trim())
67
+ .filter(Boolean)
68
+ .map((d) => `!**/${d}/**`);
69
+
70
+ const extGlob = extensionGlob(includeExtensions);
71
+ const includeGlobs = extGlob ? [extGlob] : [];
72
+
73
+ let list: Awaited<ReturnType<typeof ripgrepListFiles>>;
74
+ try {
75
+ list = await ripgrepListFiles({
76
+ cwd: safeRoot,
77
+ includeHidden: true,
78
+ noIgnore: true,
79
+ maxFiles,
80
+ globs: [...excludeGlobs, ...includeGlobs],
81
+ cacheTtlMs: 1500,
82
+ });
83
+ } catch (error) {
84
+ const message = error instanceof Error ? error.message : 'ripgrep failed';
85
+ return { content: [{ type: 'text' as const, text: `Error: ${message}` }], isError: true };
86
+ }
87
+
88
+ const relFiles = list.files
89
+ .map((rel) => toPosixPath(rel))
90
+ .filter((rel) => rel && !rel.startsWith('..') && !isSensitiveFile(rel));
63
91
 
64
92
  const files: Array<any> = [];
65
- for (const abs of absFiles) {
66
- const rel = toPosixPath(path.relative(safeRoot, abs));
67
- if (!rel || rel.startsWith('..')) continue;
68
- if (isSensitiveFile(rel)) continue;
69
- if (!includeMeta) {
70
- files.push({ path: rel });
71
- continue;
72
- }
73
- try {
74
- const st = fs.statSync(abs);
75
- files.push({ path: rel, sizeBytes: st.size, mtimeMs: st.mtimeMs });
76
- } catch {
77
- files.push({ path: rel });
78
- }
93
+ if (!includeMeta) {
94
+ for (const rel of relFiles) files.push({ path: rel });
95
+ } else {
96
+ const meta = await mapWithConcurrency(relFiles, 32, async (rel) => {
97
+ const abs = path.join(safeRoot, rel);
98
+ try {
99
+ const st = await fs.promises.stat(abs);
100
+ return { path: rel, sizeBytes: st.size, mtimeMs: st.mtimeMs };
101
+ } catch {
102
+ return { path: rel };
103
+ }
104
+ });
105
+ files.push(...meta);
79
106
  }
80
107
 
81
108
  const out = {
82
109
  root: safeRoot,
83
110
  count: files.length,
84
- truncated: absFiles.length >= maxFiles,
111
+ truncated: list.truncated,
85
112
  files,
86
113
  };
87
114
 
@@ -0,0 +1,21 @@
1
+ export async function mapWithConcurrency<T, R>(
2
+ items: T[],
3
+ concurrency: number,
4
+ mapper: (item: T, index: number) => Promise<R>
5
+ ): Promise<R[]> {
6
+ const limit = Math.max(1, Math.floor(concurrency));
7
+ const results = new Array<R>(items.length);
8
+ let nextIndex = 0;
9
+
10
+ const workers = Array.from({ length: Math.min(limit, items.length) }, async () => {
11
+ while (true) {
12
+ const idx = nextIndex++;
13
+ if (idx >= items.length) break;
14
+ results[idx] = await mapper(items[idx], idx);
15
+ }
16
+ });
17
+
18
+ await Promise.all(workers);
19
+ return results;
20
+ }
21
+