@j0hanz/filesystem-context-mcp 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +369 -0
- package/dist/__tests__/errors.test.d.ts +2 -0
- package/dist/__tests__/errors.test.d.ts.map +1 -0
- package/dist/__tests__/errors.test.js +88 -0
- package/dist/__tests__/errors.test.js.map +1 -0
- package/dist/__tests__/file-operations.test.d.ts +2 -0
- package/dist/__tests__/file-operations.test.d.ts.map +1 -0
- package/dist/__tests__/file-operations.test.js +230 -0
- package/dist/__tests__/file-operations.test.js.map +1 -0
- package/dist/__tests__/lib/errors.test.d.ts +2 -0
- package/dist/__tests__/lib/errors.test.d.ts.map +1 -0
- package/dist/__tests__/lib/errors.test.js +156 -0
- package/dist/__tests__/lib/errors.test.js.map +1 -0
- package/dist/__tests__/lib/file-operations.test.d.ts +2 -0
- package/dist/__tests__/lib/file-operations.test.d.ts.map +1 -0
- package/dist/__tests__/lib/file-operations.test.js +417 -0
- package/dist/__tests__/lib/file-operations.test.js.map +1 -0
- package/dist/__tests__/lib/fs-helpers.test.d.ts +2 -0
- package/dist/__tests__/lib/fs-helpers.test.d.ts.map +1 -0
- package/dist/__tests__/lib/fs-helpers.test.js +183 -0
- package/dist/__tests__/lib/fs-helpers.test.js.map +1 -0
- package/dist/__tests__/lib/path-validation.test.d.ts +2 -0
- package/dist/__tests__/lib/path-validation.test.d.ts.map +1 -0
- package/dist/__tests__/lib/path-validation.test.js +103 -0
- package/dist/__tests__/lib/path-validation.test.js.map +1 -0
- package/dist/__tests__/path-validation.test.d.ts +2 -0
- package/dist/__tests__/path-validation.test.d.ts.map +1 -0
- package/dist/__tests__/path-validation.test.js +92 -0
- package/dist/__tests__/path-validation.test.js.map +1 -0
- package/dist/config/types.d.ts +222 -0
- package/dist/config/types.d.ts.map +1 -0
- package/dist/config/types.js +23 -0
- package/dist/config/types.js.map +1 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +50 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/constants.d.ts +16 -0
- package/dist/lib/constants.d.ts.map +1 -0
- package/dist/lib/constants.js +335 -0
- package/dist/lib/constants.js.map +1 -0
- package/dist/lib/errors.d.ts +33 -0
- package/dist/lib/errors.d.ts.map +1 -0
- package/dist/lib/errors.js +205 -0
- package/dist/lib/errors.js.map +1 -0
- package/dist/lib/file-operations.d.ts +69 -0
- package/dist/lib/file-operations.d.ts.map +1 -0
- package/dist/lib/file-operations.js +1003 -0
- package/dist/lib/file-operations.js.map +1 -0
- package/dist/lib/formatters.d.ts +30 -0
- package/dist/lib/formatters.d.ts.map +1 -0
- package/dist/lib/formatters.js +204 -0
- package/dist/lib/formatters.js.map +1 -0
- package/dist/lib/fs-helpers.d.ts +29 -0
- package/dist/lib/fs-helpers.d.ts.map +1 -0
- package/dist/lib/fs-helpers.js +295 -0
- package/dist/lib/fs-helpers.js.map +1 -0
- package/dist/lib/path-utils.d.ts +12 -0
- package/dist/lib/path-utils.d.ts.map +1 -0
- package/dist/lib/path-utils.js +34 -0
- package/dist/lib/path-utils.js.map +1 -0
- package/dist/lib/path-validation.d.ts +31 -0
- package/dist/lib/path-validation.d.ts.map +1 -0
- package/dist/lib/path-validation.js +181 -0
- package/dist/lib/path-validation.js.map +1 -0
- package/dist/lib/roots-utils.d.ts +7 -0
- package/dist/lib/roots-utils.d.ts.map +1 -0
- package/dist/lib/roots-utils.js +39 -0
- package/dist/lib/roots-utils.js.map +1 -0
- package/dist/lib/types.d.ts +6 -0
- package/dist/lib/types.d.ts.map +1 -0
- package/dist/lib/types.js +2 -0
- package/dist/lib/types.js.map +1 -0
- package/dist/schemas/common.d.ts +41 -0
- package/dist/schemas/common.d.ts.map +1 -0
- package/dist/schemas/common.js +21 -0
- package/dist/schemas/common.js.map +1 -0
- package/dist/schemas/index.d.ts +3 -0
- package/dist/schemas/index.d.ts.map +1 -0
- package/dist/schemas/index.js +5 -0
- package/dist/schemas/index.js.map +1 -0
- package/dist/schemas/inputs.d.ts +72 -0
- package/dist/schemas/inputs.d.ts.map +1 -0
- package/dist/schemas/inputs.js +326 -0
- package/dist/schemas/inputs.js.map +1 -0
- package/dist/schemas/outputs.d.ts +476 -0
- package/dist/schemas/outputs.d.ts.map +1 -0
- package/dist/schemas/outputs.js +181 -0
- package/dist/schemas/outputs.js.map +1 -0
- package/dist/server.d.ts +5 -0
- package/dist/server.d.ts.map +1 -0
- package/dist/server.js +83 -0
- package/dist/server.js.map +1 -0
- package/dist/tools/analyze-directory.d.ts +3 -0
- package/dist/tools/analyze-directory.d.ts.map +1 -0
- package/dist/tools/analyze-directory.js +57 -0
- package/dist/tools/analyze-directory.js.map +1 -0
- package/dist/tools/directory-tree.d.ts +3 -0
- package/dist/tools/directory-tree.d.ts.map +1 -0
- package/dist/tools/directory-tree.js +47 -0
- package/dist/tools/directory-tree.js.map +1 -0
- package/dist/tools/get-file-info.d.ts +3 -0
- package/dist/tools/get-file-info.d.ts.map +1 -0
- package/dist/tools/get-file-info.js +44 -0
- package/dist/tools/get-file-info.js.map +1 -0
- package/dist/tools/index.d.ts +3 -0
- package/dist/tools/index.d.ts.map +1 -0
- package/dist/tools/index.js +23 -0
- package/dist/tools/index.js.map +1 -0
- package/dist/tools/list-allowed-dirs.d.ts +3 -0
- package/dist/tools/list-allowed-dirs.d.ts.map +1 -0
- package/dist/tools/list-allowed-dirs.js +23 -0
- package/dist/tools/list-allowed-dirs.js.map +1 -0
- package/dist/tools/list-directory.d.ts +3 -0
- package/dist/tools/list-directory.d.ts.map +1 -0
- package/dist/tools/list-directory.js +60 -0
- package/dist/tools/list-directory.js.map +1 -0
- package/dist/tools/read-file.d.ts +3 -0
- package/dist/tools/read-file.d.ts.map +1 -0
- package/dist/tools/read-file.js +55 -0
- package/dist/tools/read-file.js.map +1 -0
- package/dist/tools/read-media-file.d.ts +3 -0
- package/dist/tools/read-media-file.d.ts.map +1 -0
- package/dist/tools/read-media-file.js +48 -0
- package/dist/tools/read-media-file.js.map +1 -0
- package/dist/tools/read-multiple-files.d.ts +3 -0
- package/dist/tools/read-multiple-files.d.ts.map +1 -0
- package/dist/tools/read-multiple-files.js +53 -0
- package/dist/tools/read-multiple-files.js.map +1 -0
- package/dist/tools/search-content.d.ts +3 -0
- package/dist/tools/search-content.d.ts.map +1 -0
- package/dist/tools/search-content.js +67 -0
- package/dist/tools/search-content.js.map +1 -0
- package/dist/tools/search-files.d.ts +3 -0
- package/dist/tools/search-files.d.ts.map +1 -0
- package/dist/tools/search-files.js +51 -0
- package/dist/tools/search-files.js.map +1 -0
- package/dist/utils/index.d.ts +2 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +2 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/response-helpers.d.ts +22 -0
- package/dist/utils/response-helpers.d.ts.map +1 -0
- package/dist/utils/response-helpers.js +24 -0
- package/dist/utils/response-helpers.js.map +1 -0
- package/package.json +61 -0
|
@@ -0,0 +1,1003 @@
|
|
|
1
|
+
import * as fs from 'node:fs/promises';
|
|
2
|
+
import * as path from 'node:path';
|
|
3
|
+
import * as readline from 'node:readline';
|
|
4
|
+
import { createReadStream } from 'node:fs';
|
|
5
|
+
import fg from 'fast-glob';
|
|
6
|
+
import { Minimatch } from 'minimatch';
|
|
7
|
+
import safeRegex from 'safe-regex2';
|
|
8
|
+
import { DEFAULT_MAX_DEPTH, DEFAULT_MAX_RESULTS, DEFAULT_TOP_N, DIR_TRAVERSAL_CONCURRENCY, MAX_LINE_CONTENT_LENGTH, MAX_MEDIA_FILE_SIZE, MAX_SEARCHABLE_FILE_SIZE, MAX_TEXT_FILE_SIZE, MIME_TYPES, PARALLEL_CONCURRENCY, REGEX_MATCH_TIMEOUT_MS, } from './constants.js';
|
|
9
|
+
import { ErrorCode, McpError } from './errors.js';
|
|
10
|
+
import { getFileType, isHidden, isProbablyBinary, readFile, runWorkQueue, } from './fs-helpers.js';
|
|
11
|
+
import { validateExistingPath, validateExistingPathDetailed, } from './path-validation.js';
|
|
12
|
+
/**
 * Report whether an optional deadline has already passed.
 *
 * @param {number|undefined} deadlineMs - Absolute epoch-ms deadline, or
 *   undefined when no timeout was requested.
 * @returns {boolean} True only when a deadline exists and is in the past.
 */
function shouldStopBecauseOfTimeout(deadlineMs) {
    if (deadlineMs === undefined) {
        return false; // No deadline configured; never stop for time.
    }
    return Date.now() > deadlineMs;
}
|
|
15
|
+
/**
 * Run an async processor over `items` in fixed-size sequential batches,
 * with each batch executed concurrently via Promise.allSettled.
 *
 * Per-item failures do not abort the run: fulfilled values are collected
 * in completion order, and each rejection is recorded with the item's
 * global index and a normalized Error.
 *
 * @param {Array} items - Work items.
 * @param {Function} processor - Async mapper applied to each item.
 * @param {number} [concurrency] - Batch size; defaults to PARALLEL_CONCURRENCY.
 * @returns {Promise<{results: Array, errors: Array<{index: number, error: Error}>}>}
 */
async function processInParallel(items, processor, concurrency = PARALLEL_CONCURRENCY) {
    const fulfilled = [];
    const failed = [];
    for (let offset = 0; offset < items.length; offset += concurrency) {
        const chunk = items.slice(offset, offset + concurrency);
        // Pass `processor` directly so it keeps receiving map's usual
        // (item, index, array) arguments, exactly as before.
        const settled = await Promise.allSettled(chunk.map(processor));
        settled.forEach((outcome, position) => {
            if (outcome?.status === 'fulfilled') {
                fulfilled.push(outcome.value);
            }
            else if (outcome?.status === 'rejected') {
                const reason = outcome.reason;
                failed.push({
                    index: offset + position, // index into the original `items`
                    error: reason instanceof Error ? reason : new Error(String(reason)),
                });
            }
        });
    }
    return { results: fulfilled, errors: failed };
}
|
|
37
|
+
/**
 * Count how many times a global regex matches within one line of text.
 *
 * The regex's lastIndex is reset first (global regexes are stateful).
 * Zero-width matches are advanced past manually so exec cannot loop
 * forever. Every 100 matches the wall clock is checked against the
 * time budget; on overrun the function logs and returns -1.
 *
 * @param {string} line - The text to scan.
 * @param {RegExp} regex - A /g (or /gi) regex.
 * @param {number} [timeoutMs] - Time budget; defaults to REGEX_MATCH_TIMEOUT_MS.
 * @returns {number} Match count, or -1 to signal a timeout.
 */
function countRegexMatches(line, regex, timeoutMs = REGEX_MATCH_TIMEOUT_MS) {
    regex.lastIndex = 0; // start scanning from the beginning of the line
    const deadline = Date.now() + timeoutMs;
    let total = 0;
    for (let m = regex.exec(line); m !== null; m = regex.exec(line)) {
        total += 1;
        if (m[0].length === 0) {
            // Zero-width match: bump lastIndex or exec would never advance.
            regex.lastIndex += 1;
        }
        // Check the clock only every 100 hits to keep the hot path cheap.
        if (total % 100 === 0 && Date.now() > deadline) {
            console.error(`[countRegexMatches] Regex matching timed out after ${timeoutMs}ms on line (length: ${line.length})`);
            return -1; // Signal timeout
        }
    }
    return total;
}
|
|
54
|
+
/**
 * Check if a regex pattern is simple enough to be safe without full ReDoS analysis.
 * This reduces false positives from safe-regex2 for common safe patterns.
 *
 * A pattern is considered "simple safe" if it:
 * - Has no nested quantifiers (e.g., (a+)+ or (a*)*) which are the main ReDoS concern
 * - Has no high repetition counts (e.g., {25} or higher) that safe-regex2 would flag
 *
 * This is a quick heuristic, not a full safety proof.
 *
 * @param {string} pattern - Regex source text to inspect.
 * @returns {boolean} True when the pattern can skip the full safe-regex2 check.
 */
function isSimpleSafePattern(pattern) {
    // Patterns with nested quantifiers are the main ReDoS concern.
    // Look for a quantifier followed by a closing paren then another quantifier:
    // matches patterns like (a+)+, (a*)+, (a+)*, (a?)+, (a{2})+
    const nestedQuantifierPattern = /[+*?}]\s*\)\s*[+*?{]/;
    if (nestedQuantifierPattern.test(pattern)) {
        return false; // Potentially dangerous, needs full check
    }
    // Check for high repetition counts that safe-regex2 would flag (default limit is 25).
    // Matches {n}, {n,} or {n,m}. BUGFIX: the upper bound m is now inspected as
    // well, so e.g. {1,100} no longer slips past as "simple safe"; for the
    // open-ended form {n,} only n is available, as before.
    const repetitionPattern = /\{(\d+)(?:,(\d*))?\}/g;
    let match;
    while ((match = repetitionPattern.exec(pattern)) !== null) {
        const lower = Number.parseInt(match[1] ?? '0', 10);
        // match[2] is '' for "{n,}" and undefined for "{n}"; both fall back to n.
        const upper = match[2] ? Number.parseInt(match[2], 10) : lower;
        if (Math.max(lower, upper) >= 25) {
            return false; // High repetition count, needs full check
        }
    }
    // Simple patterns without nested quantifiers or high repetition are generally safe.
    // Examples: "throw new McpError\(", "\bword\b", "foo|bar"
    return true;
}
|
|
86
|
+
/**
 * Format the low nine permission bits of a stat mode as a
 * "rwxr-xr-x"-style string (owner, group, other triads).
 *
 * @param {number} mode - Numeric mode from fs.Stats.
 * @returns {string} Nine-character symbolic permission string.
 */
function getPermissions(mode) {
    // Index 0-7 maps each 3-bit value to its rwx rendering.
    const triads = ['---', '--x', '-w-', '-wx', 'r--', 'r-x', 'rw-', 'rwx'];
    const render = (shift) => triads[(mode >> shift) & 7] ?? '---';
    return `${render(6)}${render(3)}${render(0)}`;
}
|
|
93
|
+
/**
 * Collect metadata for a single validated path.
 *
 * Returns name, type, size, timestamps, symbolic permissions, hidden
 * flag, MIME type (by extension) and — for symlinks — the raw link
 * target when it is readable.
 *
 * @param {string} filePath - Path to inspect (validated against roots).
 * @returns {Promise<object>} File info record.
 */
export async function getFileInfo(filePath) {
    const detail = await validateExistingPathDetailed(filePath);
    const { requestedPath, resolvedPath, isSymlink } = detail;
    const baseName = path.basename(requestedPath);
    const extension = path.extname(baseName).toLowerCase();
    // For symlinks, try to read the link target without following it;
    // an unreadable target simply leaves the field undefined.
    let linkTarget;
    if (isSymlink) {
        linkTarget = await fs.readlink(requestedPath).catch(() => undefined);
    }
    // stat follows symlinks, so size/dates describe the target, while the
    // reported type stays 'symlink' based on the lstat-derived flag.
    const stats = await fs.stat(resolvedPath);
    return {
        name: baseName,
        path: requestedPath,
        type: isSymlink ? 'symlink' : getFileType(stats),
        size: stats.size,
        created: stats.birthtime,
        modified: stats.mtime,
        accessed: stats.atime,
        permissions: getPermissions(stats.mode),
        isHidden: isHidden(baseName),
        mimeType: MIME_TYPES[extension] ?? undefined,
        symlinkTarget: linkTarget,
    };
}
|
|
124
|
+
/**
 * List a directory's contents, optionally recursing to `maxDepth`.
 *
 * Traversal runs through a bounded work queue (runWorkQueue) with each
 * directory's entries stat'ed in parallel (processInParallel). Hidden
 * entries are filtered out unless `includeHidden` is set; symlinks are
 * never followed. Results are sorted by `sortBy` and returned with a
 * summary of counts, truncation and skip statistics.
 *
 * @param {string} dirPath - Directory to list (validated against roots).
 * @param {object} [options] - recursive, includeHidden, maxDepth,
 *   maxEntries, sortBy ('name'|'size'|'modified'|'type'),
 *   includeSymlinkTargets.
 * @returns {Promise<object>} { path, entries, summary }.
 */
export async function listDirectory(dirPath, options = {}) {
    const { recursive = false, includeHidden = false, maxDepth = DEFAULT_MAX_DEPTH, maxEntries, sortBy = 'name', includeSymlinkTargets = false, } = options;
    const validPath = await validateExistingPath(dirPath);
    const entries = [];
    let totalFiles = 0;
    let totalDirectories = 0;
    let maxDepthReached = 0;
    let truncated = false;
    let skippedInaccessible = 0;
    let skippedSymlinks = 0;
    // Shared cap check: marks the result truncated once maxEntries is hit.
    const stopIfNeeded = () => {
        if (maxEntries !== undefined && entries.length >= maxEntries) {
            truncated = true;
            return true;
        }
        return false;
    };
    await runWorkQueue([{ currentPath: validPath, depth: 0 }], async ({ currentPath, depth }, enqueue) => {
        if (depth > maxDepth)
            return;
        if (stopIfNeeded())
            return;
        maxDepthReached = Math.max(maxDepthReached, depth);
        let items;
        try {
            items = await fs.readdir(currentPath, { withFileTypes: true });
        }
        catch (error) {
            // Unreadable directory: count it and move on; only log codes that
            // are not the expected not-found / permission failures.
            skippedInaccessible++;
            const { code } = error;
            if (code !== 'ENOENT' && code !== 'EACCES' && code !== 'EPERM') {
                console.error(`[listDirectory] Error reading directory ${currentPath}:`, error);
            }
            return;
        }
        const visibleItems = includeHidden
            ? items
            : items.filter((item) => !isHidden(item.name));
        // Stat all visible entries of this directory in parallel. The
        // processor mutates the shared skipped* counters from inside the
        // parallel callbacks.
        const { results: processedEntries, errors: processingErrors } = await processInParallel(visibleItems, async (item) => {
            const fullPath = path.join(currentPath, item.name);
            const relativePath = path.relative(validPath, fullPath) || item.name;
            try {
                if (item.isSymbolicLink()) {
                    // NOTE(review): the counter is incremented here, yet the
                    // symlink entry IS still returned and added to `entries`
                    // below — confirm whether "skipped" matches the intent.
                    skippedSymlinks++;
                    const stats = await fs.lstat(fullPath);
                    let symlinkTarget;
                    if (includeSymlinkTargets) {
                        try {
                            symlinkTarget = await fs.readlink(fullPath);
                        }
                        catch {
                            // Symlink target unreadable
                        }
                    }
                    const entry = {
                        name: item.name,
                        path: fullPath,
                        relativePath,
                        type: 'symlink',
                        size: stats.size,
                        modified: stats.mtime,
                        symlinkTarget,
                    };
                    return { entry };
                }
                const stats = await fs.stat(fullPath);
                const isDir = item.isDirectory();
                const type = isDir
                    ? 'directory'
                    : item.isFile()
                        ? 'file'
                        : getFileType(stats);
                const entry = {
                    name: item.name,
                    path: fullPath,
                    relativePath,
                    type,
                    size: type === 'file' ? stats.size : undefined,
                    modified: stats.mtime,
                };
                // Prepare (but don't enqueue yet) the recursion work item, so
                // the enqueue happens on the ordered pass below.
                const enqueueDir = recursive && isDir && depth + 1 <= maxDepth
                    ? {
                        currentPath: await validateExistingPath(fullPath),
                        depth: depth + 1,
                    }
                    : undefined;
                return { entry, enqueueDir };
            }
            catch {
                // stat/validate failed: still emit a best-effort entry using
                // only the dirent's own type information.
                skippedInaccessible++;
                const entry = {
                    name: item.name,
                    path: fullPath,
                    relativePath,
                    type: item.isDirectory()
                        ? 'directory'
                        : item.isFile()
                            ? 'file'
                            : 'other',
                };
                return { entry };
            }
        });
        // Count errors from parallel processing as inaccessible
        skippedInaccessible += processingErrors.length;
        for (const { entry, enqueueDir } of processedEntries) {
            if (stopIfNeeded())
                break;
            entries.push(entry);
            if (entry.type === 'directory')
                totalDirectories++;
            if (entry.type === 'file')
                totalFiles++;
            if (enqueueDir)
                enqueue(enqueueDir);
        }
    }, DIR_TRAVERSAL_CONCURRENCY);
    // Sort the flat entry list; all comparators are stable fallbacks to 0-ish
    // values so entries missing size/modified sort last.
    entries.sort((a, b) => {
        switch (sortBy) {
            case 'size':
                return (b.size ?? 0) - (a.size ?? 0);
            case 'modified':
                return (b.modified?.getTime() ?? 0) - (a.modified?.getTime() ?? 0);
            case 'type':
                // directories first, then by name
                if (a.type !== b.type) {
                    return a.type === 'directory' ? -1 : 1;
                }
                return a.name.localeCompare(b.name);
            case 'name':
            default:
                return a.name.localeCompare(b.name);
        }
    });
    return {
        path: validPath,
        entries,
        summary: {
            totalEntries: entries.length,
            totalFiles,
            totalDirectories,
            maxDepthReached,
            truncated,
            skippedInaccessible,
            skippedSymlinks,
        },
    };
}
|
|
272
|
+
/**
 * Find files/directories under `basePath` matching a glob `pattern`.
 *
 * Streams matches from fast-glob (symlinks never followed, errors
 * suppressed), stats them in batches of PARALLEL_CONCURRENCY, and
 * returns sorted results plus a summary. Entries that fail validation
 * or stat are counted as skippedInaccessible rather than failing the
 * whole search.
 *
 * @param {string} basePath - Root of the search (validated against roots).
 * @param {string} pattern - fast-glob pattern, relative to basePath.
 * @param {string[]} [excludePatterns] - Globs passed to fast-glob `ignore`.
 * @param {object} [options] - maxResults, sortBy ('path'|'name'|'size'|'modified'), maxDepth.
 * @returns {Promise<object>} { basePath, pattern, results, summary }.
 */
export async function searchFiles(basePath, pattern, excludePatterns = [], options = {}) {
    const validPath = await validateExistingPath(basePath);
    const { maxResults, sortBy = 'path', maxDepth } = options;
    const results = [];
    let skippedInaccessible = 0;
    let truncated = false;
    const batch = [];
    // Stat the currently-batched paths in parallel, then fold outcomes into
    // results (respecting maxResults) / skippedInaccessible.
    const flushBatch = async () => {
        if (batch.length === 0)
            return;
        // splice empties `batch` in place so the stream loop can keep reusing it.
        const toProcess = batch.splice(0, batch.length);
        const settled = await Promise.allSettled(toProcess.map(async (match) => {
            const validMatch = await validateExistingPath(match);
            const stats = await fs.stat(validMatch);
            const { size, mtime: modified } = stats;
            return {
                path: validMatch,
                type: getFileType(stats),
                size: stats.isFile() ? size : undefined,
                modified,
            };
        }));
        for (const r of settled) {
            if (r.status === 'fulfilled') {
                // Cap check happens per fulfilled result; the rest of the
                // batch is discarded once the cap is reached.
                if (maxResults !== undefined && results.length >= maxResults) {
                    truncated = true;
                    break;
                }
                results.push(r.value);
            }
            else {
                skippedInaccessible++;
            }
        }
    };
    const stream = fg.stream(pattern, {
        cwd: validPath,
        absolute: true,
        onlyFiles: false,
        dot: true,
        ignore: excludePatterns,
        suppressErrors: true,
        followSymbolicLinks: false, // Security: never follow symlinks
        deep: maxDepth, // Limit search depth if specified
    });
    for await (const entry of stream) {
        const matchPath = typeof entry === 'string' ? entry : String(entry);
        if (maxResults !== undefined && results.length >= maxResults) {
            truncated = true;
            break;
        }
        batch.push(matchPath);
        // Flush whenever a full batch has accumulated; re-check the cap
        // afterwards because the flush may have filled the result list.
        if (batch.length >= PARALLEL_CONCURRENCY) {
            await flushBatch();
            if (maxResults !== undefined && results.length >= maxResults) {
                truncated = true;
                break;
            }
        }
    }
    // Drain whatever is left in the final partial batch.
    await flushBatch();
    results.sort((a, b) => {
        switch (sortBy) {
            case 'size':
                return (b.size ?? 0) - (a.size ?? 0);
            case 'modified':
                return (b.modified?.getTime() ?? 0) - (a.modified?.getTime() ?? 0);
            case 'name':
                return path.basename(a.path).localeCompare(path.basename(b.path));
            case 'path':
            default:
                return a.path.localeCompare(b.path);
        }
    });
    return {
        basePath: validPath,
        pattern,
        results,
        summary: {
            matched: results.length,
            truncated,
            skippedInaccessible,
        },
    };
}
|
|
357
|
+
// Re-export readFile from fs-helpers so it can be used by tools
|
|
358
|
+
export { readFile };
|
|
359
|
+
/**
 * Read multiple files in parallel.
 * Individual file errors don't fail the entire operation.
 *
 * @param {string[]} filePaths - Files to read; output preserves this order.
 * @param {object} [options] - encoding, maxSize, head, tail (forwarded to readFile).
 * @returns {Promise<Array<{path: string, content?: string, error?: string}>>}
 */
export async function readMultipleFiles(filePaths, options = {}) {
    const { encoding = 'utf-8', maxSize = MAX_TEXT_FILE_SIZE, head, tail } = options;
    if (filePaths.length === 0) {
        return [];
    }
    // Pre-fill with placeholders so results land at their original index
    // even though work completes out of order, and concurrency stays
    // bounded to avoid spiky I/O / EMFILE.
    const output = filePaths.map((p) => ({ path: p }));
    const tasks = filePaths.map((p, i) => ({ filePath: p, index: i }));
    const { results, errors } = await processInParallel(tasks, async ({ filePath, index }) => {
        const read = await readFile(filePath, {
            encoding,
            maxSize,
            head,
            tail,
        });
        return {
            index,
            value: { path: read.path, content: read.content },
        };
    }, PARALLEL_CONCURRENCY);
    for (const { index, value } of results) {
        output[index] = value;
    }
    for (const { index, error } of errors) {
        output[index] = {
            path: filePaths[index] ?? '(unknown)',
            error: error.message,
        };
    }
    return output;
}
|
|
393
|
+
/**
 * Search file contents under `basePath` for a regex (or literal) pattern.
 *
 * Files are discovered with fast-glob (dotfiles excluded, symlinks never
 * followed), size-capped and optionally binary-sniffed, then scanned
 * line-by-line via readline. Supports whole-word and literal modes,
 * before/after context capture, and several stop conditions (timeout,
 * maxFilesScanned, maxResults) reported via summary.stoppedReason.
 * Unsafe regexes are rejected up front via safe-regex2 unless the
 * pattern is literal or passes the isSimpleSafePattern heuristic.
 *
 * @param {string} basePath - Root of the search (validated against roots).
 * @param {string} searchPattern - Regex source, or literal text if isLiteral.
 * @param {object} [options] - filePattern, excludePatterns, caseSensitive,
 *   maxResults, maxFileSize, maxFilesScanned, timeoutMs, skipBinary,
 *   contextLines, wholeWord, isLiteral.
 * @returns {Promise<object>} { basePath, pattern, filePattern, matches, summary }.
 * @throws {McpError} E_INVALID_PATTERN for ReDoS-risky or malformed regexes.
 */
export async function searchContent(basePath, searchPattern, options = {}) {
    const { filePattern = '**/*', excludePatterns = [], caseSensitive = false, maxResults = DEFAULT_MAX_RESULTS, maxFileSize = MAX_SEARCHABLE_FILE_SIZE, maxFilesScanned, timeoutMs, skipBinary = true, contextLines = 0, wholeWord = false, isLiteral = false, } = options;
    const validPath = await validateExistingPath(basePath);
    const deadlineMs = timeoutMs !== undefined ? Date.now() + timeoutMs : undefined;
    // Build the final pattern
    let finalPattern = searchPattern;
    // Escape regex special characters if literal mode
    if (isLiteral) {
        finalPattern = finalPattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    }
    // Add word boundaries if whole word mode
    if (wholeWord) {
        finalPattern = `\\b${finalPattern}\\b`;
    }
    // ReDoS protection: skip check for literal or simple patterns
    const needsReDoSCheck = !isLiteral && !isSimpleSafePattern(finalPattern);
    if (needsReDoSCheck && !safeRegex(finalPattern)) {
        throw new McpError(ErrorCode.E_INVALID_PATTERN, `Potentially unsafe regular expression (ReDoS risk): ${searchPattern}. ` +
            'Avoid patterns with nested quantifiers, overlapping alternations, or exponential backtracking.', basePath, {
            searchPattern,
            finalPattern,
            reason: 'ReDoS risk detected by safe-regex2',
        });
    }
    let regex;
    try {
        regex = new RegExp(finalPattern, caseSensitive ? 'g' : 'gi');
    }
    catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        throw new McpError(ErrorCode.E_INVALID_PATTERN, `Invalid regular expression: ${finalPattern} (${message})`, basePath, { searchPattern: finalPattern });
    }
    const matches = [];
    let filesScanned = 0;
    let filesMatched = 0;
    let skippedTooLarge = 0;
    let skippedBinary = 0;
    let skippedInaccessible = 0;
    let truncated = false;
    let stoppedReason;
    // Record why the search stopped early; always flags truncation.
    const stopNow = (reason) => {
        truncated = true;
        stoppedReason = reason;
        return true;
    };
    const stream = fg.stream(filePattern, {
        cwd: validPath,
        absolute: true,
        onlyFiles: true,
        dot: false,
        ignore: excludePatterns,
        suppressErrors: true,
        followSymbolicLinks: false, // Security: never follow symlinks
    });
    for await (const entry of stream) {
        const file = typeof entry === 'string' ? entry : String(entry);
        // Per-file stop conditions, checked before any I/O on the file.
        if (shouldStopBecauseOfTimeout(deadlineMs)) {
            stopNow('timeout');
            break;
        }
        if (maxFilesScanned !== undefined && filesScanned >= maxFilesScanned) {
            stopNow('maxFiles');
            break;
        }
        if (matches.length >= maxResults) {
            stopNow('maxResults');
            break;
        }
        let validFile;
        let handle;
        try {
            validFile = await validateExistingPath(file);
            // Open once for the size/binary pre-checks; the handle is closed
            // (and cleared so the catch won't double-close) before streaming.
            handle = await fs.open(validFile, 'r');
            const stats = await handle.stat();
            filesScanned++;
            if (stats.size > maxFileSize) {
                skippedTooLarge++;
                await handle.close();
                handle = undefined;
                continue;
            }
            if (skipBinary) {
                const binary = await isProbablyBinary(validFile, handle);
                if (binary) {
                    skippedBinary++;
                    await handle.close();
                    handle = undefined;
                    continue;
                }
            }
            await handle.close();
            handle = undefined;
            const fileStream = createReadStream(validFile, {
                encoding: 'utf-8',
            });
            const rl = readline.createInterface({
                input: fileStream,
                crlfDelay: Infinity,
            });
            let fileHadMatches = false;
            let lineNumber = 0;
            // Rolling window of the last `contextLines` lines (for contextBefore),
            // and matches still waiting to collect their contextAfter lines.
            const lineBuffer = [];
            const pendingMatches = [];
            try {
                for await (const line of rl) {
                    lineNumber++;
                    if (shouldStopBecauseOfTimeout(deadlineMs)) {
                        stopNow('timeout');
                        break;
                    }
                    if (matches.length >= maxResults) {
                        stopNow('maxResults');
                        break;
                    }
                    const trimmedLine = line.trim().substring(0, MAX_LINE_CONTENT_LENGTH);
                    // Feed this line to every earlier match still owed context-after.
                    for (const pending of pendingMatches) {
                        if (pending.afterNeeded > 0) {
                            pending.match.contextAfter ??= [];
                            pending.match.contextAfter.push(trimmedLine);
                            pending.afterNeeded--;
                        }
                    }
                    // Drop satisfied entries from the front (oldest finish first).
                    while (pendingMatches.length > 0 &&
                        pendingMatches[0]?.afterNeeded === 0) {
                        pendingMatches.shift();
                    }
                    const matchCount = countRegexMatches(line, regex);
                    if (matchCount < 0) {
                        // Regex timed out on this line; skip it but keep the
                        // context window moving.
                        console.error(`[searchContent] Skipping line ${lineNumber} in ${validFile} due to regex timeout`);
                        // Still add to buffer for context
                        if (contextLines > 0) {
                            lineBuffer.push(trimmedLine);
                            if (lineBuffer.length > contextLines) {
                                lineBuffer.shift();
                            }
                        }
                        continue;
                    }
                    if (matchCount > 0) {
                        fileHadMatches = true;
                        const newMatch = {
                            file: validFile,
                            line: lineNumber,
                            content: trimmedLine,
                            matchCount,
                        };
                        // contextBefore is snapshotted before this line enters
                        // the buffer, so it never includes the match itself.
                        if (contextLines > 0 && lineBuffer.length > 0) {
                            newMatch.contextBefore = [...lineBuffer];
                        }
                        matches.push(newMatch);
                        if (contextLines > 0) {
                            pendingMatches.push({
                                match: newMatch,
                                afterNeeded: contextLines,
                            });
                        }
                    }
                    if (contextLines > 0) {
                        lineBuffer.push(trimmedLine);
                        if (lineBuffer.length > contextLines) {
                            lineBuffer.shift();
                        }
                    }
                }
            }
            finally {
                rl.close();
                fileStream.destroy();
            }
            if (fileHadMatches)
                filesMatched++;
            if (stoppedReason !== undefined)
                break;
        }
        catch (error) {
            // Release the pre-check handle if streaming setup failed mid-way.
            if (handle) {
                await handle.close().catch(() => { });
            }
            skippedInaccessible++;
            // Log unexpected errors for debugging
            const { code } = error;
            if (code !== 'ENOENT' && code !== 'EACCES' && code !== 'EPERM') {
                console.error(`[searchContent] Error processing ${file}:`, error);
            }
        }
    }
    return {
        basePath: validPath,
        pattern: searchPattern,
        filePattern,
        matches,
        summary: {
            filesScanned,
            filesMatched,
            matches: matches.length,
            truncated,
            skippedTooLarge,
            skippedBinary,
            skippedInaccessible,
            stoppedReason,
        },
    };
}
|
|
596
|
+
/**
 * Recursively analyze a directory: entry counts, total size, a file-type
 * histogram, and the top-N largest / most recently modified files.
 *
 * Traversal runs through a bounded work queue (DIR_TRAVERSAL_CONCURRENCY
 * concurrent directory reads). Symlinks are never followed; hidden entries
 * are skipped unless `includeHidden` is set.
 *
 * @param {string} dirPath - Directory to analyze (resolved/validated by
 *   `validateExistingPath`; presumably restricted to allowed roots — see the
 *   path-validation module).
 * @param {object} [options]
 * @param {number} [options.maxDepth=DEFAULT_MAX_DEPTH] - Maximum traversal depth.
 * @param {number} [options.topN=DEFAULT_TOP_N] - Length cap for the
 *   largestFiles / recentlyModified lists.
 * @param {string[]} [options.excludePatterns] - Minimatch patterns tested
 *   against both the entry name and its root-relative path.
 * @param {boolean} [options.includeHidden=false] - Include dot-prefixed entries.
 * @returns {Promise<object>} `{ analysis, summary }` — analysis holds the
 *   aggregate stats; summary reports skip counters (truncated is always false
 *   here: this traversal has no file-count cap).
 */
export async function analyzeDirectory(dirPath, options = {}) {
    const { maxDepth = DEFAULT_MAX_DEPTH, topN = DEFAULT_TOP_N, excludePatterns = [], includeHidden = false, } = options;
    const validPath = await validateExistingPath(dirPath);
    // Aggregate counters, mutated by the (concurrent) queue worker below.
    let totalFiles = 0;
    let totalDirectories = 0;
    let totalSize = 0;
    let currentMaxDepth = 0;
    let skippedInaccessible = 0;
    let skippedSymlinks = 0;
    const fileTypes = {};
    const largestFiles = [];
    const recentlyModified = [];
    // Compile exclude patterns once up front rather than per entry.
    const excludeMatchers = excludePatterns.length > 0
        ? excludePatterns.map((pattern) => new Minimatch(pattern))
        : [];
    const shouldExclude = (name, relativePath) => {
        if (excludeMatchers.length === 0)
            return false;
        return excludeMatchers.some((m) => m.match(name) || m.match(relativePath));
    };
    // Insert `item` into `arr`, kept ordered by `compare` and capped at
    // `maxLen`. O(n) per insert — avoids re-sorting the list for every file.
    const insertSorted = (arr, item, compare, maxLen) => {
        if (maxLen <= 0)
            return;
        const idx = arr.findIndex((el) => compare(item, el));
        if (idx === -1) {
            // Item ranks below everything present; keep it only if there is room.
            if (arr.length < maxLen)
                arr.push(item);
        }
        else {
            arr.splice(idx, 0, item);
            if (arr.length > maxLen)
                arr.pop();
        }
    };
    // Each queue task scans one directory; subdirectories are re-enqueued.
    await runWorkQueue([{ currentPath: validPath, depth: 0 }], async ({ currentPath, depth }, enqueue) => {
        if (depth > maxDepth)
            return;
        currentMaxDepth = Math.max(currentMaxDepth, depth);
        let items;
        try {
            items = await fs.readdir(currentPath, { withFileTypes: true });
        }
        catch (error) {
            // Unreadable directory: count it, and log only unexpected errno codes.
            skippedInaccessible++;
            const { code } = error;
            if (code !== 'ENOENT' && code !== 'EACCES' && code !== 'EPERM') {
                console.error(`[analyzeDirectory] Error reading directory ${currentPath}:`, error);
            }
            return;
        }
        for (const item of items) {
            const fullPath = path.join(currentPath, item.name);
            const relativePath = path.relative(validPath, fullPath);
            // Skip hidden files/directories unless includeHidden is true
            if (!includeHidden && isHidden(item.name)) {
                continue;
            }
            // Skip items matching exclude patterns
            if (shouldExclude(item.name, relativePath)) {
                continue;
            }
            try {
                const validated = await validateExistingPathDetailed(fullPath);
                // Never follow symlinks — count and move on.
                if (validated.isSymlink || item.isSymbolicLink()) {
                    skippedSymlinks++;
                    continue;
                }
                const stats = await fs.stat(validated.resolvedPath);
                if (stats.isDirectory()) {
                    totalDirectories++;
                    if (depth + 1 <= maxDepth) {
                        enqueue({
                            currentPath: validated.resolvedPath,
                            depth: depth + 1,
                        });
                    }
                }
                else if (stats.isFile()) {
                    totalFiles++;
                    totalSize += stats.size;
                    const ext = path.extname(item.name).toLowerCase() || '(no extension)';
                    fileTypes[ext] = (fileTypes[ext] ?? 0) + 1;
                    // Descending order: largest sizes / newest mtimes first.
                    insertSorted(largestFiles, { path: validated.resolvedPath, size: stats.size }, (a, b) => a.size > b.size, topN);
                    insertSorted(recentlyModified, { path: validated.resolvedPath, modified: stats.mtime }, (a, b) => a.modified.getTime() > b.modified.getTime(), topN);
                }
            }
            catch (error) {
                // NOTE(review): access-denied is folded into skippedSymlinks here —
                // presumably because path validation rejects symlink escapes with
                // these codes; confirm against validateExistingPathDetailed.
                if (error instanceof McpError &&
                    (error.code === ErrorCode.E_ACCESS_DENIED ||
                        error.code === ErrorCode.E_SYMLINK_NOT_ALLOWED)) {
                    skippedSymlinks++;
                }
                else {
                    skippedInaccessible++;
                }
            }
        }
    }, DIR_TRAVERSAL_CONCURRENCY);
    const analysis = {
        path: validPath,
        totalFiles,
        totalDirectories,
        totalSize,
        fileTypes,
        largestFiles,
        recentlyModified,
        maxDepth: currentMaxDepth,
    };
    return {
        analysis,
        summary: {
            truncated: false,
            skippedInaccessible,
            skippedSymlinks,
        },
    };
}
|
|
713
|
+
/**
 * Build a JSON tree structure of a directory.
 * More efficient for AI parsing than flat file lists.
 *
 * @param {string} dirPath - Directory to walk; must resolve to a directory.
 * @param {object} [options]
 * @param {number} [options.maxDepth=DEFAULT_MAX_DEPTH] - Depth at which
 *   recursion stops (deeper directories appear with empty children).
 * @param {string[]} [options.excludePatterns] - Minimatch patterns tested
 *   against the entry name and its root-relative path.
 * @param {boolean} [options.includeHidden=false] - Include dot-prefixed
 *   entries (the root itself is always included).
 * @param {boolean} [options.includeSize=false] - Attach `size` to file nodes.
 * @param {number} [options.maxFiles] - Stop collecting once this many files
 *   have been added; sets `truncated` in the summary.
 * @returns {Promise<object>} `{ tree, summary }` — tree of
 *   `{ name, type, children?, size? }` nodes plus traversal counters.
 * @throws {McpError} E_NOT_DIRECTORY if dirPath is not a directory;
 *   E_UNKNOWN if even the root node could not be built.
 */
export async function getDirectoryTree(dirPath, options = {}) {
    const { maxDepth = DEFAULT_MAX_DEPTH, excludePatterns = [], includeHidden = false, includeSize = false, maxFiles, } = options;
    const validPath = await validateExistingPath(dirPath);
    // Ensure the requested path is a directory (not just an existing path).
    const rootStats = await fs.stat(validPath);
    if (!rootStats.isDirectory()) {
        throw new McpError(ErrorCode.E_NOT_DIRECTORY, `Not a directory: ${dirPath}`, dirPath);
    }
    // Shared counters, mutated by the recursive builder below.
    let totalFiles = 0;
    let totalDirectories = 0;
    let maxDepthReached = 0;
    let skippedInaccessible = 0;
    let skippedSymlinks = 0;
    let truncated = false;
    // Compile exclude patterns once up front.
    const excludeMatchers = excludePatterns.length > 0
        ? excludePatterns.map((pattern) => new Minimatch(pattern))
        : [];
    const hitMaxFiles = () => {
        return maxFiles !== undefined && totalFiles >= maxFiles;
    };
    const shouldExclude = (name, relativePath) => {
        if (excludeMatchers.length === 0)
            return false;
        return excludeMatchers.some((m) => m.match(name) || m.match(relativePath));
    };
    // Returns a tree node, or null when the entry is skipped/excluded.
    const buildTree = async (currentPath, depth, relativePath = '') => {
        if (hitMaxFiles()) {
            truncated = true;
            return null;
        }
        let validatedPath;
        let isSymlink = false;
        try {
            ({ resolvedPath: validatedPath, isSymlink } =
                await validateExistingPathDetailed(currentPath));
        }
        catch (error) {
            // NOTE(review): access-denied is counted as a skipped symlink —
            // matches analyzeDirectory's convention; confirm intended.
            if (error instanceof McpError &&
                (error.code === ErrorCode.E_ACCESS_DENIED ||
                    error.code === ErrorCode.E_SYMLINK_NOT_ALLOWED)) {
                skippedSymlinks++;
            }
            else {
                skippedInaccessible++;
            }
            return null;
        }
        const name = path.basename(currentPath);
        // Check exclusions
        if (shouldExclude(name, relativePath)) {
            return null;
        }
        // relativePath === '' means this is the root: never hide the root itself.
        if (!includeHidden && name.startsWith('.') && relativePath !== '') {
            return null;
        }
        maxDepthReached = Math.max(maxDepthReached, depth);
        // Symlinks are never followed or represented in the tree.
        if (isSymlink) {
            skippedSymlinks++;
            return null;
        }
        let stats;
        try {
            stats = await fs.stat(validatedPath);
        }
        catch {
            skippedInaccessible++;
            return null;
        }
        const { size } = stats;
        if (stats.isFile()) {
            // Re-check the cap: siblings in the same batch may have filled it.
            if (hitMaxFiles()) {
                truncated = true;
                return null;
            }
            totalFiles++;
            const entry = { name, type: 'file' };
            if (includeSize) {
                entry.size = size;
            }
            return entry;
        }
        if (stats.isDirectory()) {
            totalDirectories++;
            if (depth >= maxDepth) {
                // NOTE(review): marks truncated whenever the depth limit is hit,
                // even if this directory happens to be empty.
                truncated = true;
                return { name, type: 'directory', children: [] };
            }
            let items;
            try {
                items = await fs.readdir(validatedPath, { withFileTypes: true });
            }
            catch {
                // Unreadable directory still appears in the tree, just childless.
                skippedInaccessible++;
                return { name, type: 'directory', children: [] };
            }
            const children = [];
            // Recurse in fixed-size batches to bound concurrent fs operations.
            for (let i = 0; i < items.length && !hitMaxFiles(); i += DIR_TRAVERSAL_CONCURRENCY) {
                const batch = items.slice(i, i + DIR_TRAVERSAL_CONCURRENCY);
                const batchResults = await Promise.all(batch.map((item) => {
                    const childPath = path.join(validatedPath, item.name);
                    const childRelative = relativePath
                        ? `${relativePath}/${item.name}`
                        : item.name;
                    return buildTree(childPath, depth + 1, childRelative);
                }));
                for (const entry of batchResults) {
                    if (entry !== null) {
                        children.push(entry);
                    }
                }
            }
            // Stable presentation order: directories first, then alphabetical.
            children.sort((a, b) => {
                if (a.type !== b.type) {
                    return a.type === 'directory' ? -1 : 1;
                }
                return a.name.localeCompare(b.name);
            });
            return { name, type: 'directory', children };
        }
        // Neither file nor directory (fifo, socket, device, ...): omit it.
        return null;
    };
    const tree = await buildTree(validPath, 0);
    if (!tree) {
        throw new McpError(ErrorCode.E_UNKNOWN, `Unable to build tree for path: ${dirPath}`, dirPath);
    }
    return {
        tree,
        summary: {
            totalFiles,
            totalDirectories,
            maxDepthReached,
            truncated,
            skippedInaccessible,
            skippedSymlinks,
        },
    };
}
|
|
854
|
+
/**
 * Read a media/binary file and return its contents base64-encoded,
 * along with MIME type (by extension), byte size, and — for recognized
 * image formats — pixel dimensions.
 *
 * @param {string} filePath - File to read (validated against allowed paths).
 * @param {object} [options]
 * @param {number} [options.maxSize=MAX_MEDIA_FILE_SIZE] - Reject files larger
 *   than this many bytes.
 * @returns {Promise<object>} `{ path, mimeType, size, data, width, height }`;
 *   width/height are undefined for non-images or unparsable headers.
 * @throws {McpError} E_NOT_FILE for non-files; E_TOO_LARGE over the size cap.
 */
export async function readMediaFile(filePath, options = {}) {
    const { maxSize = MAX_MEDIA_FILE_SIZE } = options;
    const validPath = await validateExistingPath(filePath);
    const stats = await fs.stat(validPath);
    if (!stats.isFile()) {
        throw new McpError(ErrorCode.E_NOT_FILE, `Not a file: ${filePath}`, filePath);
    }
    const { size } = stats;
    if (size > maxSize) {
        throw new McpError(ErrorCode.E_TOO_LARGE, `File too large: ${size} bytes (max: ${maxSize} bytes)`, filePath, { size, maxSize });
    }
    const ext = path.extname(validPath).toLowerCase();
    // Unknown extensions fall back to the generic binary MIME type.
    const mimeType = MIME_TYPES[ext] ?? 'application/octet-stream';
    const buffer = await fs.readFile(validPath);
    let width;
    let height;
    // Only attempt dimension sniffing for image MIME types.
    if (mimeType.startsWith('image/')) {
        const dims = parseImageDimensions(buffer, ext);
        if (dims) {
            width = dims.width;
            height = dims.height;
        }
    }
    return {
        path: validPath,
        mimeType,
        size,
        data: buffer.toString('base64'),
        width,
        height,
    };
}
|
|
890
|
+
// Magic-byte signatures used to sanity-check image headers before parsing.
const PNG_SIGNATURE = [0x89, 0x50, 0x4e, 0x47]; // "\x89PNG"
const JPEG_SIGNATURE = [0xff, 0xd8]; // SOI (start-of-image) marker
const GIF_SIGNATURE = [0x47, 0x49, 0x46]; // "GIF"
const BMP_SIGNATURE = [0x42, 0x4d]; // "BM"
const WEBP_RIFF = [0x52, 0x49, 0x46, 0x46]; // "RIFF" container header at offset 0
const WEBP_MARKER = [0x57, 0x45, 0x42, 0x50]; // "WEBP" form type at offset 8
|
|
896
|
+
/**
 * Test whether `buffer` contains the byte sequence `signature` starting at
 * `offset`. Returns false (rather than throwing) when the buffer is too short.
 */
function matchesSignature(buffer, signature, offset = 0) {
    const end = offset + signature.length;
    if (end > buffer.length) {
        return false;
    }
    for (let i = 0; i < signature.length; i += 1) {
        if (buffer[offset + i] !== signature[i]) {
            return false;
        }
    }
    return true;
}
|
|
901
|
+
/**
 * Read PNG dimensions from the IHDR chunk: big-endian 32-bit width at byte 16
 * and height at byte 20 (IHDR is always the first chunk in a valid PNG).
 */
function parsePng(buffer) {
    const hasIhdr = buffer.length >= 24 && matchesSignature(buffer, PNG_SIGNATURE);
    if (!hasIhdr) {
        return null;
    }
    const width = buffer.readUInt32BE(16);
    const height = buffer.readUInt32BE(20);
    return { width, height };
}
|
|
906
|
+
/**
 * Scan JPEG marker segments for a start-of-frame (SOF) marker and read the
 * frame dimensions from it. Returns null if no SOF segment is found.
 */
function parseJpeg(buffer) {
    if (buffer.length < 2 || !matchesSignature(buffer, JPEG_SIGNATURE)) {
        return null;
    }
    // SOF0–SOF15 carry the dimensions; 0xc4 (DHT), 0xc8 (JPG) and
    // 0xcc (DAC) share the range but are not frame headers.
    const isStartOfFrame = (m) => m !== undefined &&
        m >= 0xc0 &&
        m <= 0xcf &&
        m !== 0xc4 &&
        m !== 0xc8 &&
        m !== 0xcc;
    let pos = 2;
    while (pos < buffer.length - 8) {
        // Resynchronize on the next 0xff marker prefix.
        if (buffer[pos] !== 0xff) {
            pos += 1;
            continue;
        }
        const marker = buffer[pos + 1];
        if (isStartOfFrame(marker)) {
            // SOF layout: marker(2) length(2) precision(1) height(2) width(2).
            return {
                width: buffer.readUInt16BE(pos + 7),
                height: buffer.readUInt16BE(pos + 5),
            };
        }
        if (pos + 3 >= buffer.length) {
            break;
        }
        // Skip the whole segment: 2 marker bytes + big-endian payload length.
        pos += 2 + buffer.readUInt16BE(pos + 2);
    }
    return null;
}
|
|
933
|
+
/**
 * Read GIF logical-screen dimensions: little-endian 16-bit width at byte 6
 * and height at byte 8.
 */
function parseGif(buffer) {
    const looksLikeGif = buffer.length >= 10 && matchesSignature(buffer, GIF_SIGNATURE);
    return looksLikeGif
        ? { width: buffer.readUInt16LE(6), height: buffer.readUInt16LE(8) }
        : null;
}
|
|
938
|
+
/**
 * Read BMP (BITMAPINFOHEADER) dimensions: signed 32-bit width at byte 18 and
 * height at byte 22. Height is negated for top-down bitmaps, hence the abs().
 */
function parseBmp(buffer) {
    if (buffer.length < 26 || !matchesSignature(buffer, BMP_SIGNATURE)) {
        return null;
    }
    const width = buffer.readInt32LE(18);
    const height = Math.abs(buffer.readInt32LE(22));
    return { width, height };
}
|
|
946
|
+
/**
 * Read WebP dimensions. After validating the RIFF/WEBP container, the first
 * chunk's FourCC (bytes 12-15) selects the bitstream flavor: "VP8 " (lossy),
 * "VP8L" (lossless) or "VP8X" (extended). Returns null for anything else.
 */
function parseWebp(buffer) {
    if (buffer.length < 30) {
        return null;
    }
    if (!matchesSignature(buffer, WEBP_RIFF) ||
        !matchesSignature(buffer, WEBP_MARKER, 8)) {
        return null;
    }
    const fourCC = String.fromCharCode(buffer[12] ?? 0, buffer[13] ?? 0, buffer[14] ?? 0, buffer[15] ?? 0);
    switch (fourCC) {
        case 'VP8 ': {
            // Lossy: 14-bit width/height in the frame header.
            return {
                width: buffer.readUInt16LE(26) & 0x3fff,
                height: buffer.readUInt16LE(28) & 0x3fff,
            };
        }
        case 'VP8L': {
            // Lossless: width-1 and height-1 packed as 14-bit fields.
            const bits = buffer.readUInt32LE(21);
            return { width: (bits & 0x3fff) + 1, height: ((bits >> 14) & 0x3fff) + 1 };
        }
        case 'VP8X': {
            // Extended: 24-bit little-endian canvas size minus one.
            const w = (buffer[24] ?? 0) | ((buffer[25] ?? 0) << 8) | ((buffer[26] ?? 0) << 16);
            const h = (buffer[27] ?? 0) | ((buffer[28] ?? 0) << 8) | ((buffer[29] ?? 0) << 16);
            return { width: w + 1, height: h + 1 };
        }
        default:
            return null;
    }
}
|
|
982
|
+
// Dispatch table: lowercase file extension (with dot) -> header parser that
// returns { width, height } or null. Extensions not listed here yield no
// dimensions in parseImageDimensions.
const IMAGE_PARSERS = {
    '.png': parsePng,
    '.jpg': parseJpeg,
    '.jpeg': parseJpeg,
    '.gif': parseGif,
    '.bmp': parseBmp,
    '.webp': parseWebp,
};
|
|
990
|
+
/**
 * Parse image dimensions from common image format headers.
 * Supports PNG, JPEG, GIF, BMP, and WebP.
 *
 * @param {Buffer} buffer - Raw file contents.
 * @param {string} ext - Lowercase file extension including the dot.
 * @returns {{width: number, height: number} | null} Dimensions, or null for
 *   unsupported extensions or malformed/truncated headers.
 */
function parseImageDimensions(buffer, ext) {
    const parser = IMAGE_PARSERS[ext];
    if (!parser) {
        return null;
    }
    try {
        return parser(buffer);
    }
    catch {
        // Malformed headers must never propagate — treat as "no dimensions".
        return null;
    }
}
|
|
1003
|
+
//# sourceMappingURL=file-operations.js.map
|