@j0hanz/filesystem-context-mcp 1.0.10 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +112 -22
- package/dist/__tests__/lib/errors.test.js +3 -23
- package/dist/__tests__/lib/errors.test.js.map +1 -1
- package/dist/__tests__/lib/file-operations.test.js +34 -0
- package/dist/__tests__/lib/file-operations.test.js.map +1 -1
- package/dist/__tests__/lib/path-validation.test.js +8 -0
- package/dist/__tests__/lib/path-validation.test.js.map +1 -1
- package/dist/__tests__/schemas/validators.test.js +130 -124
- package/dist/__tests__/schemas/validators.test.js.map +1 -1
- package/dist/__tests__/security/filesystem-boundary.test.js +1 -1
- package/dist/__tests__/security/filesystem-boundary.test.js.map +1 -1
- package/dist/config/types.d.ts +1 -58
- package/dist/config/types.d.ts.map +1 -1
- package/dist/config/types.js +0 -2
- package/dist/config/types.js.map +1 -1
- package/dist/index.js +3 -5
- package/dist/index.js.map +1 -1
- package/dist/instructions.md +1 -2
- package/dist/lib/constants.d.ts +9 -7
- package/dist/lib/constants.d.ts.map +1 -1
- package/dist/lib/constants.js +89 -298
- package/dist/lib/constants.js.map +1 -1
- package/dist/lib/errors.d.ts +25 -1
- package/dist/lib/errors.d.ts.map +1 -1
- package/dist/lib/errors.js +4 -112
- package/dist/lib/errors.js.map +1 -1
- package/dist/lib/file-operations/analyze-directory.d.ts +8 -0
- package/dist/lib/file-operations/analyze-directory.d.ts.map +1 -0
- package/dist/lib/file-operations/analyze-directory.js +117 -0
- package/dist/lib/file-operations/analyze-directory.js.map +1 -0
- package/dist/lib/file-operations/directory-items.d.ts +20 -0
- package/dist/lib/file-operations/directory-items.d.ts.map +1 -0
- package/dist/lib/file-operations/directory-items.js +85 -0
- package/dist/lib/file-operations/directory-items.js.map +1 -0
- package/dist/lib/file-operations/directory-iteration.d.ts +17 -0
- package/dist/lib/file-operations/directory-iteration.d.ts.map +1 -0
- package/dist/lib/file-operations/directory-iteration.js +55 -0
- package/dist/lib/file-operations/directory-iteration.js.map +1 -0
- package/dist/lib/file-operations/directory-tree.d.ts +9 -0
- package/dist/lib/file-operations/directory-tree.d.ts.map +1 -0
- package/dist/lib/file-operations/directory-tree.js +175 -0
- package/dist/lib/file-operations/directory-tree.js.map +1 -0
- package/dist/lib/file-operations/file-info.d.ts +3 -0
- package/dist/lib/file-operations/file-info.d.ts.map +1 -0
- package/dist/lib/file-operations/file-info.js +56 -0
- package/dist/lib/file-operations/file-info.js.map +1 -0
- package/dist/lib/file-operations/list-directory.d.ts +10 -0
- package/dist/lib/file-operations/list-directory.d.ts.map +1 -0
- package/dist/lib/file-operations/list-directory.js +189 -0
- package/dist/lib/file-operations/list-directory.js.map +1 -0
- package/dist/lib/file-operations/read-media-file.d.ts +5 -0
- package/dist/lib/file-operations/read-media-file.d.ts.map +1 -0
- package/dist/lib/file-operations/read-media-file.js +31 -0
- package/dist/lib/file-operations/read-media-file.js.map +1 -0
- package/dist/lib/file-operations/read-multiple-files.d.ts +16 -0
- package/dist/lib/file-operations/read-multiple-files.d.ts.map +1 -0
- package/dist/lib/file-operations/read-multiple-files.js +98 -0
- package/dist/lib/file-operations/read-multiple-files.js.map +1 -0
- package/dist/lib/file-operations/search-content.d.ts +16 -0
- package/dist/lib/file-operations/search-content.d.ts.map +1 -0
- package/dist/lib/file-operations/search-content.js +431 -0
- package/dist/lib/file-operations/search-content.js.map +1 -0
- package/dist/lib/file-operations/search-files.d.ts +9 -0
- package/dist/lib/file-operations/search-files.d.ts.map +1 -0
- package/dist/lib/file-operations/search-files.js +139 -0
- package/dist/lib/file-operations/search-files.js.map +1 -0
- package/dist/lib/file-operations/sorting.d.ts +12 -0
- package/dist/lib/file-operations/sorting.d.ts.map +1 -0
- package/dist/lib/file-operations/sorting.js +24 -0
- package/dist/lib/file-operations/sorting.js.map +1 -0
- package/dist/lib/file-operations.d.ts +9 -57
- package/dist/lib/file-operations.d.ts.map +1 -1
- package/dist/lib/file-operations.js +9 -733
- package/dist/lib/file-operations.js.map +1 -1
- package/dist/lib/fs-helpers/binary-detect.d.ts +3 -0
- package/dist/lib/fs-helpers/binary-detect.d.ts.map +1 -0
- package/dist/lib/fs-helpers/binary-detect.js +54 -0
- package/dist/lib/fs-helpers/binary-detect.js.map +1 -0
- package/dist/lib/fs-helpers/concurrency.d.ts +11 -0
- package/dist/lib/fs-helpers/concurrency.d.ts.map +1 -0
- package/dist/lib/fs-helpers/concurrency.js +95 -0
- package/dist/lib/fs-helpers/concurrency.js.map +1 -0
- package/dist/lib/fs-helpers/fs-utils.d.ts +5 -0
- package/dist/lib/fs-helpers/fs-utils.d.ts.map +1 -0
- package/dist/lib/fs-helpers/fs-utils.js +13 -0
- package/dist/lib/fs-helpers/fs-utils.js.map +1 -0
- package/dist/lib/fs-helpers/readers/head-file.d.ts +2 -0
- package/dist/lib/fs-helpers/readers/head-file.d.ts.map +1 -0
- package/dist/lib/fs-helpers/readers/head-file.js +73 -0
- package/dist/lib/fs-helpers/readers/head-file.js.map +1 -0
- package/dist/lib/fs-helpers/readers/line-range.d.ts +7 -0
- package/dist/lib/fs-helpers/readers/line-range.d.ts.map +1 -0
- package/dist/lib/fs-helpers/readers/line-range.js +46 -0
- package/dist/lib/fs-helpers/readers/line-range.js.map +1 -0
- package/dist/lib/fs-helpers/readers/read-file.d.ts +16 -0
- package/dist/lib/fs-helpers/readers/read-file.d.ts.map +1 -0
- package/dist/lib/fs-helpers/readers/read-file.js +87 -0
- package/dist/lib/fs-helpers/readers/read-file.js.map +1 -0
- package/dist/lib/fs-helpers/readers/tail-file.d.ts +2 -0
- package/dist/lib/fs-helpers/readers/tail-file.d.ts.map +1 -0
- package/dist/lib/fs-helpers/readers/tail-file.js +98 -0
- package/dist/lib/fs-helpers/readers/tail-file.js.map +1 -0
- package/dist/lib/fs-helpers/readers/utf8.d.ts +3 -0
- package/dist/lib/fs-helpers/readers/utf8.d.ts.map +1 -0
- package/dist/lib/fs-helpers/readers/utf8.js +22 -0
- package/dist/lib/fs-helpers/readers/utf8.js.map +1 -0
- package/dist/lib/fs-helpers/readers.d.ts +4 -0
- package/dist/lib/fs-helpers/readers.d.ts.map +1 -0
- package/dist/lib/fs-helpers/readers.js +4 -0
- package/dist/lib/fs-helpers/readers.js.map +1 -0
- package/dist/lib/fs-helpers.d.ts +4 -25
- package/dist/lib/fs-helpers.d.ts.map +1 -1
- package/dist/lib/fs-helpers.js +4 -350
- package/dist/lib/fs-helpers.js.map +1 -1
- package/dist/lib/path-utils.d.ts.map +1 -1
- package/dist/lib/path-utils.js +0 -2
- package/dist/lib/path-utils.js.map +1 -1
- package/dist/lib/path-validation/allowed-directories.d.ts +9 -0
- package/dist/lib/path-validation/allowed-directories.d.ts.map +1 -0
- package/dist/lib/path-validation/allowed-directories.js +94 -0
- package/dist/lib/path-validation/allowed-directories.js.map +1 -0
- package/dist/lib/path-validation/errors.d.ts +5 -0
- package/dist/lib/path-validation/errors.d.ts.map +1 -0
- package/dist/lib/path-validation/errors.js +33 -0
- package/dist/lib/path-validation/errors.js.map +1 -0
- package/dist/lib/path-validation/roots.d.ts +3 -0
- package/dist/lib/path-validation/roots.d.ts.map +1 -0
- package/dist/lib/path-validation/roots.js +49 -0
- package/dist/lib/path-validation/roots.js.map +1 -0
- package/dist/lib/path-validation/validators.d.ts +9 -0
- package/dist/lib/path-validation/validators.d.ts.map +1 -0
- package/dist/lib/path-validation/validators.js +70 -0
- package/dist/lib/path-validation/validators.js.map +1 -0
- package/dist/lib/path-validation.d.ts +3 -7
- package/dist/lib/path-validation.d.ts.map +1 -1
- package/dist/lib/path-validation.js +3 -124
- package/dist/lib/path-validation.js.map +1 -1
- package/dist/schemas/input-helpers.d.ts +8 -0
- package/dist/schemas/input-helpers.d.ts.map +1 -0
- package/dist/schemas/input-helpers.js +44 -0
- package/dist/schemas/input-helpers.js.map +1 -0
- package/dist/schemas/inputs.d.ts +8 -5
- package/dist/schemas/inputs.d.ts.map +1 -1
- package/dist/schemas/inputs.js +41 -64
- package/dist/schemas/inputs.js.map +1 -1
- package/dist/schemas/output-helpers.d.ts +24 -0
- package/dist/schemas/output-helpers.d.ts.map +1 -0
- package/dist/schemas/output-helpers.js +13 -0
- package/dist/schemas/output-helpers.js.map +1 -0
- package/dist/schemas/outputs.d.ts +480 -46
- package/dist/schemas/outputs.d.ts.map +1 -1
- package/dist/schemas/outputs.js +26 -41
- package/dist/schemas/outputs.js.map +1 -1
- package/dist/server.d.ts +9 -1
- package/dist/server.d.ts.map +1 -1
- package/dist/server.js +29 -57
- package/dist/server.js.map +1 -1
- package/dist/tools/analyze-directory.d.ts.map +1 -1
- package/dist/tools/analyze-directory.js +116 -54
- package/dist/tools/analyze-directory.js.map +1 -1
- package/dist/tools/directory-tree.d.ts.map +1 -1
- package/dist/tools/directory-tree.js +86 -49
- package/dist/tools/directory-tree.js.map +1 -1
- package/dist/tools/get-file-info.d.ts.map +1 -1
- package/dist/tools/get-file-info.js +71 -37
- package/dist/tools/get-file-info.js.map +1 -1
- package/dist/tools/list-allowed-dirs.d.ts.map +1 -1
- package/dist/tools/list-allowed-dirs.js +48 -35
- package/dist/tools/list-allowed-dirs.js.map +1 -1
- package/dist/tools/list-directory.d.ts.map +1 -1
- package/dist/tools/list-directory.js +131 -60
- package/dist/tools/list-directory.js.map +1 -1
- package/dist/tools/read-file.d.ts.map +1 -1
- package/dist/tools/read-file.js +70 -56
- package/dist/tools/read-file.js.map +1 -1
- package/dist/tools/read-media-file.d.ts.map +1 -1
- package/dist/tools/read-media-file.js +39 -41
- package/dist/tools/read-media-file.js.map +1 -1
- package/dist/tools/read-multiple-files.d.ts.map +1 -1
- package/dist/tools/read-multiple-files.js +57 -50
- package/dist/tools/read-multiple-files.js.map +1 -1
- package/dist/tools/search-content.d.ts.map +1 -1
- package/dist/tools/search-content.js +147 -95
- package/dist/tools/search-content.js.map +1 -1
- package/dist/tools/search-files.d.ts.map +1 -1
- package/dist/tools/search-files.js +122 -51
- package/dist/tools/search-files.js.map +1 -1
- package/dist/tools/tool-response.d.ts +9 -0
- package/dist/tools/tool-response.d.ts.map +1 -0
- package/dist/tools/tool-response.js +7 -0
- package/dist/tools/tool-response.js.map +1 -0
- package/package.json +2 -1
- package/dist/__tests__/errors.test.d.ts +0 -2
- package/dist/__tests__/errors.test.d.ts.map +0 -1
- package/dist/__tests__/errors.test.js +0 -88
- package/dist/__tests__/errors.test.js.map +0 -1
- package/dist/__tests__/file-operations.test.d.ts +0 -2
- package/dist/__tests__/file-operations.test.d.ts.map +0 -1
- package/dist/__tests__/file-operations.test.js +0 -230
- package/dist/__tests__/file-operations.test.js.map +0 -1
- package/dist/__tests__/lib/formatters.test.d.ts +0 -2
- package/dist/__tests__/lib/formatters.test.d.ts.map +0 -1
- package/dist/__tests__/lib/formatters.test.js +0 -248
- package/dist/__tests__/lib/formatters.test.js.map +0 -1
- package/dist/__tests__/lib/image-parsing.test.d.ts +0 -2
- package/dist/__tests__/lib/image-parsing.test.d.ts.map +0 -1
- package/dist/__tests__/lib/image-parsing.test.js +0 -262
- package/dist/__tests__/lib/image-parsing.test.js.map +0 -1
- package/dist/__tests__/path-validation.test.d.ts +0 -2
- package/dist/__tests__/path-validation.test.d.ts.map +0 -1
- package/dist/__tests__/path-validation.test.js +0 -92
- package/dist/__tests__/path-validation.test.js.map +0 -1
- package/dist/lib/directory-helpers.d.ts +0 -4
- package/dist/lib/directory-helpers.d.ts.map +0 -1
- package/dist/lib/directory-helpers.js +0 -36
- package/dist/lib/directory-helpers.js.map +0 -1
- package/dist/lib/formatters.d.ts +0 -21
- package/dist/lib/formatters.d.ts.map +0 -1
- package/dist/lib/formatters.js +0 -206
- package/dist/lib/formatters.js.map +0 -1
- package/dist/lib/image-parsing.d.ts +0 -4
- package/dist/lib/image-parsing.d.ts.map +0 -1
- package/dist/lib/image-parsing.js +0 -130
- package/dist/lib/image-parsing.js.map +0 -1
- package/dist/lib/mcp-logger.d.ts +0 -11
- package/dist/lib/mcp-logger.d.ts.map +0 -1
- package/dist/lib/mcp-logger.js +0 -49
- package/dist/lib/mcp-logger.js.map +0 -1
- package/dist/lib/roots-utils.d.ts +0 -7
- package/dist/lib/roots-utils.d.ts.map +0 -1
- package/dist/lib/roots-utils.js +0 -39
- package/dist/lib/roots-utils.js.map +0 -1
- package/dist/lib/search-helpers.d.ts +0 -13
- package/dist/lib/search-helpers.d.ts.map +0 -1
- package/dist/lib/search-helpers.js +0 -205
- package/dist/lib/search-helpers.js.map +0 -1
- package/dist/lib/sorting.d.ts +0 -12
- package/dist/lib/sorting.d.ts.map +0 -1
- package/dist/lib/sorting.js +0 -41
- package/dist/lib/sorting.js.map +0 -1
- package/dist/lib/types.d.ts +0 -6
- package/dist/lib/types.d.ts.map +0 -1
- package/dist/lib/types.js +0 -2
- package/dist/lib/types.js.map +0 -1
- package/dist/prompts/analyze-codebase.d.ts +0 -3
- package/dist/prompts/analyze-codebase.d.ts.map +0 -1
- package/dist/prompts/analyze-codebase.js +0 -144
- package/dist/prompts/analyze-codebase.js.map +0 -1
- package/dist/prompts/filesystem-query.d.ts +0 -3
- package/dist/prompts/filesystem-query.d.ts.map +0 -1
- package/dist/prompts/filesystem-query.js +0 -168
- package/dist/prompts/filesystem-query.js.map +0 -1
- package/dist/prompts/find-duplicates.d.ts +0 -3
- package/dist/prompts/find-duplicates.d.ts.map +0 -1
- package/dist/prompts/find-duplicates.js +0 -77
- package/dist/prompts/find-duplicates.js.map +0 -1
- package/dist/prompts/index.d.ts +0 -3
- package/dist/prompts/index.d.ts.map +0 -1
- package/dist/prompts/index.js +0 -13
- package/dist/prompts/index.js.map +0 -1
- package/dist/prompts/project-overview.d.ts +0 -3
- package/dist/prompts/project-overview.d.ts.map +0 -1
- package/dist/prompts/project-overview.js +0 -122
- package/dist/prompts/project-overview.js.map +0 -1
- package/dist/prompts/search-and-replace.d.ts +0 -3
- package/dist/prompts/search-and-replace.d.ts.map +0 -1
- package/dist/prompts/search-and-replace.js +0 -130
- package/dist/prompts/search-and-replace.js.map +0 -1
- package/dist/prompts/shared.d.ts +0 -11
- package/dist/prompts/shared.d.ts.map +0 -1
- package/dist/prompts/shared.js +0 -32
- package/dist/prompts/shared.js.map +0 -1
- package/dist/resources/index.d.ts +0 -3
- package/dist/resources/index.d.ts.map +0 -1
- package/dist/resources/index.js +0 -54
- package/dist/resources/index.js.map +0 -1
- package/dist/schemas/validators.d.ts +0 -12
- package/dist/schemas/validators.d.ts.map +0 -1
- package/dist/schemas/validators.js +0 -35
- package/dist/schemas/validators.js.map +0 -1
- package/dist/utils/index.d.ts +0 -2
- package/dist/utils/index.d.ts.map +0 -1
- package/dist/utils/index.js +0 -2
- package/dist/utils/index.js.map +0 -1
- package/dist/utils/response-helpers.d.ts +0 -22
- package/dist/utils/response-helpers.d.ts.map +0 -1
- package/dist/utils/response-helpers.js +0 -24
- package/dist/utils/response-helpers.js.map +0 -1
package/dist/lib/file-operations.js
@@ -1,734 +1,10 @@
-
-
-
-
-
-
-
-
-
-import { validateExistingPath, validateExistingPathDetailed, } from './path-validation.js';
-import { isSimpleSafePattern, prepareSearchPattern, scanFileForContent, } from './search-helpers.js';
-import { createSearchResultSorter, createSorter } from './sorting.js';
-// Convert file mode to permission string (e.g., 'rwxr-xr-x')
-function getPermissions(mode) {
-    // Permission strings indexed by octal value (0-7)
-    const PERM_STRINGS = [
-        '---',
-        '--x',
-        '-w-',
-        '-wx',
-        'r--',
-        'r-x',
-        'rw-',
-        'rwx',
-    ];
-    // Bitwise mask guarantees indices 0-7
-    const ownerIndex = (mode >> 6) & 0b111;
-    const groupIndex = (mode >> 3) & 0b111;
-    const otherIndex = mode & 0b111;
-    const owner = PERM_STRINGS[ownerIndex] ?? '---';
-    const group = PERM_STRINGS[groupIndex] ?? '---';
-    const other = PERM_STRINGS[otherIndex] ?? '---';
-    return `${owner}${group}${other}`;
-}
-export async function getFileInfo(filePath) {
-    const { requestedPath, resolvedPath, isSymlink } = await validateExistingPathDetailed(filePath);
-    const name = path.basename(requestedPath);
-    const ext = path.extname(name).toLowerCase();
-    const mimeType = ext ? getMimeType(ext) : undefined;
-    let symlinkTarget;
-    if (isSymlink) {
-        try {
-            symlinkTarget = await fs.readlink(requestedPath);
-        }
-        catch {
-            // Symlink target unreadable
-        }
-    }
-    const stats = await fs.stat(resolvedPath);
-    return {
-        name,
-        path: requestedPath,
-        type: isSymlink ? 'symlink' : getFileType(stats),
-        size: stats.size,
-        created: stats.birthtime,
-        modified: stats.mtime,
-        accessed: stats.atime,
-        permissions: getPermissions(stats.mode),
-        isHidden: isHidden(name),
-        mimeType,
-        symlinkTarget,
-    };
-}
-export async function listDirectory(dirPath, options = {}) {
-    const { recursive = false, includeHidden = false, maxDepth = DEFAULT_MAX_DEPTH, maxEntries, sortBy = 'name', includeSymlinkTargets = false, } = options;
-    const validPath = await validateExistingPath(dirPath);
-    const entries = [];
-    let totalFiles = 0;
-    let totalDirectories = 0;
-    let maxDepthReached = 0;
-    let truncated = false;
-    let skippedInaccessible = 0;
-    let symlinksNotFollowed = 0;
-    const stopIfNeeded = () => {
-        if (maxEntries !== undefined && entries.length >= maxEntries) {
-            truncated = true;
-            return true;
-        }
-        return false;
-    };
-    await runWorkQueue([{ currentPath: validPath, depth: 0 }], async ({ currentPath, depth }, enqueue) => {
-        if (depth > maxDepth)
-            return;
-        if (stopIfNeeded())
-            return;
-        maxDepthReached = Math.max(maxDepthReached, depth);
-        let items;
-        try {
-            items = await fs.readdir(currentPath, { withFileTypes: true });
-        }
-        catch {
-            skippedInaccessible++;
-            return;
-        }
-        const visibleItems = includeHidden
-            ? items
-            : items.filter((item) => !isHidden(item.name));
-        const { results: processedEntries, errors: processingErrors } = await processInParallel(visibleItems, async (item) => {
-            const fullPath = path.join(currentPath, item.name);
-            const relativePath = path.relative(validPath, fullPath) || item.name;
-            try {
-                if (item.isSymbolicLink()) {
-                    symlinksNotFollowed++;
-                    const stats = await fs.lstat(fullPath);
-                    let symlinkTarget;
-                    if (includeSymlinkTargets) {
-                        try {
-                            symlinkTarget = await fs.readlink(fullPath);
-                        }
-                        catch {
-                            // Symlink target unreadable
-                        }
-                    }
-                    const entry = {
-                        name: item.name,
-                        path: fullPath,
-                        relativePath,
-                        type: 'symlink',
-                        size: stats.size,
-                        modified: stats.mtime,
-                        symlinkTarget,
-                    };
-                    return { entry };
-                }
-                const stats = await fs.stat(fullPath);
-                const isDir = item.isDirectory();
-                const type = isDir
-                    ? 'directory'
-                    : item.isFile()
-                        ? 'file'
-                        : getFileType(stats);
-                const entry = {
-                    name: item.name,
-                    path: fullPath,
-                    relativePath,
-                    type,
-                    size: type === 'file' ? stats.size : undefined,
-                    modified: stats.mtime,
-                };
-                const enqueueDir = recursive && isDir && depth + 1 <= maxDepth
-                    ? {
-                        currentPath: await validateExistingPath(fullPath),
-                        depth: depth + 1,
-                    }
-                    : undefined;
-                return { entry, enqueueDir };
-            }
-            catch {
-                skippedInaccessible++;
-                const entry = {
-                    name: item.name,
-                    path: fullPath,
-                    relativePath,
-                    type: item.isDirectory()
-                        ? 'directory'
-                        : item.isFile()
-                            ? 'file'
-                            : 'other',
-                };
-                return { entry };
-            }
-        });
-        skippedInaccessible += processingErrors.length;
-        for (const { entry, enqueueDir } of processedEntries) {
-            if (stopIfNeeded())
-                break;
-            entries.push(entry);
-            if (entry.type === 'directory')
-                totalDirectories++;
-            if (entry.type === 'file')
-                totalFiles++;
-            if (enqueueDir)
-                enqueue(enqueueDir);
-        }
-    }, DIR_TRAVERSAL_CONCURRENCY);
-    entries.sort(createSorter(sortBy));
-    return {
-        path: validPath,
-        entries,
-        summary: {
-            totalEntries: entries.length,
-            totalFiles,
-            totalDirectories,
-            maxDepthReached,
-            truncated,
-            skippedInaccessible,
-            symlinksNotFollowed,
-        },
-    };
-}
-export async function searchFiles(basePath, pattern, excludePatterns = [], options = {}) {
-    const validPath = await validateExistingPath(basePath);
-    const { maxResults, sortBy = 'path', maxDepth } = options;
-    const results = [];
-    let skippedInaccessible = 0;
-    let truncated = false;
-    let filesScanned = 0;
-    const batch = [];
-    const flushBatch = async () => {
-        if (batch.length === 0)
-            return;
-        const toProcess = batch.splice(0, batch.length);
-        const settled = await Promise.allSettled(toProcess.map(async (match) => {
-            const stats = await fs.stat(match);
-            const { size, mtime: modified } = stats;
-            return {
-                path: match,
-                type: getFileType(stats),
-                size: stats.isFile() ? size : undefined,
-                modified,
-            };
-        }));
-        for (const r of settled) {
-            if (r.status === 'fulfilled') {
-                if (maxResults !== undefined && results.length >= maxResults) {
-                    truncated = true;
-                    break;
-                }
-                results.push(r.value);
-            }
-            else {
-                skippedInaccessible++;
-            }
-        }
-    };
-    const stream = fg.stream(pattern, {
-        cwd: validPath,
-        absolute: true,
-        onlyFiles: false,
-        dot: true,
-        ignore: excludePatterns,
-        suppressErrors: true,
-        followSymbolicLinks: false,
-        deep: maxDepth,
-    });
-    for await (const entry of stream) {
-        const matchPath = typeof entry === 'string' ? entry : String(entry);
-        filesScanned++;
-        if (maxResults !== undefined && results.length >= maxResults) {
-            truncated = true;
-            break;
-        }
-        batch.push(matchPath);
-        if (batch.length >= PARALLEL_CONCURRENCY) {
-            await flushBatch();
-            if (maxResults !== undefined && results.length >= maxResults) {
-                truncated = true;
-                break;
-            }
-        }
-    }
-    await flushBatch();
-    results.sort(createSearchResultSorter(sortBy));
-    return {
-        basePath: validPath,
-        pattern,
-        results,
-        summary: {
-            matched: results.length,
-            truncated,
-            skippedInaccessible,
-            filesScanned,
-        },
-    };
-}
-export { readFile };
-export async function readMultipleFiles(filePaths, options = {}) {
-    const { encoding = 'utf-8', maxSize = MAX_TEXT_FILE_SIZE, maxTotalSize = 100 * 1024 * 1024, head, tail, } = options;
-    if (filePaths.length === 0)
-        return [];
-    const output = filePaths.map((filePath) => ({ path: filePath }));
-    // Pre-calculate total size to avoid race condition in parallel reads
-    let totalSize = 0;
-    const fileSizes = new Map();
-    for (const filePath of filePaths) {
-        try {
-            const validPath = await validateExistingPath(filePath);
-            const stats = await fs.stat(validPath);
-            fileSizes.set(filePath, stats.size);
-            totalSize += stats.size;
-        }
-        catch {
-            // Skip files we can't access - they'll error during read
-            fileSizes.set(filePath, 0);
-        }
-    }
-    if (totalSize > maxTotalSize) {
-        throw new McpError(ErrorCode.E_TOO_LARGE, `Total size of all files (${totalSize} bytes) exceeds limit (${maxTotalSize} bytes)`, undefined, { totalSize, maxTotalSize, fileCount: filePaths.length });
-    }
-    const { results, errors } = await processInParallel(filePaths.map((filePath, index) => ({ filePath, index })), async ({ filePath, index }) => {
-        const result = await readFile(filePath, {
-            encoding,
-            maxSize,
-            head,
-            tail,
-        });
-        return {
-            index,
-            value: { path: result.path, content: result.content },
-        };
-    }, PARALLEL_CONCURRENCY);
-    for (const r of results) {
-        output[r.index] = r.value;
-    }
-    for (const e of errors) {
-        const filePath = filePaths[e.index] ?? '(unknown)';
-        output[e.index] = {
-            path: filePath,
-            error: e.error.message,
-        };
-    }
-    return output;
-}
-export async function searchContent(basePath, searchPattern, options = {}) {
-    const { filePattern = '**/*', excludePatterns = [], caseSensitive = false, maxResults = DEFAULT_MAX_RESULTS, maxFileSize = MAX_SEARCHABLE_FILE_SIZE, maxFilesScanned, timeoutMs, skipBinary = true, contextLines = 0, wholeWord = false, isLiteral = false, } = options;
-    const validPath = await validateExistingPath(basePath);
-    const deadlineMs = timeoutMs !== undefined ? Date.now() + timeoutMs : undefined;
-    // Prepare the search pattern with optional literal escaping and word boundaries
-    const finalPattern = prepareSearchPattern(searchPattern, {
-        isLiteral,
-        wholeWord,
-    });
-    const needsReDoSCheck = !isLiteral && !isSimpleSafePattern(finalPattern);
-    if (needsReDoSCheck && !safeRegex(finalPattern)) {
-        throw new McpError(ErrorCode.E_INVALID_PATTERN, `Potentially unsafe regular expression (ReDoS risk): ${searchPattern}. ` +
-            'Avoid patterns with nested quantifiers, overlapping alternations, or exponential backtracking.', basePath, { reason: 'ReDoS risk detected' });
-    }
-    let regex;
-    try {
-        regex = new RegExp(finalPattern, caseSensitive ? 'g' : 'gi');
-    }
-    catch (error) {
-        const message = error instanceof Error ? error.message : String(error);
-        throw new McpError(ErrorCode.E_INVALID_PATTERN, `Invalid regular expression: ${finalPattern} (${message})`, basePath, { searchPattern: finalPattern });
-    }
-    const matches = [];
-    let filesScanned = 0;
-    let filesMatched = 0;
-    let skippedTooLarge = 0;
-    let skippedBinary = 0;
-    let skippedInaccessible = 0;
-    let linesSkippedDueToRegexTimeout = 0;
-    let truncated = false;
-    let stoppedReason;
-    let firstPathValidated = false;
-    const stopNow = (reason) => {
-        truncated = true;
-        stoppedReason = reason;
-        return true;
-    };
-    const stream = fg.stream(filePattern, {
-        cwd: validPath,
-        absolute: true,
-        onlyFiles: true,
-        dot: false,
-        ignore: excludePatterns,
-        suppressErrors: true,
-        followSymbolicLinks: false,
-    });
-    for await (const entry of stream) {
-        const file = typeof entry === 'string' ? entry : String(entry);
-        // Paranoid check: validate first result to detect unexpected fast-glob behavior
-        if (!firstPathValidated) {
-            try {
-                await validateExistingPath(file);
-                firstPathValidated = true;
-            }
-            catch {
-                console.error('[SECURITY] fast-glob returned invalid path:', file);
-                stopNow('maxFiles');
-                break;
-            }
-        }
-        if (deadlineMs !== undefined && Date.now() > deadlineMs) {
-            stopNow('timeout');
-            break;
-        }
-        if (maxFilesScanned !== undefined && filesScanned >= maxFilesScanned) {
-            stopNow('maxFiles');
-            break;
-        }
-        if (matches.length >= maxResults) {
-            stopNow('maxResults');
-            break;
-        }
-        try {
-            // fast-glob operates within validated cwd with followSymbolicLinks:false,
-            // so paths are already bounded - skip redundant validateExistingPath for glob results
-            const handle = await fs.open(file, 'r');
-            let shouldScan = true;
-            try {
-                const stats = await handle.stat();
-                filesScanned++;
-                if (stats.size > maxFileSize) {
-                    skippedTooLarge++;
-                    shouldScan = false;
-                }
-                else if (skipBinary) {
-                    const binary = await isProbablyBinary(file, handle);
-                    if (binary) {
-                        skippedBinary++;
-                        shouldScan = false;
-                    }
-                }
-            }
-            finally {
-                await handle.close().catch(() => { });
-            }
-            if (!shouldScan)
-                continue;
-            const scanResult = await scanFileForContent(file, regex, {
-                maxResults,
-                contextLines,
-                deadlineMs,
-                currentMatchCount: matches.length,
-            });
-            matches.push(...scanResult.matches);
-            linesSkippedDueToRegexTimeout += scanResult.linesSkippedDueToRegexTimeout;
-            if (scanResult.fileHadMatches)
-                filesMatched++;
-            if (deadlineMs !== undefined && Date.now() > deadlineMs) {
-                stopNow('timeout');
-                break;
-            }
-            if (matches.length >= maxResults) {
-                stopNow('maxResults');
-                break;
-            }
-            if (stoppedReason !== undefined)
-                break;
-        }
-        catch {
-            skippedInaccessible++;
-        }
-    }
-    return {
-        basePath: validPath,
-        pattern: searchPattern,
-        filePattern,
-        matches,
-        summary: {
-            filesScanned,
-            filesMatched,
-            matches: matches.length,
-            truncated,
-            skippedTooLarge,
-            skippedBinary,
-            skippedInaccessible,
-            linesSkippedDueToRegexTimeout,
-            stoppedReason,
-        },
-    };
-}
-export async function analyzeDirectory(dirPath, options = {}) {
-    const { maxDepth = DEFAULT_MAX_DEPTH, topN = DEFAULT_TOP_N, excludePatterns = [], includeHidden = false, } = options;
-    const validPath = await validateExistingPath(dirPath);
-    let totalFiles = 0;
-    let totalDirectories = 0;
-    let totalSize = 0;
-    let currentMaxDepth = 0;
-    let skippedInaccessible = 0;
-    let symlinksNotFollowed = 0;
-    const fileTypes = {};
-    const largestFiles = [];
-    const recentlyModified = [];
-    const shouldExclude = createExcludeMatcher(excludePatterns);
-    await runWorkQueue([{ currentPath: validPath, depth: 0 }], async ({ currentPath, depth }, enqueue) => {
-        if (depth > maxDepth)
-            return;
-        currentMaxDepth = Math.max(currentMaxDepth, depth);
-        let items;
-        try {
-            items = await fs.readdir(currentPath, { withFileTypes: true });
-        }
-        catch {
-            skippedInaccessible++;
-            return;
-        }
-        for (const item of items) {
-            const fullPath = path.join(currentPath, item.name);
-            const relativePath = path.relative(validPath, fullPath);
-            if (!includeHidden && isHidden(item.name)) {
-                continue;
-            }
-            if (shouldExclude(item.name, relativePath)) {
-                continue;
-            }
-            try {
-                const validated = await validateExistingPathDetailed(fullPath);
-                if (validated.isSymlink || item.isSymbolicLink()) {
-                    symlinksNotFollowed++;
-                    continue;
-                }
-                const stats = await fs.stat(validated.resolvedPath);
-                if (stats.isDirectory()) {
-                    totalDirectories++;
-                    if (depth + 1 <= maxDepth) {
-                        enqueue({
-                            currentPath: validated.resolvedPath,
-                            depth: depth + 1,
-                        });
-                    }
-                }
-                else if (stats.isFile()) {
-                    totalFiles++;
-                    totalSize += stats.size;
-                    const ext = path.extname(item.name).toLowerCase() || '(no extension)';
-                    fileTypes[ext] = (fileTypes[ext] ?? 0) + 1;
-                    insertSorted(largestFiles, { path: validated.resolvedPath, size: stats.size }, (a, b) => b.size - a.size, topN);
-                    insertSorted(recentlyModified, { path: validated.resolvedPath, modified: stats.mtime }, (a, b) => b.modified.getTime() - a.modified.getTime(), topN);
-                }
-            }
-            catch (error) {
-                if (classifyAccessError(error) === 'symlink') {
-                    symlinksNotFollowed++;
-                }
-                else {
-                    skippedInaccessible++;
-                }
-            }
-        }
-    }, DIR_TRAVERSAL_CONCURRENCY);
-    const analysis = {
-        path: validPath,
-        totalFiles,
-        totalDirectories,
-        totalSize,
-        fileTypes,
-        largestFiles,
-        recentlyModified,
-        maxDepth: currentMaxDepth,
-    };
-    return {
-        analysis,
-        summary: {
-            truncated: false,
-            skippedInaccessible,
-            symlinksNotFollowed,
-        },
-    };
-}
-export async function getDirectoryTree(dirPath, options = {}) {
-    const { maxDepth = DEFAULT_MAX_DEPTH, excludePatterns = [], includeHidden = false, includeSize = false, maxFiles, } = options;
-    const validPath = await validateExistingPath(dirPath);
-    const rootStats = await fs.stat(validPath);
-    if (!rootStats.isDirectory()) {
-        throw new McpError(ErrorCode.E_NOT_DIRECTORY, `Not a directory: ${dirPath}`, dirPath);
-    }
-    let totalFiles = 0;
-    let totalDirectories = 0;
-    let maxDepthReached = 0;
-    let skippedInaccessible = 0;
-    let symlinksNotFollowed = 0;
-    let truncated = false;
-    const shouldExclude = createExcludeMatcher(excludePatterns);
-    const hitMaxFiles = () => {
-        return maxFiles !== undefined && totalFiles >= maxFiles;
-    };
-    const collectedEntries = [];
-    const directoriesFound = new Set([validPath]);
-    // Phase 1: Collect all entries using runWorkQueue for work-stealing concurrency
-    await runWorkQueue([{ currentPath: validPath, depth: 0 }], async ({ currentPath, depth }, enqueue) => {
-        if (hitMaxFiles()) {
-            truncated = true;
-            return;
-        }
-        if (depth > maxDepth) {
-            truncated = true;
-            return;
-        }
-        maxDepthReached = Math.max(maxDepthReached, depth);
-        let items;
-        try {
-            items = await fs.readdir(currentPath, { withFileTypes: true });
-        }
-        catch {
-            skippedInaccessible++;
-            return;
-        }
-        for (const item of items) {
-            if (hitMaxFiles()) {
-                truncated = true;
-                break;
-            }
-            const { name } = item;
-            // Filter hidden files
-            if (!includeHidden && name.startsWith('.')) {
-                continue;
-            }
-            const fullPath = path.join(currentPath, name);
-            const relativePath = path.relative(validPath, fullPath);
-            // Check exclusion patterns
-            if (shouldExclude(name, relativePath)) {
-                continue;
-            }
-            // Handle symlinks - skip but count
-            if (item.isSymbolicLink()) {
-                symlinksNotFollowed++;
-                continue;
-            }
-            try {
-                // Validate path is within allowed directories
-                const { resolvedPath, isSymlink } = await validateExistingPathDetailed(fullPath);
-                if (isSymlink) {
-                    symlinksNotFollowed++;
-                    continue;
-                }
-                const stats = await fs.stat(resolvedPath);
-                if (stats.isFile()) {
-                    totalFiles++;
-                    collectedEntries.push({
-                        parentPath: currentPath,
-                        name,
-                        type: 'file',
-                        size: includeSize ? stats.size : undefined,
-                        depth,
-                    });
-                }
-                else if (stats.isDirectory()) {
-                    totalDirectories++;
-                    directoriesFound.add(resolvedPath);
-                    collectedEntries.push({
-                        parentPath: currentPath,
-                        name,
-                        type: 'directory',
-                        depth,
-                    });
-                    // Enqueue subdirectory for traversal if not at max depth
-                    if (depth + 1 <= maxDepth) {
-                        enqueue({ currentPath: resolvedPath, depth: depth + 1 });
-                    }
-                    else {
-                        // Directory exists but we can't recurse due to depth limit
-                        truncated = true;
-                    }
-                }
-            }
-            catch (error) {
-                if (classifyAccessError(error) === 'symlink') {
-                    symlinksNotFollowed++;
-                }
-                else {
-                    skippedInaccessible++;
-                }
-            }
-        }
-    }, DIR_TRAVERSAL_CONCURRENCY);
-    // Phase 2: Build tree structure from collected entries
-    const childrenByParent = new Map();
-    // Initialize all directories with empty children arrays
-    for (const dirPath of directoriesFound) {
-        childrenByParent.set(dirPath, []);
-    }
-    // Group entries by parent and create TreeEntry objects
-    for (const entry of collectedEntries) {
-        const treeEntry = {
-            name: entry.name,
-            type: entry.type,
-        };
-        if (entry.type === 'file' && entry.size !== undefined) {
-            treeEntry.size = entry.size;
-        }
-        if (entry.type === 'directory') {
-            const fullPath = path.join(entry.parentPath, entry.name);
-            treeEntry.children = childrenByParent.get(fullPath) ?? [];
-        }
-        const siblings = childrenByParent.get(entry.parentPath);
-        if (siblings) {
-            siblings.push(treeEntry);
-        }
-    }
-    // Sort children: directories first, then alphabetically by name
-    const sortChildren = (entries) => {
-        entries.sort((a, b) => {
-            if (a.type !== b.type) {
-                return a.type === 'directory' ? -1 : 1;
-            }
-            return a.name.localeCompare(b.name);
-        });
-    };
-    for (const children of childrenByParent.values()) {
-        sortChildren(children);
-    }
-    // Build root entry
-    const rootName = path.basename(validPath);
-    const tree = {
-        name: rootName || validPath,
-        type: 'directory',
-        children: childrenByParent.get(validPath) ?? [],
-    };
-    return {
-        tree,
-        summary: {
-            totalFiles,
-            totalDirectories,
-            maxDepthReached,
-            truncated,
-            skippedInaccessible,
-            symlinksNotFollowed,
-        },
-    };
-}
-export async function readMediaFile(filePath, { maxSize = MAX_MEDIA_FILE_SIZE } = {}) {
-    const validPath = await validateExistingPath(filePath);
-    const stats = await fs.stat(validPath);
-    const { size } = stats;
-    if (!stats.isFile()) {
-        throw new McpError(ErrorCode.E_NOT_FILE, `Not a file: ${filePath}`, filePath);
-    }
-    if (size > maxSize) {
-        throw new McpError(ErrorCode.E_TOO_LARGE, `File too large: ${size} bytes (max: ${maxSize} bytes)`, filePath, { size, maxSize });
-    }
-    const ext = path.extname(validPath).toLowerCase();
-    const mimeType = getMimeType(ext);
-    const buffer = await fs.readFile(validPath);
-    const data = buffer.toString('base64');
-    let width;
-    let height;
-    if (mimeType.startsWith('image/')) {
-        const dimensions = parseImageDimensions(buffer, ext);
-        if (dimensions) {
-            ({ width, height } = dimensions);
-        }
-    }
-    return {
-        path: validPath,
-        mimeType,
-        size,
-        data,
-        width,
-        height,
-    };
-}
+export { analyzeDirectory } from './file-operations/analyze-directory.js';
+export { getDirectoryTree } from './file-operations/directory-tree.js';
+export { getFileInfo } from './file-operations/file-info.js';
+export { listDirectory } from './file-operations/list-directory.js';
+export { readMediaFile } from './file-operations/read-media-file.js';
+export { readMultipleFiles } from './file-operations/read-multiple-files.js';
+export { searchContent } from './file-operations/search-content.js';
+export { searchFiles } from './file-operations/search-files.js';
+export { readFile } from './fs-helpers.js';
 //# sourceMappingURL=file-operations.js.map