projscan 0.11.0 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +45 -26
- package/dist/analyzers/unusedDependencyCheck.js +69 -17
- package/dist/analyzers/unusedDependencyCheck.js.map +1 -1
- package/dist/cli/_shared.d.ts +16 -0
- package/dist/cli/_shared.js +210 -0
- package/dist/cli/_shared.js.map +1 -0
- package/dist/cli/commands/analyze.d.ts +1 -0
- package/dist/cli/commands/analyze.js +87 -0
- package/dist/cli/commands/analyze.js.map +1 -0
- package/dist/cli/commands/audit.d.ts +1 -0
- package/dist/cli/commands/audit.js +47 -0
- package/dist/cli/commands/audit.js.map +1 -0
- package/dist/cli/commands/badge.d.ts +1 -0
- package/dist/cli/commands/badge.js +45 -0
- package/dist/cli/commands/badge.js.map +1 -0
- package/dist/cli/commands/ci.d.ts +1 -0
- package/dist/cli/commands/ci.js +57 -0
- package/dist/cli/commands/ci.js.map +1 -0
- package/dist/cli/commands/coupling.d.ts +1 -0
- package/dist/cli/commands/coupling.js +83 -0
- package/dist/cli/commands/coupling.js.map +1 -0
- package/dist/cli/commands/coverage.d.ts +1 -0
- package/dist/cli/commands/coverage.js +63 -0
- package/dist/cli/commands/coverage.js.map +1 -0
- package/dist/cli/commands/dependencies.d.ts +1 -0
- package/dist/cli/commands/dependencies.js +45 -0
- package/dist/cli/commands/dependencies.js.map +1 -0
- package/dist/cli/commands/diagram.d.ts +1 -0
- package/dist/cli/commands/diagram.js +45 -0
- package/dist/cli/commands/diagram.js.map +1 -0
- package/dist/cli/commands/diff.d.ts +1 -0
- package/dist/cli/commands/diff.js +70 -0
- package/dist/cli/commands/diff.js.map +1 -0
- package/dist/cli/commands/doctor.d.ts +1 -0
- package/dist/cli/commands/doctor.js +62 -0
- package/dist/cli/commands/doctor.js.map +1 -0
- package/dist/cli/commands/explain.d.ts +1 -0
- package/dist/cli/commands/explain.js +42 -0
- package/dist/cli/commands/explain.js.map +1 -0
- package/dist/cli/commands/file.d.ts +1 -0
- package/dist/cli/commands/file.js +45 -0
- package/dist/cli/commands/file.js.map +1 -0
- package/dist/cli/commands/fix.d.ts +1 -0
- package/dist/cli/commands/fix.js +70 -0
- package/dist/cli/commands/fix.js.map +1 -0
- package/dist/cli/commands/help.d.ts +1 -0
- package/dist/cli/commands/help.js +11 -0
- package/dist/cli/commands/help.js.map +1 -0
- package/dist/cli/commands/hotspots.d.ts +1 -0
- package/dist/cli/commands/hotspots.js +74 -0
- package/dist/cli/commands/hotspots.js.map +1 -0
- package/dist/cli/commands/mcp.d.ts +1 -0
- package/dist/cli/commands/mcp.js +21 -0
- package/dist/cli/commands/mcp.js.map +1 -0
- package/dist/cli/commands/outdated.d.ts +1 -0
- package/dist/cli/commands/outdated.js +51 -0
- package/dist/cli/commands/outdated.js.map +1 -0
- package/dist/cli/commands/prDiff.d.ts +1 -0
- package/dist/cli/commands/prDiff.js +59 -0
- package/dist/cli/commands/prDiff.js.map +1 -0
- package/dist/cli/commands/search.d.ts +1 -0
- package/dist/cli/commands/search.js +233 -0
- package/dist/cli/commands/search.js.map +1 -0
- package/dist/cli/commands/structure.d.ts +1 -0
- package/dist/cli/commands/structure.js +58 -0
- package/dist/cli/commands/structure.js.map +1 -0
- package/dist/cli/commands/upgrade.d.ts +1 -0
- package/dist/cli/commands/upgrade.js +44 -0
- package/dist/cli/commands/upgrade.js.map +1 -0
- package/dist/cli/commands/workspaces.d.ts +1 -0
- package/dist/cli/commands/workspaces.js +35 -0
- package/dist/cli/commands/workspaces.js.map +1 -0
- package/dist/cli/index.js +45 -1416
- package/dist/cli/index.js.map +1 -1
- package/dist/core/couplingAnalyzer.d.ts +1 -1
- package/dist/core/couplingAnalyzer.js +3 -3
- package/dist/core/hotspotAnalyzer.js +2 -2
- package/dist/core/languages/LanguageAdapter.d.ts +1 -1
- package/dist/core/languages/goAdapter.js +7 -5
- package/dist/core/languages/goAdapter.js.map +1 -1
- package/dist/core/languages/goCallSites.d.ts +20 -0
- package/dist/core/languages/goCallSites.js +42 -0
- package/dist/core/languages/goCallSites.js.map +1 -0
- package/dist/core/languages/goCyclomatic.d.ts +1 -1
- package/dist/core/languages/goCyclomatic.js +2 -2
- package/dist/core/languages/goExports.d.ts +1 -1
- package/dist/core/languages/goExports.js +1 -1
- package/dist/core/languages/goManifests.d.ts +1 -1
- package/dist/core/languages/goManifests.js +2 -2
- package/dist/core/languages/javaAdapter.d.ts +2 -0
- package/dist/core/languages/javaAdapter.js +148 -0
- package/dist/core/languages/javaAdapter.js.map +1 -0
- package/dist/core/languages/javaCallSites.d.ts +16 -0
- package/dist/core/languages/javaCallSites.js +45 -0
- package/dist/core/languages/javaCallSites.js.map +1 -0
- package/dist/core/languages/javaCyclomatic.d.ts +21 -0
- package/dist/core/languages/javaCyclomatic.js +49 -0
- package/dist/core/languages/javaCyclomatic.js.map +1 -0
- package/dist/core/languages/javaExports.d.ts +25 -0
- package/dist/core/languages/javaExports.js +80 -0
- package/dist/core/languages/javaExports.js.map +1 -0
- package/dist/core/languages/javaImports.d.ts +25 -0
- package/dist/core/languages/javaImports.js +49 -0
- package/dist/core/languages/javaImports.js.map +1 -0
- package/dist/core/languages/javaManifests.d.ts +25 -0
- package/dist/core/languages/javaManifests.js +86 -0
- package/dist/core/languages/javaManifests.js.map +1 -0
- package/dist/core/languages/pythonAdapter.js +3 -1
- package/dist/core/languages/pythonAdapter.js.map +1 -1
- package/dist/core/languages/pythonCallSites.d.ts +19 -0
- package/dist/core/languages/pythonCallSites.js +40 -0
- package/dist/core/languages/pythonCallSites.js.map +1 -0
- package/dist/core/languages/registry.js +3 -1
- package/dist/core/languages/registry.js.map +1 -1
- package/dist/core/languages/rubyAdapter.d.ts +2 -0
- package/dist/core/languages/rubyAdapter.js +131 -0
- package/dist/core/languages/rubyAdapter.js.map +1 -0
- package/dist/core/languages/rubyCallSites.d.ts +16 -0
- package/dist/core/languages/rubyCallSites.js +34 -0
- package/dist/core/languages/rubyCallSites.js.map +1 -0
- package/dist/core/languages/rubyCyclomatic.d.ts +19 -0
- package/dist/core/languages/rubyCyclomatic.js +47 -0
- package/dist/core/languages/rubyCyclomatic.js.map +1 -0
- package/dist/core/languages/rubyExports.d.ts +24 -0
- package/dist/core/languages/rubyExports.js +53 -0
- package/dist/core/languages/rubyExports.js.map +1 -0
- package/dist/core/languages/rubyImports.d.ts +12 -0
- package/dist/core/languages/rubyImports.js +75 -0
- package/dist/core/languages/rubyImports.js.map +1 -0
- package/dist/core/languages/rubyManifests.d.ts +20 -0
- package/dist/core/languages/rubyManifests.js +55 -0
- package/dist/core/languages/rubyManifests.js.map +1 -0
- package/dist/core/languages/treeSitterLoader.js +3 -1
- package/dist/core/languages/treeSitterLoader.js.map +1 -1
- package/dist/core/monorepo.js +5 -5
- package/dist/core/outdatedDetector.d.ts +13 -2
- package/dist/core/outdatedDetector.js +86 -16
- package/dist/core/outdatedDetector.js.map +1 -1
- package/dist/core/prDiff.d.ts +1 -1
- package/dist/core/prDiff.js +2 -2
- package/dist/grammars/tree-sitter-java.wasm +0 -0
- package/dist/grammars/tree-sitter-ruby.wasm +0 -0
- package/dist/mcp/server.js +0 -22
- package/dist/mcp/server.js.map +1 -1
- package/dist/mcp/tools/_shared.d.ts +24 -0
- package/dist/mcp/tools/_shared.js +82 -0
- package/dist/mcp/tools/_shared.js.map +1 -0
- package/dist/mcp/tools/analyze.d.ts +2 -0
- package/dist/mcp/tools/analyze.js +55 -0
- package/dist/mcp/tools/analyze.js.map +1 -0
- package/dist/mcp/tools/audit.d.ts +2 -0
- package/dist/mcp/tools/audit.js +32 -0
- package/dist/mcp/tools/audit.js.map +1 -0
- package/dist/mcp/tools/coupling.d.ts +2 -0
- package/dist/mcp/tools/coupling.js +67 -0
- package/dist/mcp/tools/coupling.js.map +1 -0
- package/dist/mcp/tools/coverage.d.ts +2 -0
- package/dist/mcp/tools/coverage.js +53 -0
- package/dist/mcp/tools/coverage.js.map +1 -0
- package/dist/mcp/tools/dependencies.d.ts +2 -0
- package/dist/mcp/tools/dependencies.js +16 -0
- package/dist/mcp/tools/dependencies.js.map +1 -0
- package/dist/mcp/tools/doctor.d.ts +2 -0
- package/dist/mcp/tools/doctor.js +30 -0
- package/dist/mcp/tools/doctor.js.map +1 -0
- package/dist/mcp/tools/explain.d.ts +2 -0
- package/dist/mcp/tools/explain.js +30 -0
- package/dist/mcp/tools/explain.js.map +1 -0
- package/dist/mcp/tools/file.d.ts +2 -0
- package/dist/mcp/tools/file.js +22 -0
- package/dist/mcp/tools/file.js.map +1 -0
- package/dist/mcp/tools/graph.d.ts +2 -0
- package/dist/mcp/tools/graph.js +69 -0
- package/dist/mcp/tools/graph.js.map +1 -0
- package/dist/mcp/tools/hotspots.d.ts +2 -0
- package/dist/mcp/tools/hotspots.js +64 -0
- package/dist/mcp/tools/hotspots.js.map +1 -0
- package/dist/mcp/tools/outdated.d.ts +2 -0
- package/dist/mcp/tools/outdated.js +36 -0
- package/dist/mcp/tools/outdated.js.map +1 -0
- package/dist/mcp/tools/prDiff.d.ts +2 -0
- package/dist/mcp/tools/prDiff.js +38 -0
- package/dist/mcp/tools/prDiff.js.map +1 -0
- package/dist/mcp/tools/search.d.ts +2 -0
- package/dist/mcp/tools/search.js +167 -0
- package/dist/mcp/tools/search.js.map +1 -0
- package/dist/mcp/tools/structure.d.ts +2 -0
- package/dist/mcp/tools/structure.js +34 -0
- package/dist/mcp/tools/structure.js.map +1 -0
- package/dist/mcp/tools/upgrade.d.ts +2 -0
- package/dist/mcp/tools/upgrade.js +38 -0
- package/dist/mcp/tools/upgrade.js.map +1 -0
- package/dist/mcp/tools/workspaces.d.ts +2 -0
- package/dist/mcp/tools/workspaces.js +13 -0
- package/dist/mcp/tools/workspaces.js.map +1 -0
- package/dist/mcp/tools.d.ts +12 -6
- package/dist/mcp/tools.js +40 -854
- package/dist/mcp/tools.js.map +1 -1
- package/dist/tool-manifest.json +358 -0
- package/dist/types.d.ts +8 -6
- package/dist/utils/config.js +0 -10
- package/dist/utils/config.js.map +1 -1
- package/package.json +6 -3
|
import { scanRepository } from '../../core/repositoryScanner.js';
import { collectIssues } from '../../core/issueEngine.js';
import { analyzeHotspots } from '../../core/hotspotAnalyzer.js';
import { parseCoverage, coverageMap } from '../../core/coverageParser.js';
import { joinCoverageWithHotspots } from '../../core/coverageJoin.js';
import { paginate, listChecksum, readPageParams } from '../pagination.js';
import { PACKAGE_ARG_SCHEMA, resolvePackageFilter } from './_shared.js';

/**
 * Handler for the `projscan_coverage` MCP tool: ranks hotspot files by risk
 * joined with their coverage data, optionally scoped to one workspace package,
 * and returns a cursor-paginated page of entries.
 */
async function runCoverageTool(args, rootPath) {
    const coverageReport = await parseCoverage(rootPath);
    const repoScan = await scanRepository(rootPath);
    const issueList = await collectIssues(rootPath, repoScan.files);
    // Clamp the caller-supplied limit into [1, 500]; 200 when absent.
    // NOTE(review): the schema description below advertises "default: 30,
    // max: 200", which disagrees with these numbers — confirm which is intended.
    const requestedLimit = typeof args.limit === 'number' ? args.limit : 200;
    const cappedLimit = Math.max(1, Math.min(500, requestedLimit));
    const hotspotReport = await analyzeHotspots(rootPath, repoScan.files, issueList, {
        limit: cappedLimit,
        coverage: coverageReport.available ? coverageMap(coverageReport) : undefined,
    });
    const joined = joinCoverageWithHotspots(hotspotReport, coverageReport);
    // No coverage file found: pass the "unavailable" report straight through.
    if (!joined.available) {
        return joined;
    }
    const packagePredicate = await resolvePackageFilter(rootPath, args);
    let entries = joined.entries;
    if (packagePredicate) {
        entries = entries.filter((entry) => packagePredicate(entry.relativePath));
    }
    const page = paginate(entries, readPageParams(args), listChecksum(entries));
    return {
        available: true,
        coverageSource: joined.coverageSource,
        coverageSourceFile: joined.coverageSourceFile,
        entries: page.items,
        total: page.total,
        nextCursor: page.nextCursor,
    };
}

export const coverageTool = {
    name: 'projscan_coverage',
    description: 'Join test coverage with hotspot risk. Returns files ranked by "risk × uncovered fraction" - the scariest untested files. Requires a coverage file at coverage/lcov.info, coverage/coverage-final.json, or coverage/coverage-summary.json.',
    inputSchema: {
        type: 'object',
        properties: {
            limit: {
                type: 'number',
                description: 'How many entries to return (default: 30, max: 200).',
            },
            max_tokens: {
                type: 'number',
                description: 'Cap the response size to roughly this many tokens (~4 chars/token). Truncates the entries array to fit.',
            },
            package: PACKAGE_ARG_SCHEMA,
        },
    },
    handler: runCoverageTool,
};
//# sourceMappingURL=coverage.js.map
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"coverage.js","sourceRoot":"","sources":["../../../src/mcp/tools/coverage.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,iCAAiC,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAC1D,OAAO,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAChE,OAAO,EAAE,aAAa,EAAE,WAAW,EAAE,MAAM,8BAA8B,CAAC;AAC1E,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AACtE,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,cAAc,EAAE,MAAM,kBAAkB,CAAC;AAC1E,OAAO,EAAE,kBAAkB,EAAE,oBAAoB,EAAgB,MAAM,cAAc,CAAC;AAEtF,MAAM,CAAC,MAAM,YAAY,GAAY;IACnC,IAAI,EAAE,mBAAmB;IACzB,WAAW,EACT,2OAA2O;IAC7O,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ;QACd,UAAU,EAAE;YACV,KAAK,EAAE;gBACL,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE,qDAAqD;aACnE;YACD,UAAU,EAAE;gBACV,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE,yGAAyG;aACvH;YACD,OAAO,EAAE,kBAAkB;SAC5B;KACF;IACD,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,aAAa,CAAC,QAAQ,CAAC,CAAC;QAC/C,MAAM,IAAI,GAAG,MAAM,cAAc,CAAC,QAAQ,CAAC,CAAC;QAC5C,MAAM,MAAM,GAAG,MAAM,aAAa,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC;QACzD,MAAM,QAAQ,GAAG,OAAO,IAAI,CAAC,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC;QACnE,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC,CAAC;QACnD,MAAM,QAAQ,GAAG,MAAM,eAAe,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE;YACnE,KAAK;YACL,QAAQ,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;SACjE,CAAC,CAAC;QACH,MAAM,MAAM,GAAG,wBAAwB,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;QAC5D,IAAI,CAAC,MAAM,CAAC,SAAS;YAAE,OAAO,MAAM,CAAC;QACrC,MAAM,MAAM,GAAG,MAAM,oBAAoB,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QAC1D,MAAM,eAAe,GAAG,MAAM;YAC5B,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC;YACtD,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC;QACnB,MAAM,IAAI,GAAG,QAAQ,CAAC,eAAe,EAAE,cAAc,CAAC,IAAI,CAAC,EAAE,YAAY,CAAC,eAAe,CAAC,CAAC,CAAC;QAC5F,OAAO;YACL,SAAS,EAAE,IAAI;YACf,cAAc,EAAE,MAAM,CAAC,cAAc;YACrC,kBAAkB,EAAE,MAAM,CAAC,kBAAkB;YAC7C,OAAO,EAAE,IAAI,CAAC,KAAK;YACnB,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,UAAU,EAAE,IAAI,CAAC,UAAU;
SAC5B,CAAC;IACJ,CAAC;CACF,CAAC"}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { analyzeDependencies } from '../../core/dependencyAnalyzer.js';
|
|
2
|
+
export const dependenciesTool = {
|
|
3
|
+
name: 'projscan_dependencies',
|
|
4
|
+
description: 'Analyze package.json dependencies and return counts and risks (deprecated packages, wildcard versions, etc.).',
|
|
5
|
+
inputSchema: {
|
|
6
|
+
type: 'object',
|
|
7
|
+
properties: {},
|
|
8
|
+
},
|
|
9
|
+
handler: async (_args, rootPath) => {
|
|
10
|
+
const report = await analyzeDependencies(rootPath);
|
|
11
|
+
if (!report)
|
|
12
|
+
return { available: false, reason: 'No package.json found' };
|
|
13
|
+
return { available: true, ...report };
|
|
14
|
+
},
|
|
15
|
+
};
|
|
16
|
+
//# sourceMappingURL=dependencies.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"dependencies.js","sourceRoot":"","sources":["../../../src/mcp/tools/dependencies.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,MAAM,kCAAkC,CAAC;AAGvE,MAAM,CAAC,MAAM,gBAAgB,GAAY;IACvC,IAAI,EAAE,uBAAuB;IAC7B,WAAW,EAAE,+GAA+G;IAC5H,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ;QACd,UAAU,EAAE,EAAE;KACf;IACD,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,EAAE;QACjC,MAAM,MAAM,GAAG,MAAM,mBAAmB,CAAC,QAAQ,CAAC,CAAC;QACnD,IAAI,CAAC,MAAM;YAAE,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,MAAM,EAAE,uBAAuB,EAAE,CAAC;QAC1E,OAAO,EAAE,SAAS,EAAE,IAAI,EAAE,GAAG,MAAM,EAAE,CAAC;IACxC,CAAC;CACF,CAAC"}
|
|
import { scanRepository } from '../../core/repositoryScanner.js';
import { collectIssues } from '../../core/issueEngine.js';
import { calculateScore } from '../../utils/scoreCalculator.js';
import { PACKAGE_ARG_SCHEMA, resolvePackageFilter } from './_shared.js';

// True when at least one of the issue's locations names a file accepted by
// `passes`. Issues with no locations never match a package filter.
const issueTouchesPackage = (issue, passes) => {
    const locations = issue.locations ?? [];
    return locations.some((loc) => loc.file && passes(loc.file));
};

/**
 * MCP tool `projscan_doctor`: full project health check. Scans the repo,
 * collects issues (optionally scoped to one workspace package), and scores
 * the result 0-100 with a letter grade.
 */
export const doctorTool = {
    name: 'projscan_doctor',
    description: 'Run a health check on the project. Returns a 0-100 score, letter grade, and the list of issues (linting, formatting, tests, security, architecture).',
    inputSchema: {
        type: 'object',
        properties: {
            package: PACKAGE_ARG_SCHEMA,
        },
    },
    handler: async (args, rootPath) => {
        const scan = await scanRepository(rootPath);
        const allIssues = await collectIssues(rootPath, scan.files);
        const passes = await resolvePackageFilter(rootPath, args);
        const issues = passes
            ? allIssues.filter((issue) => issueTouchesPackage(issue, passes))
            : allIssues;
        return { health: calculateScore(issues), issues };
    },
};
//# sourceMappingURL=doctor.js.map
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"doctor.js","sourceRoot":"","sources":["../../../src/mcp/tools/doctor.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,iCAAiC,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAC1D,OAAO,EAAE,cAAc,EAAE,MAAM,gCAAgC,CAAC;AAChE,OAAO,EAAE,kBAAkB,EAAE,oBAAoB,EAAgB,MAAM,cAAc,CAAC;AAEtF,MAAM,CAAC,MAAM,UAAU,GAAY;IACjC,IAAI,EAAE,iBAAiB;IACvB,WAAW,EACT,sJAAsJ;IACxJ,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ;QACd,UAAU,EAAE;YACV,OAAO,EAAE,kBAAkB;SAC5B;KACF;IACD,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;QAChC,MAAM,IAAI,GAAG,MAAM,cAAc,CAAC,QAAQ,CAAC,CAAC;QAC5C,IAAI,MAAM,GAAG,MAAM,aAAa,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC;QACvD,MAAM,MAAM,GAAG,MAAM,oBAAoB,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QAC1D,IAAI,MAAM,EAAE,CAAC;YACX,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;gBAC3B,MAAM,IAAI,GAAG,CAAC,CAAC,SAAS,IAAI,EAAE,CAAC;gBAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;oBAAE,OAAO,KAAK,CAAC;gBACpC,OAAO,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;YACpD,CAAC,CAAC,CAAC;QACL,CAAC;QACD,MAAM,MAAM,GAAG,cAAc,CAAC,MAAM,CAAC,CAAC;QACtC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC;IAC5B,CAAC;CACF,CAAC"}
|
|
import path from 'node:path';
import fs from 'node:fs/promises';
import { explainFile } from './_shared.js';

/**
 * MCP tool `projscan_explain`: reads one file (path relative to the project
 * root) and explains its purpose, imports, exports, and potential issues.
 *
 * Throws if `file` is missing/empty or resolves outside the project root.
 */
export const explainTool = {
    name: 'projscan_explain',
    description: 'Explain a single file: purpose, imports, exports, and potential issues. Useful for understanding unfamiliar code before editing.',
    inputSchema: {
        type: 'object',
        properties: {
            file: {
                type: 'string',
                description: 'Path to the file relative to the project root.',
            },
        },
        required: ['file'],
    },
    handler: async (args, rootPath) => {
        const rel = typeof args.file === 'string' ? args.file : '';
        if (!rel)
            throw new Error('file argument is required');
        const resolvedRoot = path.resolve(rootPath);
        const absolutePath = path.resolve(resolvedRoot, rel);
        // Containment check via path.relative instead of a string-prefix test.
        // The old `startsWith(root + path.sep)` form broke when the root is the
        // filesystem root ('/' + sep === '//', rejecting every file) and on
        // Windows cross-drive paths. Outside-the-root resolves to either a
        // leading '..' segment or (cross-drive) an absolute path.
        const relToRoot = path.relative(resolvedRoot, absolutePath);
        const escapesRoot = relToRoot === '..'
            || relToRoot.startsWith(`..${path.sep}`)
            || path.isAbsolute(relToRoot);
        if (escapesRoot) {
            throw new Error('file must be inside the project root');
        }
        const content = await fs.readFile(absolutePath, 'utf-8');
        return explainFile(absolutePath, content, rootPath);
    },
};
//# sourceMappingURL=explain.js.map
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"explain.js","sourceRoot":"","sources":["../../../src/mcp/tools/explain.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,WAAW,CAAC;AAC7B,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAClC,OAAO,EAAE,WAAW,EAAgB,MAAM,cAAc,CAAC;AAEzD,MAAM,CAAC,MAAM,WAAW,GAAY;IAClC,IAAI,EAAE,kBAAkB;IACxB,WAAW,EACT,kIAAkI;IACpI,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ;QACd,UAAU,EAAE;YACV,IAAI,EAAE;gBACJ,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE,gDAAgD;aAC9D;SACF;QACD,QAAQ,EAAE,CAAC,MAAM,CAAC;KACnB;IACD,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;QAChC,MAAM,GAAG,GAAG,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;QAC3D,IAAI,CAAC,GAAG;YAAE,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC;QACvD,MAAM,YAAY,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC;QACjD,MAAM,YAAY,GAAG,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC5C,IAAI,CAAC,YAAY,CAAC,UAAU,CAAC,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,YAAY,KAAK,YAAY,EAAE,CAAC;YACvF,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;QAC1D,CAAC;QACD,MAAM,OAAO,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,YAAY,EAAE,OAAO,CAAC,CAAC;QACzD,OAAO,WAAW,CAAC,YAAY,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;IACtD,CAAC;CACF,CAAC"}
|
|
import { inspectFile } from '../../core/fileInspector.js';

/**
 * MCP tool `projscan_file`: deep-dive report on a single file (purpose,
 * imports/exports, churn, risk, ownership, complexity, coupling).
 * `file` is required and must be a non-empty string path relative to root.
 */
export const fileTool = {
    name: 'projscan_file',
    description: 'Drill into a single file: purpose, imports, exports, churn/risk/ownership, related health issues, AST cyclomatic complexity, and coupling (fan-in / fan-out). Use this after projscan_hotspots when deciding how to approach a specific risky file.',
    inputSchema: {
        type: 'object',
        properties: {
            file: {
                type: 'string',
                description: 'Path to the file relative to the project root.',
            },
        },
        required: ['file'],
    },
    handler: async (args, rootPath) => {
        // Validate before touching the filesystem.
        if (typeof args.file !== 'string' || args.file === '') {
            throw new Error('file argument is required');
        }
        return inspectFile(rootPath, args.file);
    },
};
//# sourceMappingURL=file.js.map
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"file.js","sourceRoot":"","sources":["../../../src/mcp/tools/file.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,6BAA6B,CAAC;AAG1D,MAAM,CAAC,MAAM,QAAQ,GAAY;IAC/B,IAAI,EAAE,eAAe;IACrB,WAAW,EACT,qPAAqP;IACvP,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ;QACd,UAAU,EAAE;YACV,IAAI,EAAE;gBACJ,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE,gDAAgD;aAC9D;SACF;QACD,QAAQ,EAAE,CAAC,MAAM,CAAC;KACnB;IACD,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;QAChC,MAAM,GAAG,GAAG,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;QAC3D,IAAI,CAAC,GAAG;YAAE,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC;QACvD,OAAO,MAAM,WAAW,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC;IAC1C,CAAC;CACF,CAAC"}
|
|
import { scanRepository } from '../../core/repositoryScanner.js';
import { buildCodeGraph, filesImportingFile, filesImportingPackage, filesDefiningSymbol, exportsOf, importsOf, } from '../../core/codeGraph.js';
import { loadCachedGraph, saveCachedGraph } from '../../core/indexCache.js';

/**
 * MCP tool `projscan_graph`: direct structural queries against the AST code
 * graph. `direction` selects the query; `file` or `symbol` supplies the
 * subject; `limit` caps the result list (clamped to [1, 500], default 50).
 */
export const graphTool = {
    name: 'projscan_graph',
    description: 'Query the AST-based code graph directly. Returns imports, exports, importers, or symbol definitions for a file or symbol. Agents should prefer this over analyze/doctor/explain for targeted structural questions - it is much cheaper and more accurate.',
    inputSchema: {
        type: 'object',
        properties: {
            file: {
                type: 'string',
                description: 'File path (relative to project root) to query.',
            },
            symbol: {
                type: 'string',
                description: 'Symbol name to query (e.g. a function or class). Use instead of `file` to find where a symbol is defined.',
            },
            direction: {
                type: 'string',
                description: 'What to return: "imports" (what the file imports), "exports" (what the file exports), "importers" (who imports the file), "symbol_defs" (files defining the symbol), "package_importers" (files importing a package by name).',
                enum: ['imports', 'exports', 'importers', 'symbol_defs', 'package_importers'],
            },
            limit: { type: 'number', description: 'Max entries returned (default 50).' },
            max_tokens: { type: 'number', description: 'Cap the response to roughly this many tokens.' },
        },
        required: ['direction'],
    },
    handler: async (args, rootPath) => {
        // Build (or refresh) the code graph, reusing the on-disk cache.
        const repoScan = await scanRepository(rootPath);
        const cachedGraph = await loadCachedGraph(rootPath);
        const graph = await buildCodeGraph(rootPath, repoScan.files, cachedGraph);
        await saveCachedGraph(rootPath, graph);
        const direction = String(args.direction);
        const file = typeof args.file === 'string' ? args.file : undefined;
        const symbol = typeof args.symbol === 'string' ? args.symbol : undefined;
        const requestedLimit = typeof args.limit === 'number' ? args.limit : 50;
        const limit = Math.max(1, Math.min(500, requestedLimit));
        // Guard-clause chain instead of switch: each branch validates its own
        // required argument before querying.
        if (direction === 'imports') {
            if (!file) {
                throw new Error('file argument is required for direction=imports');
            }
            return { file, imports: importsOf(graph, file).slice(0, limit) };
        }
        if (direction === 'exports') {
            if (!file) {
                throw new Error('file argument is required for direction=exports');
            }
            return { file, exports: exportsOf(graph, file).slice(0, limit) };
        }
        if (direction === 'importers') {
            if (!file) {
                throw new Error('file argument is required for direction=importers');
            }
            return { file, importers: filesImportingFile(graph, file).slice(0, limit) };
        }
        if (direction === 'symbol_defs') {
            if (!symbol) {
                throw new Error('symbol argument is required for direction=symbol_defs');
            }
            return { symbol, definedIn: filesDefiningSymbol(graph, symbol).slice(0, limit) };
        }
        if (direction === 'package_importers') {
            // Accept the package name via either argument for convenience.
            const pkg = symbol ?? file;
            if (!pkg) {
                throw new Error('symbol (or file) argument is required for direction=package_importers');
            }
            return { package: pkg, importers: filesImportingPackage(graph, pkg).slice(0, limit) };
        }
        throw new Error(`unknown direction: ${direction}`);
    },
};
//# sourceMappingURL=graph.js.map
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"graph.js","sourceRoot":"","sources":["../../../src/mcp/tools/graph.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,iCAAiC,CAAC;AACjE,OAAO,EACL,cAAc,EACd,kBAAkB,EAClB,qBAAqB,EACrB,mBAAmB,EACnB,SAAS,EACT,SAAS,GACV,MAAM,yBAAyB,CAAC;AACjC,OAAO,EAAE,eAAe,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAG5E,MAAM,CAAC,MAAM,SAAS,GAAY;IAChC,IAAI,EAAE,gBAAgB;IACtB,WAAW,EACT,2PAA2P;IAC7P,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ;QACd,UAAU,EAAE;YACV,IAAI,EAAE;gBACJ,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE,gDAAgD;aAC9D;YACD,MAAM,EAAE;gBACN,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE,2GAA2G;aACzH;YACD,SAAS,EAAE;gBACT,IAAI,EAAE,QAAQ;gBACd,WAAW,EACT,+NAA+N;gBACjO,IAAI,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,WAAW,EAAE,aAAa,EAAE,mBAAmB,CAAC;aAC9E;YACD,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oCAAoC,EAAE;YAC5E,UAAU,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,+CAA+C,EAAE;SAC7F;QACD,QAAQ,EAAE,CAAC,WAAW,CAAC;KACxB;IACD,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;QAChC,MAAM,IAAI,GAAG,MAAM,cAAc,CAAC,QAAQ,CAAC,CAAC;QAC5C,MAAM,MAAM,GAAG,MAAM,eAAe,CAAC,QAAQ,CAAC,CAAC;QAC/C,MAAM,KAAK,GAAG,MAAM,cAAc,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;QACjE,MAAM,eAAe,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QAEvC,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;QACzC,MAAM,IAAI,GAAG,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;QACnE,MAAM,MAAM,GAAG,OAAO,IAAI,CAAC,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC;QACzE,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,OAAO,IAAI,CAAC,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAE3F,QAAQ,SAAS,EAAE,CAAC;YAClB,KAAK,SAAS,CAAC,CAAC,CAAC;gBACf,IAAI,CAAC,IAAI;oBAAE,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC,CAAC;gBAC9E,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,SAAS,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC;YACnE,CAAC;YACD,KAAK,SAAS,CAAC,CAAC,CAAC;gBACf,IAAI,CAAC,IAAI;oBAAE,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC,CAAC;gBAC9E,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,SAAS,CAAC,KAAK,EAAE,IAAI,
CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC;YACnE,CAAC;YACD,KAAK,WAAW,CAAC,CAAC,CAAC;gBACjB,IAAI,CAAC,IAAI;oBAAE,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAC;gBAChF,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,kBAAkB,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC;YAC9E,CAAC;YACD,KAAK,aAAa,CAAC,CAAC,CAAC;gBACnB,IAAI,CAAC,MAAM;oBAAE,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;gBACtF,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,mBAAmB,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC;YACnF,CAAC;YACD,KAAK,mBAAmB,CAAC,CAAC,CAAC;gBACzB,MAAM,GAAG,GAAG,MAAM,IAAI,IAAI,CAAC;gBAC3B,IAAI,CAAC,GAAG;oBAAE,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;gBACnG,OAAO,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,EAAE,qBAAqB,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,CAAC;YACxF,CAAC;YACD;gBACE,MAAM,IAAI,KAAK,CAAC,sBAAsB,SAAS,EAAE,CAAC,CAAC;QACvD,CAAC;IACH,CAAC;CACF,CAAC"}
|
|
import { scanRepository } from '../../core/repositoryScanner.js';
import { collectIssues } from '../../core/issueEngine.js';
import { analyzeHotspots } from '../../core/hotspotAnalyzer.js';
import { buildCodeGraph } from '../../core/codeGraph.js';
import { loadCachedGraph, saveCachedGraph } from '../../core/indexCache.js';
import { detectWorkspaces, filterFilesByPackage } from '../../core/monorepo.js';
import { paginate, listChecksum, readPageParams } from '../pagination.js';
import { emitProgress } from '../progress.js';

/**
 * MCP tool `projscan_hotspots`: ranks files by risk (git churn × cyclomatic
 * complexity × open issues), optionally scoped to one workspace package, and
 * returns a cursor-paginated slice. Progress is reported in five steps.
 */
export const hotspotsTool = {
    name: 'projscan_hotspots',
    description: 'Rank files by risk using git churn × AST cyclomatic complexity × open issues. Returns the most dangerous files to touch. Each hotspot includes `cyclomaticComplexity` (null for non-AST languages, where line count is used as fallback). Supports cursor-based pagination: pass the `nextCursor` from a previous response back as `cursor` to fetch the next page.',
    inputSchema: {
        type: 'object',
        properties: {
            limit: {
                type: 'number',
                description: 'Cap on total hotspots ranked (default 100). For paging the returned set, use `page_size` + `cursor` instead.',
            },
            since: {
                type: 'string',
                description: 'Git history window. Examples: "12 months ago", "2024-01-01". Default: "12 months ago".',
            },
            cursor: { type: 'string', description: 'Opaque cursor from a previous response. Omit for the first page.' },
            page_size: { type: 'number', description: 'Items per page (default 50, max 500).' },
            max_tokens: { type: 'number', description: 'Cap response to roughly this many tokens.' },
            package: {
                type: 'string',
                description: 'Optional. Workspace package name (from projscan_workspaces) to scope hotspots to one package only.',
            },
        },
    },
    handler: async (args, rootPath) => {
        emitProgress(0, 5, 'scanning repository');
        const repoScan = await scanRepository(rootPath);
        emitProgress(1, 5, 'collecting issues');
        const issueList = await collectIssues(rootPath, repoScan.files);
        const rankLimit = typeof args.limit === 'number' ? args.limit : 100;
        const sinceWindow = typeof args.since === 'string' ? args.since : undefined;
        emitProgress(2, 5, 'building code graph');
        const cachedGraph = await loadCachedGraph(rootPath);
        const graph = await buildCodeGraph(rootPath, repoScan.files, cachedGraph);
        await saveCachedGraph(rootPath, graph);
        emitProgress(3, 5, 'analyzing git churn + risk');
        const report = await analyzeHotspots(rootPath, repoScan.files, issueList, {
            limit: rankLimit,
            since: sinceWindow,
            graph,
        });
        // Optionally narrow the ranked set to one workspace package.
        let ranked = report.hotspots;
        if (typeof args.package === 'string' && args.package.length > 0) {
            const workspaces = await detectWorkspaces(rootPath);
            const allowedPaths = new Set(filterFilesByPackage(workspaces, args.package, ranked.map((h) => h.relativePath)));
            ranked = ranked.filter((h) => allowedPaths.has(h.relativePath));
        }
        emitProgress(4, 5, 'paginating');
        const page = paginate(ranked, readPageParams(args), listChecksum(ranked));
        emitProgress(5, 5, 'done');
        return {
            available: report.available,
            reason: report.reason,
            window: report.window,
            hotspots: page.items,
            totalFilesRanked: report.totalFilesRanked,
            nextCursor: page.nextCursor,
            total: page.total,
        };
    },
};
//# sourceMappingURL=hotspots.js.map
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"hotspots.js","sourceRoot":"","sources":["../../../src/mcp/tools/hotspots.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,iCAAiC,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAC1D,OAAO,EAAE,eAAe,EAAE,MAAM,+BAA+B,CAAC;AAChE,OAAO,EAAE,cAAc,EAAE,MAAM,yBAAyB,CAAC;AACzD,OAAO,EAAE,eAAe,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAC5E,OAAO,EAAE,gBAAgB,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAChF,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,cAAc,EAAE,MAAM,kBAAkB,CAAC;AAC1E,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAG9C,MAAM,CAAC,MAAM,YAAY,GAAY;IACnC,IAAI,EAAE,mBAAmB;IACzB,WAAW,EACT,qWAAqW;IACvW,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ;QACd,UAAU,EAAE;YACV,KAAK,EAAE;gBACL,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE,8GAA8G;aAC5H;YACD,KAAK,EAAE;gBACL,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE,wFAAwF;aACtG;YACD,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,kEAAkE,EAAE;YAC3G,SAAS,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,uCAAuC,EAAE;YACnF,UAAU,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,2CAA2C,EAAE;YACxF,OAAO,EAAE;gBACP,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE,oGAAoG;aAClH;SACF;KACF;IACD,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;QAChC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,qBAAqB,CAAC,CAAC;QAC1C,MAAM,IAAI,GAAG,MAAM,cAAc,CAAC,QAAQ,CAAC,CAAC;QAC5C,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,mBAAmB,CAAC,CAAC;QACxC,MAAM,MAAM,GAAG,MAAM,aAAa,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC;QACzD,MAAM,KAAK,GAAG,OAAO,IAAI,CAAC,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC;QAChE,MAAM,KAAK,GAAG,OAAO,IAAI,CAAC,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;QACtE,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,qBAAqB,CAAC,CAAC;QAC1C,MAAM,MAAM,GAAG,MAAM,eAAe,CAAC,QAAQ,CAAC,CAAC;QAC/C,MAAM,KAAK,GAAG,MAAM,cAAc,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;QACjE,MAAM,eAAe,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QACvC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,4BAA4B,CAAC,CAAC;QACjD,MAAM,MAAM,GAAG,MAAM,eAAe,CAAC,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC,CAAC;QAC5F,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,MAAM,GAA
G,CAAC,EAAE,CAAC;YAChE,MAAM,EAAE,GAAG,MAAM,gBAAgB,CAAC,QAAQ,CAAC,CAAC;YAC5C,MAAM,OAAO,GAAG,IAAI,GAAG,CAAC,oBAAoB,CAAC,EAAE,EAAE,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;YAC5G,MAAM,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC;QAC/E,CAAC;QACD,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,YAAY,CAAC,CAAC;QACjC,MAAM,IAAI,GAAG,QAAQ,CAAC,MAAM,CAAC,QAAQ,EAAE,cAAc,CAAC,IAAI,CAAC,EAAE,YAAY,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC;QAC5F,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;QAC3B,OAAO;YACL,SAAS,EAAE,MAAM,CAAC,SAAS;YAC3B,MAAM,EAAE,MAAM,CAAC,MAAM;YACrB,MAAM,EAAE,MAAM,CAAC,MAAM;YACrB,QAAQ,EAAE,IAAI,CAAC,KAAK;YACpB,gBAAgB,EAAE,MAAM,CAAC,gBAAgB;YACzC,UAAU,EAAE,IAAI,CAAC,UAAU;YAC3B,KAAK,EAAE,IAAI,CAAC,KAAK;SAClB,CAAC;IACJ,CAAC;CACF,CAAC"}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { detectOutdated } from '../../core/outdatedDetector.js';
|
|
2
|
+
import { detectWorkspaces } from '../../core/monorepo.js';
|
|
3
|
+
import { paginate, listChecksum, readPageParams } from '../pagination.js';
|
|
4
|
+
import { PACKAGE_ARG_SCHEMA } from './_shared.js';
|
|
5
|
+
export const outdatedTool = {
|
|
6
|
+
name: 'projscan_outdated',
|
|
7
|
+
description: 'Compare declared vs installed versions of every package. Reports drift (patch/minor/major). Workspace-aware in monorepos: each package.json is scanned, and each entry is tagged with the workspace it came from. Pass `package` to scope to a single workspace. Offline - does not hit the npm registry. Supports cursor pagination.',
|
|
8
|
+
inputSchema: {
|
|
9
|
+
type: 'object',
|
|
10
|
+
properties: {
|
|
11
|
+
package: PACKAGE_ARG_SCHEMA,
|
|
12
|
+
cursor: { type: 'string', description: 'Opaque cursor from a previous response.' },
|
|
13
|
+
page_size: { type: 'number', description: 'Items per page (default 50).' },
|
|
14
|
+
max_tokens: { type: 'number', description: 'Cap response size.' },
|
|
15
|
+
},
|
|
16
|
+
},
|
|
17
|
+
handler: async (args, rootPath) => {
|
|
18
|
+
const workspaces = await detectWorkspaces(rootPath);
|
|
19
|
+
const report = await detectOutdated(rootPath, {
|
|
20
|
+
workspaces,
|
|
21
|
+
...(typeof args.package === 'string' ? { workspaceFilter: args.package } : {}),
|
|
22
|
+
});
|
|
23
|
+
if (!report.available)
|
|
24
|
+
return report;
|
|
25
|
+
const page = paginate(report.packages, readPageParams(args), listChecksum(report.packages));
|
|
26
|
+
return {
|
|
27
|
+
available: true,
|
|
28
|
+
totalPackages: report.totalPackages,
|
|
29
|
+
packages: page.items,
|
|
30
|
+
byWorkspace: report.byWorkspace,
|
|
31
|
+
total: page.total,
|
|
32
|
+
nextCursor: page.nextCursor,
|
|
33
|
+
};
|
|
34
|
+
},
|
|
35
|
+
};
|
|
36
|
+
//# sourceMappingURL=outdated.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"outdated.js","sourceRoot":"","sources":["../../../src/mcp/tools/outdated.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,gCAAgC,CAAC;AAChE,OAAO,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAC;AAC1D,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,cAAc,EAAE,MAAM,kBAAkB,CAAC;AAC1E,OAAO,EAAE,kBAAkB,EAAgB,MAAM,cAAc,CAAC;AAEhE,MAAM,CAAC,MAAM,YAAY,GAAY;IACnC,IAAI,EAAE,mBAAmB;IACzB,WAAW,EACT,uUAAuU;IACzU,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ;QACd,UAAU,EAAE;YACV,OAAO,EAAE,kBAAkB;YAC3B,MAAM,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,yCAAyC,EAAE;YAClF,SAAS,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,8BAA8B,EAAE;YAC1E,UAAU,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oBAAoB,EAAE;SAClE;KACF;IACD,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;QAChC,MAAM,UAAU,GAAG,MAAM,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACpD,MAAM,MAAM,GAAG,MAAM,cAAc,CAAC,QAAQ,EAAE;YAC5C,UAAU;YACV,GAAG,CAAC,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,EAAE,eAAe,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;SAC/E,CAAC,CAAC;QACH,IAAI,CAAC,MAAM,CAAC,SAAS;YAAE,OAAO,MAAM,CAAC;QACrC,MAAM,IAAI,GAAG,QAAQ,CAAC,MAAM,CAAC,QAAQ,EAAE,cAAc,CAAC,IAAI,CAAC,EAAE,YAAY,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC;QAC5F,OAAO;YACL,SAAS,EAAE,IAAI;YACf,aAAa,EAAE,MAAM,CAAC,aAAa;YACnC,QAAQ,EAAE,IAAI,CAAC,KAAK;YACpB,WAAW,EAAE,MAAM,CAAC,WAAW;YAC/B,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,UAAU,EAAE,IAAI,CAAC,UAAU;SAC5B,CAAC;IACJ,CAAC;CACF,CAAC"}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { computePrDiff } from '../../core/prDiff.js';
|
|
2
|
+
import { emitProgress } from '../progress.js';
|
|
3
|
+
import { PACKAGE_ARG_SCHEMA, resolvePackageFilter } from './_shared.js';
|
|
4
|
+
export const prDiffTool = {
|
|
5
|
+
name: 'projscan_pr_diff',
|
|
6
|
+
description: 'Structural (AST) diff between two refs - what changed in exports, imports, call sites, cyclomatic complexity, and fan-in. Not a text diff: this surfaces the symbols and edges that an agent reviewing a PR actually cares about. Defaults: base=origin/main (falls back to main/master/HEAD~1), head=HEAD. Spins up a throwaway git worktree at the base ref to get a clean second graph.',
|
|
7
|
+
inputSchema: {
|
|
8
|
+
type: 'object',
|
|
9
|
+
properties: {
|
|
10
|
+
base: {
|
|
11
|
+
type: 'string',
|
|
12
|
+
description: 'Base ref (branch, tag, sha). Default: origin/main, falling back to main/master/HEAD~1.',
|
|
13
|
+
},
|
|
14
|
+
head: { type: 'string', description: 'Head ref. Default: HEAD.' },
|
|
15
|
+
max_tokens: { type: 'number', description: 'Cap the response to roughly this many tokens.' },
|
|
16
|
+
package: PACKAGE_ARG_SCHEMA,
|
|
17
|
+
},
|
|
18
|
+
},
|
|
19
|
+
handler: async (args, rootPath) => {
|
|
20
|
+
emitProgress(0, 3, 'resolving refs');
|
|
21
|
+
const base = typeof args.base === 'string' ? args.base : undefined;
|
|
22
|
+
const head = typeof args.head === 'string' ? args.head : undefined;
|
|
23
|
+
emitProgress(1, 3, 'building base + head graphs');
|
|
24
|
+
const report = await computePrDiff(rootPath, { base, head });
|
|
25
|
+
emitProgress(2, 3, 'diffing');
|
|
26
|
+
const passes = await resolvePackageFilter(rootPath, args);
|
|
27
|
+
if (passes) {
|
|
28
|
+
report.filesAdded = report.filesAdded.filter(passes);
|
|
29
|
+
report.filesRemoved = report.filesRemoved.filter(passes);
|
|
30
|
+
report.filesModified = report.filesModified.filter((f) => passes(f.relativePath));
|
|
31
|
+
report.totalFilesChanged =
|
|
32
|
+
report.filesAdded.length + report.filesRemoved.length + report.filesModified.length;
|
|
33
|
+
}
|
|
34
|
+
emitProgress(3, 3, 'done');
|
|
35
|
+
return report;
|
|
36
|
+
},
|
|
37
|
+
};
|
|
38
|
+
//# sourceMappingURL=prDiff.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"prDiff.js","sourceRoot":"","sources":["../../../src/mcp/tools/prDiff.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,sBAAsB,CAAC;AACrD,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAC9C,OAAO,EAAE,kBAAkB,EAAE,oBAAoB,EAAgB,MAAM,cAAc,CAAC;AAEtF,MAAM,CAAC,MAAM,UAAU,GAAY;IACjC,IAAI,EAAE,kBAAkB;IACxB,WAAW,EACT,4XAA4X;IAC9X,WAAW,EAAE;QACX,IAAI,EAAE,QAAQ;QACd,UAAU,EAAE;YACV,IAAI,EAAE;gBACJ,IAAI,EAAE,QAAQ;gBACd,WAAW,EAAE,wFAAwF;aACtG;YACD,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,0BAA0B,EAAE;YACjE,UAAU,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,+CAA+C,EAAE;YAC5F,OAAO,EAAE,kBAAkB;SAC5B;KACF;IACD,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;QAChC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,gBAAgB,CAAC,CAAC;QACrC,MAAM,IAAI,GAAG,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;QACnE,MAAM,IAAI,GAAG,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;QACnE,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,6BAA6B,CAAC,CAAC;QAClD,MAAM,MAAM,GAAG,MAAM,aAAa,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;QAC7D,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;QAC9B,MAAM,MAAM,GAAG,MAAM,oBAAoB,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QAC1D,IAAI,MAAM,EAAE,CAAC;YACX,MAAM,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YACrD,MAAM,CAAC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YACzD,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC;YAClF,MAAM,CAAC,iBAAiB;gBACtB,MAAM,CAAC,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC,MAAM,GAAG,MAAM,CAAC,aAAa,CAAC,MAAM,CAAC;QACxF,CAAC;QACD,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;QAC3B,OAAO,MAAM,CAAC;IAChB,CAAC;CACF,CAAC"}
|
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
import { scanRepository } from '../../core/repositoryScanner.js';
|
|
2
|
+
import { buildCodeGraph } from '../../core/codeGraph.js';
|
|
3
|
+
import { loadCachedGraph, saveCachedGraph } from '../../core/indexCache.js';
|
|
4
|
+
import { buildSearchIndex, search as searchIndex, attachExcerpts, expandQuery, } from '../../core/searchIndex.js';
|
|
5
|
+
import { buildSemanticIndex, semanticSearch, reciprocalRankFusion, } from '../../core/semanticSearch.js';
|
|
6
|
+
import { isSemanticAvailable } from '../../core/embeddings.js';
|
|
7
|
+
import { paginate, listChecksum, readPageParams } from '../pagination.js';
|
|
8
|
+
import { PACKAGE_ARG_SCHEMA, resolvePackageFilter } from './_shared.js';
|
|
9
|
+
export const searchTool = {
|
|
10
|
+
name: 'projscan_search',
|
|
11
|
+
description: 'Ranked search across the project. Lexical (BM25) by default; optional semantic (vector) and hybrid (RRF fusion) modes available when the @xenova/transformers peer dependency is installed. Scope controls what to search: "auto"/"content" (ranked content matches with excerpts), "symbols" (exported names), "files" (path substring).',
|
|
12
|
+
inputSchema: {
|
|
13
|
+
type: 'object',
|
|
14
|
+
properties: {
|
|
15
|
+
query: {
|
|
16
|
+
type: 'string',
|
|
17
|
+
description: 'Search string. Multi-word queries are treated as OR across BM25 terms; semantic mode embeds the full query.',
|
|
18
|
+
},
|
|
19
|
+
scope: {
|
|
20
|
+
type: 'string',
|
|
21
|
+
description: 'What to search over: "auto" (= content), "symbols", "files", "content".',
|
|
22
|
+
enum: ['auto', 'symbols', 'files', 'content'],
|
|
23
|
+
},
|
|
24
|
+
mode: {
|
|
25
|
+
type: 'string',
|
|
26
|
+
description: '"lexical" (default, BM25) | "semantic" (embeddings, requires peer dep) | "hybrid" (BM25 + semantic via reciprocal rank fusion). Ignored for "symbols" and "files" scopes.',
|
|
27
|
+
enum: ['lexical', 'semantic', 'hybrid'],
|
|
28
|
+
},
|
|
29
|
+
limit: { type: 'number', description: 'Max matches returned (default 30).' },
|
|
30
|
+
max_tokens: { type: 'number', description: 'Cap the response to roughly this many tokens.' },
|
|
31
|
+
package: PACKAGE_ARG_SCHEMA,
|
|
32
|
+
},
|
|
33
|
+
required: ['query'],
|
|
34
|
+
},
|
|
35
|
+
handler: async (args, rootPath) => {
|
|
36
|
+
const query = String(args.query ?? '').trim();
|
|
37
|
+
if (!query)
|
|
38
|
+
throw new Error('query argument is required and must be non-empty');
|
|
39
|
+
const scope = String(args.scope ?? 'auto');
|
|
40
|
+
const limit = Math.max(1, Math.min(500, typeof args.limit === 'number' ? args.limit : 30));
|
|
41
|
+
const scan = await scanRepository(rootPath);
|
|
42
|
+
const cached = await loadCachedGraph(rootPath);
|
|
43
|
+
const graph = await buildCodeGraph(rootPath, scan.files, cached);
|
|
44
|
+
await saveCachedGraph(rootPath, graph);
|
|
45
|
+
const passes = await resolvePackageFilter(rootPath, args);
|
|
46
|
+
if (scope === 'files') {
|
|
47
|
+
const q = query.toLowerCase();
|
|
48
|
+
const all = scan.files
|
|
49
|
+
.filter((f) => f.relativePath.toLowerCase().includes(q))
|
|
50
|
+
.filter((f) => !passes || passes(f.relativePath))
|
|
51
|
+
.map((f) => ({ file: f.relativePath, sizeBytes: f.sizeBytes }));
|
|
52
|
+
const page = paginate(all, readPageParams(args), listChecksum(all));
|
|
53
|
+
return { scope, query, matches: page.items, total: page.total, nextCursor: page.nextCursor };
|
|
54
|
+
}
|
|
55
|
+
if (scope === 'symbols') {
|
|
56
|
+
const q = query.toLowerCase();
|
|
57
|
+
const rawMatches = [];
|
|
58
|
+
for (const [file, entry] of graph.files) {
|
|
59
|
+
if (passes && !passes(file))
|
|
60
|
+
continue;
|
|
61
|
+
for (const exp of entry.exports) {
|
|
62
|
+
const name = exp.name.toLowerCase();
|
|
63
|
+
if (!name.includes(q))
|
|
64
|
+
continue;
|
|
65
|
+
const rank = name === q ? 0 : name.startsWith(q) ? 1 : 2;
|
|
66
|
+
rawMatches.push({ symbol: exp.name, kind: exp.kind, file, line: exp.line, rank });
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
rawMatches.sort((a, b) => a.rank - b.rank);
|
|
70
|
+
const cleaned = rawMatches.map((m) => ({
|
|
71
|
+
symbol: m.symbol,
|
|
72
|
+
kind: m.kind,
|
|
73
|
+
file: m.file,
|
|
74
|
+
line: m.line,
|
|
75
|
+
}));
|
|
76
|
+
const page = paginate(cleaned, readPageParams(args), listChecksum(cleaned));
|
|
77
|
+
return { scope, query, matches: page.items, total: page.total, nextCursor: page.nextCursor };
|
|
78
|
+
}
|
|
79
|
+
const mode = String(args.mode ?? 'lexical');
|
|
80
|
+
const index = await buildSearchIndex(rootPath, scan.files, graph);
|
|
81
|
+
const lexicalHitsAll = searchIndex(index, query, { limit });
|
|
82
|
+
const lexicalHits = passes ? lexicalHitsAll.filter((h) => passes(h.file)) : lexicalHitsAll;
|
|
83
|
+
const tokens = expandQuery(query);
|
|
84
|
+
if (mode === 'lexical') {
|
|
85
|
+
const withExcerpts = await attachExcerpts(rootPath, lexicalHits, tokens);
|
|
86
|
+
const page = paginate(withExcerpts, readPageParams(args), listChecksum(withExcerpts));
|
|
87
|
+
return {
|
|
88
|
+
scope: scope === 'auto' ? 'content' : scope,
|
|
89
|
+
mode: 'lexical',
|
|
90
|
+
query,
|
|
91
|
+
queryTokens: tokens,
|
|
92
|
+
matches: page.items,
|
|
93
|
+
total: page.total,
|
|
94
|
+
nextCursor: page.nextCursor,
|
|
95
|
+
};
|
|
96
|
+
}
|
|
97
|
+
const hasSemantic = await isSemanticAvailable();
|
|
98
|
+
if (!hasSemantic) {
|
|
99
|
+
return {
|
|
100
|
+
scope: scope === 'auto' ? 'content' : scope,
|
|
101
|
+
mode,
|
|
102
|
+
query,
|
|
103
|
+
error: 'Semantic search requires the optional peer dependency @xenova/transformers. Install it with: npm install @xenova/transformers',
|
|
104
|
+
available: false,
|
|
105
|
+
matches: [],
|
|
106
|
+
total: 0,
|
|
107
|
+
};
|
|
108
|
+
}
|
|
109
|
+
const semIndex = await buildSemanticIndex(rootPath, scan.files);
|
|
110
|
+
if (!semIndex) {
|
|
111
|
+
return {
|
|
112
|
+
scope: scope === 'auto' ? 'content' : scope,
|
|
113
|
+
mode,
|
|
114
|
+
query,
|
|
115
|
+
error: 'Semantic index build failed (peer loaded but model not usable).',
|
|
116
|
+
available: false,
|
|
117
|
+
matches: [],
|
|
118
|
+
total: 0,
|
|
119
|
+
};
|
|
120
|
+
}
|
|
121
|
+
const semHitsAll = await semanticSearch(semIndex, query, { limit });
|
|
122
|
+
const semHits = passes ? semHitsAll.filter((h) => passes(h.file)) : semHitsAll;
|
|
123
|
+
if (mode === 'semantic') {
|
|
124
|
+
const enriched = await attachExcerpts(rootPath, semHits.map((h) => ({
|
|
125
|
+
file: h.file,
|
|
126
|
+
score: h.score,
|
|
127
|
+
matched: [],
|
|
128
|
+
symbolMatch: false,
|
|
129
|
+
pathMatch: false,
|
|
130
|
+
excerpt: '',
|
|
131
|
+
line: 0,
|
|
132
|
+
})), tokens);
|
|
133
|
+
const page = paginate(enriched, readPageParams(args), listChecksum(enriched));
|
|
134
|
+
return {
|
|
135
|
+
scope: scope === 'auto' ? 'content' : scope,
|
|
136
|
+
mode: 'semantic',
|
|
137
|
+
query,
|
|
138
|
+
model: semIndex.model,
|
|
139
|
+
matches: page.items,
|
|
140
|
+
total: page.total,
|
|
141
|
+
nextCursor: page.nextCursor,
|
|
142
|
+
};
|
|
143
|
+
}
|
|
144
|
+
const fused = reciprocalRankFusion([lexicalHits, semHits]).slice(0, limit);
|
|
145
|
+
const enriched = await attachExcerpts(rootPath, fused.map((f) => ({
|
|
146
|
+
file: f.file,
|
|
147
|
+
score: f.score,
|
|
148
|
+
matched: [],
|
|
149
|
+
symbolMatch: false,
|
|
150
|
+
pathMatch: false,
|
|
151
|
+
excerpt: '',
|
|
152
|
+
line: 0,
|
|
153
|
+
})), tokens);
|
|
154
|
+
const page = paginate(enriched, readPageParams(args), listChecksum(enriched));
|
|
155
|
+
return {
|
|
156
|
+
scope: scope === 'auto' ? 'content' : scope,
|
|
157
|
+
mode: 'hybrid',
|
|
158
|
+
query,
|
|
159
|
+
queryTokens: tokens,
|
|
160
|
+
model: semIndex.model,
|
|
161
|
+
matches: page.items,
|
|
162
|
+
total: page.total,
|
|
163
|
+
nextCursor: page.nextCursor,
|
|
164
|
+
};
|
|
165
|
+
},
|
|
166
|
+
};
|
|
167
|
+
//# sourceMappingURL=search.js.map
|