@compilr-dev/agents-coding-ts 0.1.2 → 0.1.4
This diff shows the contents of publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.
- package/dist/index.d.ts +8 -5
- package/dist/index.js +14 -4
- package/dist/parser/index.d.ts +2 -2
- package/dist/parser/index.js +1 -1
- package/dist/parser/typescript-parser.d.ts +2 -2
- package/dist/parser/typescript-parser.js +77 -54
- package/dist/skills/code-health.js +18 -5
- package/dist/skills/code-structure.js +15 -5
- package/dist/skills/dependency-audit.js +16 -5
- package/dist/skills/index.d.ts +7 -7
- package/dist/skills/index.js +11 -11
- package/dist/skills/refactor-impact.js +16 -5
- package/dist/skills/type-analysis.js +16 -5
- package/dist/tools/find-dead-code.d.ts +2 -2
- package/dist/tools/find-dead-code.js +82 -58
- package/dist/tools/find-duplicates.d.ts +2 -2
- package/dist/tools/find-duplicates.js +41 -38
- package/dist/tools/find-implementations.d.ts +2 -2
- package/dist/tools/find-implementations.js +44 -36
- package/dist/tools/find-patterns.d.ts +2 -2
- package/dist/tools/find-patterns.js +154 -148
- package/dist/tools/find-references.d.ts +2 -2
- package/dist/tools/find-references.js +76 -72
- package/dist/tools/find-symbol.d.ts +2 -2
- package/dist/tools/find-symbol.js +106 -96
- package/dist/tools/get-call-graph.d.ts +2 -2
- package/dist/tools/get-call-graph.js +52 -47
- package/dist/tools/get-complexity.d.ts +2 -2
- package/dist/tools/get-complexity.js +94 -46
- package/dist/tools/get-dependency-graph.d.ts +2 -2
- package/dist/tools/get-dependency-graph.js +66 -52
- package/dist/tools/get-documentation.d.ts +2 -2
- package/dist/tools/get-documentation.js +154 -122
- package/dist/tools/get-exports.d.ts +2 -2
- package/dist/tools/get-exports.js +73 -61
- package/dist/tools/get-file-structure.d.ts +2 -2
- package/dist/tools/get-file-structure.js +16 -16
- package/dist/tools/get-imports.d.ts +2 -2
- package/dist/tools/get-imports.js +46 -46
- package/dist/tools/get-signature.d.ts +2 -2
- package/dist/tools/get-signature.js +168 -124
- package/dist/tools/get-type-hierarchy.d.ts +2 -2
- package/dist/tools/get-type-hierarchy.js +53 -44
- package/dist/tools/index.d.ts +18 -16
- package/dist/tools/index.js +17 -15
- package/dist/tools/read-symbol.d.ts +62 -0
- package/dist/tools/read-symbol.js +464 -0
- package/dist/tools/types.d.ts +27 -27
- package/package.json +3 -3
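For orientation, the sketch below shows one way the changed tools might be invoked. It is a minimal, hedged example rather than documentation of the package: the root re-exports (`findDeadCodeTool`, `findDuplicatesTool`), the shape of the object returned by `defineTool` (in particular a callable `execute`), and the result shape are all assumptions inferred from the diff content further down.

```ts
// Hedged sketch: assumes the package root re-exports the tools shown in this diff
// and that defineTool() returns an object exposing the `execute` function it was given.
import { findDeadCodeTool, findDuplicatesTool } from "@compilr-dev/agents-coding-ts";

async function runExampleScan(projectDir: string): Promise<void> {
  // Input fields mirror the TOOL_INPUT_SCHEMA visible in find-dead-code.js below.
  const deadCode = await findDeadCodeTool.execute({
    path: projectDir,
    includeTests: false, // default per the schema
    checkExports: true,
    maxFiles: 100,
  });

  // Input fields mirror the TOOL_INPUT_SCHEMA in find-duplicates.js below.
  const duplicates = await findDuplicatesTool.execute({
    path: projectDir,
    minLines: 6,
    minTokens: 50,
  });

  console.log(deadCode, duplicates); // result shape is whatever createSuccessResult produces
}
```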
package/dist/skills/refactor-impact.js

@@ -3,15 +3,26 @@
  *
  * Analyze the impact of refactoring a symbol across the codebase.
  */
-import { defineSkill } from
+import { defineSkill } from "@compilr-dev/agents";
 /**
  * Refactor impact skill - Analyze impact of refactoring a symbol
  */
 export const refactorImpactSkill = defineSkill({
-    name:
-    description:
+    name: "refactor-impact",
+    description: "Analyze the impact of refactoring a symbol across the codebase",
     prompt: `You are in REFACTOR IMPACT ANALYSIS mode. Analyze what would be affected by refactoring a symbol.

+## When to Use
+- Planning to rename, move, or change a function/class/interface
+- Assessing risk before modifying public API
+- Understanding what files will need updates
+- Creating a refactoring checklist
+
+## When NOT to Use
+- Just want to understand what a symbol does → use code-structure skill
+- Need to find where a symbol is used (not planning to change it) → use find_references directly
+- Checking type hierarchy without planning changes → use type-analysis skill
+
 ## TOOLS TO USE

 1. **find_symbol**: Locate the symbol definition

@@ -130,6 +141,6 @@ get_type_hierarchy({
 - Quantify the impact (number of files, usages)
 - Provide actionable checklist
 - Highlight breaking changes for exported/public symbols`,
-    tags: [
-    version:
+    tags: ["refactoring", "analysis", "impact", "planning"],
+    version: "1.0.0",
 });
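The hunk above implies the option shape `defineSkill` accepts (name, description, prompt, tags, version). Below is a hedged sketch of a custom skill following that same shape; the skill name `release-checklist` and its content are hypothetical, and the option types are inferred from this diff rather than from the package's documentation.

```ts
// Assumed shape, inferred from the refactorImpactSkill definition shown above.
import { defineSkill } from "@compilr-dev/agents";

export const releaseChecklistSkill = defineSkill({
  name: "release-checklist", // hypothetical skill name
  description: "Walk through pre-release checks for a TypeScript package",
  prompt: `You are in RELEASE CHECKLIST mode.

## When to Use
- Preparing a new package version

## When NOT to Use
- Routine code review → use code-health skill`,
  tags: ["release", "checklist"],
  version: "1.0.0",
});
```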
package/dist/skills/type-analysis.js

@@ -3,15 +3,26 @@
  *
  * Analyze type hierarchies, interfaces, and their implementations.
  */
-import { defineSkill } from
+import { defineSkill } from "@compilr-dev/agents";
 /**
  * Type analysis skill - Analyze type hierarchies and implementations
  */
 export const typeAnalysisSkill = defineSkill({
-    name:
-    description:
+    name: "type-analysis",
+    description: "Analyze type hierarchies, interfaces, and their implementations",
     prompt: `You are in TYPE ANALYSIS mode. Analyze type relationships in the codebase.

+## When to Use
+- Understanding interface implementations in the codebase
+- Exploring class inheritance hierarchies
+- Finding all concrete implementations of an abstract class
+- Checking which classes implement a specific interface
+
+## When NOT to Use
+- Planning to refactor a type → use refactor-impact skill first
+- Need general file structure → use code-structure skill
+- Looking for a specific symbol definition → use find_symbol directly
+
 ## TOOLS TO USE

 1. **get_type_hierarchy**: Analyze inheritance relationships

@@ -145,6 +156,6 @@ TypeName
 - Highlight incomplete implementations
 - Note generic type parameters when present
 - Keep diagrams simple - expand on request`,
-    tags: [
-    version:
+    tags: ["types", "analysis", "inheritance", "interfaces", "architecture"],
+    version: "1.0.0",
 });
package/dist/tools/find-dead-code.d.ts

@@ -4,8 +4,8 @@
  * Find potentially unused exports, functions, and variables across a codebase.
  * Uses static analysis to identify code that may be dead.
  */
-import type { Tool } from
-import type { FindDeadCodeInput } from
+import type { Tool } from "@compilr-dev/agents";
+import type { FindDeadCodeInput } from "./types.js";
 /**
  * findDeadCode tool
  */
package/dist/tools/find-dead-code.js

@@ -4,59 +4,59 @@
  * Find potentially unused exports, functions, and variables across a codebase.
  * Uses static analysis to identify code that may be dead.
  */
-import * as fs from
-import * as path from
-import * as ts from
-import { defineTool, createSuccessResult, createErrorResult } from
-import { detectLanguage, isLanguageSupported } from
+import * as fs from "node:fs/promises";
+import * as path from "node:path";
+import * as ts from "typescript";
+import { defineTool, createSuccessResult, createErrorResult, } from "@compilr-dev/agents";
+import { detectLanguage, isLanguageSupported, } from "../parser/typescript-parser.js";
 // Tool description
 const TOOL_DESCRIPTION = `Find potentially unused exports, functions, and variables.
 Analyzes the codebase to identify code that may be dead (never used).
 Useful for cleanup and reducing bundle size.`;
 // Tool input schema
 const TOOL_INPUT_SCHEMA = {
-    type:
+    type: "object",
     properties: {
         path: {
-            type:
-            description:
+            type: "string",
+            description: "Directory to analyze",
         },
         includeTests: {
-            type:
-            description:
+            type: "boolean",
+            description: "Include test files in analysis (default: false)",
             default: false,
         },
         checkExports: {
-            type:
-            description:
+            type: "boolean",
+            description: "Check for unused exports (default: true)",
             default: true,
         },
         checkFunctions: {
-            type:
-            description:
+            type: "boolean",
+            description: "Check for unused functions (default: true)",
             default: true,
         },
         checkVariables: {
-            type:
-            description:
+            type: "boolean",
+            description: "Check for unused variables (default: false)",
             default: false,
         },
         maxFiles: {
-            type:
-            description:
+            type: "number",
+            description: "Maximum files to analyze (default: 100)",
             default: 100,
         },
     },
-    required: [
+    required: ["path"],
 };
 // Default exclusions
-const DEFAULT_EXCLUDE = [
-const TEST_PATTERNS = [
+const DEFAULT_EXCLUDE = ["node_modules", "dist", "build", ".git", "coverage"];
+const TEST_PATTERNS = [".test.", ".spec.", "__tests__", "__mocks__"];
 /**
  * findDeadCode tool
  */
 export const findDeadCodeTool = defineTool({
-    name:
+    name: "find_dead_code",
     description: TOOL_DESCRIPTION,
     inputSchema: TOOL_INPUT_SCHEMA,
     execute: executeFindDeadCode,
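For reference, the input schema above translates to roughly the following TypeScript shape. The interface name is illustrative only; the real `FindDeadCodeInput` lives in `dist/tools/types.d.ts` and its exact definition is not shown in this diff.

```ts
// Hedged sketch: an input object that satisfies TOOL_INPUT_SCHEMA above.
// Only `path` is required; the other fields fall back to the schema defaults.
interface FindDeadCodeInputSketch {
  path: string;
  includeTests?: boolean;   // default false
  checkExports?: boolean;   // default true
  checkFunctions?: boolean; // default true
  checkVariables?: boolean; // default false
  maxFiles?: number;        // default 100
}

const exampleInput: FindDeadCodeInputSketch = { path: "./src", checkVariables: true };
```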
@@ -77,7 +77,7 @@ async function executeFindDeadCode(input) {
     }
     const stats = await fs.stat(resolvedPath);
     if (!stats.isDirectory()) {
-        return createErrorResult(
+        return createErrorResult("findDeadCode requires a directory path");
     }
     // Collect files
     const files = [];

@@ -101,7 +101,7 @@ async function executeFindDeadCode(input) {
     let unusedVariables = 0;
     for (const def of definitions) {
         // Skip index files (re-exports)
-        if (def.path.endsWith(
+        if (def.path.endsWith("index.ts") || def.path.endsWith("index.js")) {
             continue;
         }
         const isUsed = usages.has(def.name) || importedSymbols.has(def.name);

@@ -117,16 +117,16 @@ async function executeFindDeadCode(input) {
                         name: def.name,
                         path: def.path,
                         line: def.line,
-                        kind:
+                        kind: "export",
                         exported: true,
-                        confidence:
-                        reason:
+                        confidence: "medium",
+                        reason: "Export is not imported anywhere in the codebase",
                     });
                 }
             }
         }
         // Check functions
-        if (def.kind ===
+        if (def.kind === "function" && checkFunctions) {
             totalFunctions++;
             if (!isUsed && !def.exported) {
                 unusedFunctions++;

@@ -134,15 +134,15 @@ async function executeFindDeadCode(input) {
                     name: def.name,
                     path: def.path,
                     line: def.line,
-                    kind:
+                    kind: "function",
                     exported: false,
-                    confidence:
-                    reason:
+                    confidence: "high",
+                    reason: "Function is not called anywhere in the file",
                 });
             }
         }
         // Check variables
-        if (def.kind ===
+        if (def.kind === "variable" && checkVariables) {
             totalVariables++;
             if (!isUsed && !def.exported) {
                 unusedVariables++;

@@ -150,10 +150,10 @@ async function executeFindDeadCode(input) {
                     name: def.name,
                     path: def.path,
                     line: def.line,
-                    kind:
+                    kind: "variable",
                     exported: false,
-                    confidence:
-                    reason:
+                    confidence: "medium",
+                    reason: "Variable is not referenced after declaration",
                 });
             }
         }
@@ -193,15 +193,17 @@ async function collectFiles(dirPath, files, includeTests, maxFiles, currentDepth
         if (entry.isDirectory()) {
             if (DEFAULT_EXCLUDE.includes(entry.name))
                 continue;
-            if (!includeTests && entry.name ===
+            if (!includeTests && entry.name === "__tests__")
                 continue;
             await collectFiles(fullPath, files, includeTests, maxFiles, currentDepth + 1);
         }
         else if (entry.isFile()) {
             // Only include TypeScript/JavaScript files
-            if (/\.(ts|tsx|js|jsx)$/.test(entry.name) &&
+            if (/\.(ts|tsx|js|jsx)$/.test(entry.name) &&
+                !entry.name.endsWith(".d.ts")) {
                 // Skip test files if not including tests
-                if (!includeTests &&
+                if (!includeTests &&
+                    TEST_PATTERNS.some((p) => fullPath.includes(p))) {
                     continue;
                 }
                 files.push(fullPath);
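The filter added in this hunk (accept .ts/.tsx/.js/.jsx, drop declaration files, drop test files unless requested) can be read as a single predicate. Below is a standalone restatement; the helper name `shouldCollect` is ours, and the constant values are copied from the hunks at the top of this file.

```ts
const TEST_PATTERNS = [".test.", ".spec.", "__tests__", "__mocks__"];

// Mirrors the conditions in the collectFiles hunk above: source files only,
// no .d.ts declarations, and test files only when includeTests is set.
function shouldCollect(filePath: string, fileName: string, includeTests: boolean): boolean {
  if (!/\.(ts|tsx|js|jsx)$/.test(fileName)) return false;
  if (fileName.endsWith(".d.ts")) return false;
  if (!includeTests && TEST_PATTERNS.some((p) => filePath.includes(p))) return false;
  return true;
}
```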
@@ -218,47 +220,51 @@ async function collectFiles(dirPath, files, includeTests, maxFiles, currentDepth
  */
 async function analyzeFile(filePath, definitions, usages, importedSymbols) {
     try {
-        const content = await fs.readFile(filePath,
+        const content = await fs.readFile(filePath, "utf-8");
         const detection = detectLanguage(filePath);
         if (!detection.language || !isLanguageSupported(detection.language)) {
             return;
         }
-        const sourceFile = ts.createSourceFile(filePath, content, ts.ScriptTarget.Latest, true, filePath.endsWith(
+        const sourceFile = ts.createSourceFile(filePath, content, ts.ScriptTarget.Latest, true, filePath.endsWith(".tsx") ? ts.ScriptKind.TSX : ts.ScriptKind.TS);
         // Collect definitions
         const visit = (node) => {
             const { line } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
             // Function declarations
             if (ts.isFunctionDeclaration(node) && node.name) {
-                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                    false;
                 definitions.push({
                     name: node.name.text,
                     path: filePath,
                     line: line + 1,
-                    kind:
+                    kind: "function",
                     exported: isExported,
                 });
             }
             // Class declarations
             if (ts.isClassDeclaration(node) && node.name) {
-                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                    false;
                 definitions.push({
                     name: node.name.text,
                     path: filePath,
                     line: line + 1,
-                    kind:
+                    kind: "class",
                     exported: isExported,
                 });
             }
             // Variable declarations (const, let, var)
             if (ts.isVariableStatement(node)) {
-                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                    false;
                 for (const decl of node.declarationList.declarations) {
                     if (ts.isIdentifier(decl.name)) {
                         // Check if it's a function expression
                         const kind = decl.initializer &&
-                            (ts.isFunctionExpression(decl.initializer) ||
-
-
+                            (ts.isFunctionExpression(decl.initializer) ||
+                                ts.isArrowFunction(decl.initializer))
+                            ? "function"
+                            : "variable";
                         definitions.push({
                             name: decl.name.text,
                             path: filePath,

@@ -271,41 +277,45 @@ async function analyzeFile(filePath, definitions, usages, importedSymbols) {
             }
             // Interface declarations
             if (ts.isInterfaceDeclaration(node)) {
-                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                    false;
                 definitions.push({
                     name: node.name.text,
                     path: filePath,
                     line: line + 1,
-                    kind:
+                    kind: "interface",
                     exported: isExported,
                 });
             }
             // Type alias declarations
             if (ts.isTypeAliasDeclaration(node)) {
-                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                    false;
                 definitions.push({
                     name: node.name.text,
                     path: filePath,
                     line: line + 1,
-                    kind:
+                    kind: "type",
                     exported: isExported,
                 });
             }
             // Enum declarations
             if (ts.isEnumDeclaration(node)) {
-                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                const isExported = node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ??
+                    false;
                 definitions.push({
                     name: node.name.text,
                     path: filePath,
                     line: line + 1,
-                    kind:
+                    kind: "enum",
                     exported: isExported,
                 });
             }
             // Import declarations - track what's imported
             if (ts.isImportDeclaration(node) && node.importClause) {
                 // Named imports
-                if (node.importClause.namedBindings &&
+                if (node.importClause.namedBindings &&
+                    ts.isNamedImports(node.importClause.namedBindings)) {
                     for (const specifier of node.importClause.namedBindings.elements) {
                         importedSymbols.add(specifier.name.text);
                     }
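The export check repeated throughout this hunk (`node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword)`) is plain TypeScript compiler API usage. Below is a minimal standalone illustration of the same pattern, independent of this package; the function name `listFunctions` is ours.

```ts
import * as ts from "typescript";

// List top-level function declarations in a source string and whether they are
// exported, using the same modifier check as the analyzeFile hunk above.
function listFunctions(sourceText: string): Array<{ name: string; exported: boolean }> {
  const sf = ts.createSourceFile("sample.ts", sourceText, ts.ScriptTarget.Latest, true);
  const found: Array<{ name: string; exported: boolean }> = [];
  ts.forEachChild(sf, (node) => {
    if (ts.isFunctionDeclaration(node) && node.name) {
      const exported =
        node.modifiers?.some((m) => m.kind === ts.SyntaxKind.ExportKeyword) ?? false;
      found.push({ name: node.name.text, exported });
    }
  });
  return found;
}

// listFunctions("export function a() {}\nfunction b() {}")
// → [{ name: "a", exported: true }, { name: "b", exported: false }]
```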
@@ -344,13 +354,27 @@ async function analyzeFile(filePath, definitions, usages, importedSymbols) {
  */
 function isLikelyEntryPoint(filePath, symbolName) {
     // Main entry files
-    const entryPatterns = [
+    const entryPatterns = [
+        "index.ts",
+        "index.js",
+        "main.ts",
+        "main.js",
+        "app.ts",
+        "app.js",
+    ];
     const fileName = path.basename(filePath);
     if (entryPatterns.includes(fileName)) {
         return true;
     }
     // Common exported names that are likely entry points
-    const entrySymbols = [
+    const entrySymbols = [
+        "default",
+        "main",
+        "app",
+        "handler",
+        "server",
+        "client",
+    ];
     if (entrySymbols.includes(symbolName.toLowerCase())) {
         return true;
     }

@@ -361,7 +385,7 @@ function isLikelyEntryPoint(filePath, symbolName) {
  */
 export function createFindDeadCodeTool(options) {
     return defineTool({
-        name: options?.name ??
+        name: options?.name ?? "find_dead_code",
         description: options?.description ?? TOOL_DESCRIPTION,
         inputSchema: TOOL_INPUT_SCHEMA,
         execute: async (input) => {
package/dist/tools/find-duplicates.d.ts

@@ -4,8 +4,8 @@
  * Detect duplicate code blocks across the codebase using content hashing.
  * Helps identify opportunities for refactoring and code reuse.
  */
-import type { Tool } from
-import type { FindDuplicatesInput } from
+import type { Tool } from "@compilr-dev/agents";
+import type { FindDuplicatesInput } from "./types.js";
 /**
  * findDuplicates tool
  */
package/dist/tools/find-duplicates.js

@@ -4,52 +4,52 @@
  * Detect duplicate code blocks across the codebase using content hashing.
  * Helps identify opportunities for refactoring and code reuse.
  */
-import * as fs from
-import * as path from
-import * as crypto from
-import { defineTool, createSuccessResult, createErrorResult } from
+import * as fs from "node:fs/promises";
+import * as path from "node:path";
+import * as crypto from "node:crypto";
+import { defineTool, createSuccessResult, createErrorResult, } from "@compilr-dev/agents";
 // Tool description
 const TOOL_DESCRIPTION = `Detect duplicate code blocks across the codebase.
 Uses content hashing to find similar code patterns.
 Useful for identifying refactoring opportunities and reducing code duplication.`;
 // Tool input schema
 const TOOL_INPUT_SCHEMA = {
-    type:
+    type: "object",
     properties: {
         path: {
-            type:
-            description:
+            type: "string",
+            description: "Directory to analyze",
         },
         minLines: {
-            type:
-            description:
+            type: "number",
+            description: "Minimum lines for a duplicate (default: 6)",
             default: 6,
         },
         minTokens: {
-            type:
-            description:
+            type: "number",
+            description: "Minimum tokens for a duplicate (default: 50)",
             default: 50,
         },
         ignoreIdenticalFiles: {
-            type:
-            description:
+            type: "boolean",
+            description: "Ignore identical files (default: true)",
             default: true,
         },
         maxFiles: {
-            type:
-            description:
+            type: "number",
+            description: "Maximum files to analyze (default: 100)",
             default: 100,
         },
     },
-    required: [
+    required: ["path"],
 };
 // Default exclusions
-const DEFAULT_EXCLUDE = [
+const DEFAULT_EXCLUDE = ["node_modules", "dist", "build", ".git", "coverage"];
 /**
  * findDuplicates tool
  */
 export const findDuplicatesTool = defineTool({
-    name:
+    name: "find_duplicates",
     description: TOOL_DESCRIPTION,
     inputSchema: TOOL_INPUT_SCHEMA,
     execute: executeFindDuplicates,
@@ -70,7 +70,7 @@ async function executeFindDuplicates(input) {
     }
     const stats = await fs.stat(resolvedPath);
     if (!stats.isDirectory()) {
-        return createErrorResult(
+        return createErrorResult("findDuplicates requires a directory path");
     }
     // Collect files
     const files = [];

@@ -79,8 +79,8 @@ async function executeFindDuplicates(input) {
     const fileHashes = new Map();
     if (!ignoreIdenticalFiles) {
         for (const file of files) {
-            const content = await fs.readFile(file,
-            const hash = crypto.createHash(
+            const content = await fs.readFile(file, "utf-8");
+            const hash = crypto.createHash("md5").update(content).digest("hex");
             const existing = fileHashes.get(hash) ?? [];
             existing.push(file);
             fileHashes.set(hash, existing);

@@ -93,8 +93,8 @@ async function executeFindDuplicates(input) {
         const blocks = await extractCodeBlocks(file, minLines, minTokens);
         allBlocks.push(...blocks);
         // Count total lines
-        const content = await fs.readFile(file,
-        totalLines += content.split(
+        const content = await fs.readFile(file, "utf-8");
+        totalLines += content.split("\n").length;
     }
     // Group by hash
     const hashGroups = new Map();
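The identical-file check in the hunk above is straightforward node:crypto usage. Here is a standalone restatement of the same group-by-content-hash idea; the helper name `groupIdenticalFiles` is ours.

```ts
import * as crypto from "node:crypto";
import * as fs from "node:fs/promises";

// Group files whose contents hash to the same MD5 digest, mirroring the
// fileHashes map built in executeFindDuplicates above.
async function groupIdenticalFiles(paths: string[]): Promise<Map<string, string[]>> {
  const byHash = new Map<string, string[]>();
  for (const p of paths) {
    const content = await fs.readFile(p, "utf-8");
    const hash = crypto.createHash("md5").update(content).digest("hex");
    const bucket = byHash.get(hash) ?? [];
    bucket.push(p);
    byHash.set(hash, bucket);
  }
  return byHash;
}
```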
@@ -133,7 +133,9 @@ async function executeFindDuplicates(input) {
     duplicateGroups.sort((a, b) => b.lines - a.lines);
     // Limit results
     const limitedGroups = duplicateGroups.slice(0, 20);
-    const percentageDuplicate = totalLines > 0
+    const percentageDuplicate = totalLines > 0
+        ? Math.round((totalDuplicateLines / totalLines) * 10000) / 100
+        : 0;
     const result = {
         path: resolvedPath,
         duplicates: limitedGroups,

@@ -169,7 +171,8 @@ async function collectFiles(dirPath, files, maxFiles, currentDepth = 0) {
         }
         else if (entry.isFile()) {
             // Only include TypeScript/JavaScript files
-            if (/\.(ts|tsx|js|jsx)$/.test(entry.name) &&
+            if (/\.(ts|tsx|js|jsx)$/.test(entry.name) &&
+                !entry.name.endsWith(".d.ts")) {
                 files.push(fullPath);
             }
         }

@@ -184,15 +187,15 @@ async function collectFiles(dirPath, files, maxFiles, currentDepth = 0) {
  */
 async function extractCodeBlocks(filePath, minLines, minTokens) {
     try {
-        const content = await fs.readFile(filePath,
-        const lines = content.split(
+        const content = await fs.readFile(filePath, "utf-8");
+        const lines = content.split("\n");
         const blocks = [];
         // Use sliding window approach
         for (let start = 0; start <= lines.length - minLines; start++) {
             // Try different block sizes
             for (let size = minLines; size <= Math.min(minLines * 3, lines.length - start); size++) {
                 const blockLines = lines.slice(start, start + size);
-                const blockContent = blockLines.join(
+                const blockContent = blockLines.join("\n");
                 // Normalize content for comparison
                 const normalized = normalizeCode(blockContent);
                 // Count tokens (simplified: split on whitespace and punctuation)

@@ -200,10 +203,10 @@ async function extractCodeBlocks(filePath, minLines, minTokens) {
                 if (tokens < minTokens)
                     continue;
                 // Skip if mostly empty/comments
-                const significantLines = blockLines.filter((l) => l.trim() && !l.trim().startsWith(
+                const significantLines = blockLines.filter((l) => l.trim() && !l.trim().startsWith("//") && !l.trim().startsWith("*"));
                 if (significantLines.length < minLines / 2)
                     continue;
-                const hash = crypto.createHash(
+                const hash = crypto.createHash("md5").update(normalized).digest("hex");
                 blocks.push({
                     hash,
                     path: filePath,

@@ -226,16 +229,16 @@ async function extractCodeBlocks(filePath, minLines, minTokens) {
 function normalizeCode(code) {
     return (code
         // Remove comments
-        .replace(/\/\/.*$/gm,
-        .replace(/\/\*[\s\S]*?\*\//g,
+        .replace(/\/\/.*$/gm, "")
+        .replace(/\/\*[\s\S]*?\*\//g, "")
         // Normalize whitespace
-        .replace(/\s+/g,
+        .replace(/\s+/g, " ")
         // Remove string literals (replace with placeholder)
         .replace(/"[^"]*"/g, '""')
         .replace(/'[^']*'/g, "''")
-        .replace(/`[^`]*`/g,
+        .replace(/`[^`]*`/g, "``")
         // Normalize numbers
-        .replace(/\b\d+\b/g,
+        .replace(/\b\d+\b/g, "0")
         .trim());
 }
 /**

@@ -249,17 +252,17 @@ function countTokens(code) {
  * Truncate sample to first N lines
  */
 function truncateSample(content, maxLines) {
-    const lines = content.split(
+    const lines = content.split("\n");
     if (lines.length <= maxLines)
         return content;
-    return lines.slice(0, maxLines).join(
+    return lines.slice(0, maxLines).join("\n") + "\n...";
 }
 /**
  * Create customizable findDuplicates tool
  */
 export function createFindDuplicatesTool(options) {
     return defineTool({
-        name: options?.name ??
+        name: options?.name ?? "find_duplicates",
         description: options?.description ?? TOOL_DESCRIPTION,
         inputSchema: TOOL_INPUT_SCHEMA,
         execute: async (input) => {
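Taken together, the last few hunks describe the detection pipeline: normalize a block (strip comments, collapse whitespace, blank out literals and numbers), hash the normalized text, and group blocks by hash. Below is a standalone restatement of the normalization step; the function name `normalizeForHashing` is ours, and the regexes are copied from the normalizeCode hunk above.

```ts
// Same normalization as normalizeCode above: two blocks that differ only in
// comments, whitespace, literal values, or numbers hash to the same string.
function normalizeForHashing(code: string): string {
  return code
    .replace(/\/\/.*$/gm, "")          // line comments
    .replace(/\/\*[\s\S]*?\*\//g, "")  // block comments
    .replace(/\s+/g, " ")              // collapse whitespace
    .replace(/"[^"]*"/g, '""')         // string literals → placeholders
    .replace(/'[^']*'/g, "''")
    .replace(/`[^`]*`/g, "``")
    .replace(/\b\d+\b/g, "0")          // numbers → 0
    .trim();
}
```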
package/dist/tools/find-implementations.d.ts

@@ -4,8 +4,8 @@
  * Find classes that implement an interface or extend an abstract class.
  * Useful for understanding interface usage and finding concrete implementations.
  */
-import type { Tool } from
-import type { FindImplementationsInput } from
+import type { Tool } from "@compilr-dev/agents";
+import type { FindImplementationsInput } from "./types.js";
 /**
  * findImplementations tool - Find implementations of interfaces/abstract classes
  */