@compilr-dev/agents-coding-ts 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/LICENSE +21 -0
  2. package/dist/index.d.ts +34 -0
  3. package/dist/index.js +66 -0
  4. package/dist/parser/index.d.ts +7 -0
  5. package/dist/parser/index.js +6 -0
  6. package/dist/parser/typescript-parser.d.ts +22 -0
  7. package/dist/parser/typescript-parser.js +423 -0
  8. package/dist/skills/code-health.d.ts +9 -0
  9. package/dist/skills/code-health.js +167 -0
  10. package/dist/skills/code-structure.d.ts +9 -0
  11. package/dist/skills/code-structure.js +97 -0
  12. package/dist/skills/dependency-audit.d.ts +9 -0
  13. package/dist/skills/dependency-audit.js +110 -0
  14. package/dist/skills/index.d.ts +16 -0
  15. package/dist/skills/index.js +27 -0
  16. package/dist/skills/refactor-impact.d.ts +9 -0
  17. package/dist/skills/refactor-impact.js +135 -0
  18. package/dist/skills/type-analysis.d.ts +9 -0
  19. package/dist/skills/type-analysis.js +150 -0
  20. package/dist/tools/find-dead-code.d.ts +20 -0
  21. package/dist/tools/find-dead-code.js +375 -0
  22. package/dist/tools/find-duplicates.d.ts +21 -0
  23. package/dist/tools/find-duplicates.js +274 -0
  24. package/dist/tools/find-implementations.d.ts +21 -0
  25. package/dist/tools/find-implementations.js +436 -0
  26. package/dist/tools/find-patterns.d.ts +21 -0
  27. package/dist/tools/find-patterns.js +457 -0
  28. package/dist/tools/find-references.d.ts +23 -0
  29. package/dist/tools/find-references.js +488 -0
  30. package/dist/tools/find-symbol.d.ts +21 -0
  31. package/dist/tools/find-symbol.js +458 -0
  32. package/dist/tools/get-call-graph.d.ts +23 -0
  33. package/dist/tools/get-call-graph.js +469 -0
  34. package/dist/tools/get-complexity.d.ts +21 -0
  35. package/dist/tools/get-complexity.js +394 -0
  36. package/dist/tools/get-dependency-graph.d.ts +23 -0
  37. package/dist/tools/get-dependency-graph.js +482 -0
  38. package/dist/tools/get-documentation.d.ts +21 -0
  39. package/dist/tools/get-documentation.js +613 -0
  40. package/dist/tools/get-exports.d.ts +21 -0
  41. package/dist/tools/get-exports.js +427 -0
  42. package/dist/tools/get-file-structure.d.ts +27 -0
  43. package/dist/tools/get-file-structure.js +120 -0
  44. package/dist/tools/get-imports.d.ts +23 -0
  45. package/dist/tools/get-imports.js +350 -0
  46. package/dist/tools/get-signature.d.ts +20 -0
  47. package/dist/tools/get-signature.js +758 -0
  48. package/dist/tools/get-type-hierarchy.d.ts +22 -0
  49. package/dist/tools/get-type-hierarchy.js +485 -0
  50. package/dist/tools/index.d.ts +23 -0
  51. package/dist/tools/index.js +25 -0
  52. package/dist/tools/types.d.ts +1302 -0
  53. package/dist/tools/types.js +7 -0
  54. package/package.json +84 -0
@@ -0,0 +1,274 @@
+ /**
+  * findDuplicates Tool
+  *
+  * Detect duplicate code blocks across the codebase using content hashing.
+  * Helps identify opportunities for refactoring and code reuse.
+  */
+ import * as fs from 'node:fs/promises';
+ import * as path from 'node:path';
+ import * as crypto from 'node:crypto';
+ import { defineTool, createSuccessResult, createErrorResult } from '@compilr-dev/agents';
+ // Tool description
+ const TOOL_DESCRIPTION = `Detect duplicate code blocks across the codebase.
+ Uses content hashing to find similar code patterns.
+ Useful for identifying refactoring opportunities and reducing code duplication.`;
+ // Tool input schema
+ const TOOL_INPUT_SCHEMA = {
+     type: 'object',
+     properties: {
+         path: {
+             type: 'string',
+             description: 'Directory to analyze',
+         },
+         minLines: {
+             type: 'number',
+             description: 'Minimum lines for a duplicate (default: 6)',
+             default: 6,
+         },
+         minTokens: {
+             type: 'number',
+             description: 'Minimum tokens for a duplicate (default: 50)',
+             default: 50,
+         },
+         ignoreIdenticalFiles: {
+             type: 'boolean',
+             description: 'Ignore identical files (default: true)',
+             default: true,
+         },
+         maxFiles: {
+             type: 'number',
+             description: 'Maximum files to analyze (default: 100)',
+             default: 100,
+         },
+     },
+     required: ['path'],
+ };
+ // Default exclusions
+ const DEFAULT_EXCLUDE = ['node_modules', 'dist', 'build', '.git', 'coverage'];
+ /**
+  * findDuplicates tool
+  */
+ export const findDuplicatesTool = defineTool({
+     name: 'find_duplicates',
+     description: TOOL_DESCRIPTION,
+     inputSchema: TOOL_INPUT_SCHEMA,
+     execute: executeFindDuplicates,
+ });
+ /**
+  * Execute the findDuplicates tool
+  */
+ async function executeFindDuplicates(input) {
+     const { path: inputPath, minLines = 6, minTokens = 50, ignoreIdenticalFiles = true, maxFiles = 100, } = input;
+     try {
+         const resolvedPath = path.resolve(inputPath);
+         // Check if path exists
+         try {
+             await fs.access(resolvedPath);
+         }
+         catch {
+             return createErrorResult(`Path not found: ${resolvedPath}`);
+         }
+         const stats = await fs.stat(resolvedPath);
+         if (!stats.isDirectory()) {
+             return createErrorResult('findDuplicates requires a directory path');
+         }
+         // Collect files
+         const files = [];
+         await collectFiles(resolvedPath, files, maxFiles);
+         // Check for identical files first (if not ignoring)
+         const fileHashes = new Map();
+         if (!ignoreIdenticalFiles) {
+             for (const file of files) {
+                 const content = await fs.readFile(file, 'utf-8');
+                 const hash = crypto.createHash('md5').update(content).digest('hex');
+                 const existing = fileHashes.get(hash) ?? [];
+                 existing.push(file);
+                 fileHashes.set(hash, existing);
+             }
+         }
+         // Extract code blocks from each file
+         const allBlocks = [];
+         let totalLines = 0;
+         for (const file of files) {
+             const blocks = await extractCodeBlocks(file, minLines, minTokens);
+             allBlocks.push(...blocks);
+             // Count total lines
+             const content = await fs.readFile(file, 'utf-8');
+             totalLines += content.split('\n').length;
+         }
+         // Group by hash
+         const hashGroups = new Map();
+         for (const block of allBlocks) {
+             const existing = hashGroups.get(block.hash) ?? [];
+             existing.push(block);
+             hashGroups.set(block.hash, existing);
+         }
+         // Filter to only groups with duplicates
+         const duplicateGroups = [];
+         let groupId = 1;
+         let totalDuplicateLines = 0;
+         for (const [_hash, blocks] of hashGroups) {
+             if (blocks.length < 2)
+                 continue;
+             // Skip if all from same file
+             const uniqueFiles = new Set(blocks.map((b) => b.path));
+             if (uniqueFiles.size < 2)
+                 continue;
+             const locations = blocks.map((b) => ({
+                 path: b.path,
+                 startLine: b.startLine,
+                 endLine: b.endLine,
+             }));
+             const lines = blocks[0].endLine - blocks[0].startLine + 1;
+             totalDuplicateLines += lines * (blocks.length - 1); // Count extra copies
+             duplicateGroups.push({
+                 id: `DUP-${String(groupId++)}`,
+                 lines,
+                 tokens: blocks[0].tokens,
+                 locations,
+                 sample: truncateSample(blocks[0].content, 5), // First 5 lines
+             });
+         }
+         // Sort by lines (largest first)
+         duplicateGroups.sort((a, b) => b.lines - a.lines);
+         // Limit results
+         const limitedGroups = duplicateGroups.slice(0, 20);
+         const percentageDuplicate = totalLines > 0 ? Math.round((totalDuplicateLines / totalLines) * 10000) / 100 : 0;
+         const result = {
+             path: resolvedPath,
+             duplicates: limitedGroups,
+             stats: {
+                 filesAnalyzed: files.length,
+                 duplicateGroups: duplicateGroups.length,
+                 totalDuplicateLines,
+                 percentageDuplicate,
+             },
+         };
+         return createSuccessResult(result);
+     }
+     catch (error) {
+         return createErrorResult(`Failed to find duplicates: ${error instanceof Error ? error.message : String(error)}`);
+     }
+ }
+ /**
+  * Collect files to analyze
+  */
+ async function collectFiles(dirPath, files, maxFiles, currentDepth = 0) {
+     if (currentDepth > 10 || files.length >= maxFiles)
+         return;
+     try {
+         const entries = await fs.readdir(dirPath, { withFileTypes: true });
+         for (const entry of entries) {
+             if (files.length >= maxFiles)
+                 break;
+             const fullPath = path.join(dirPath, entry.name);
+             if (entry.isDirectory()) {
+                 if (DEFAULT_EXCLUDE.includes(entry.name))
+                     continue;
+                 await collectFiles(fullPath, files, maxFiles, currentDepth + 1);
+             }
+             else if (entry.isFile()) {
+                 // Only include TypeScript/JavaScript files
+                 if (/\.(ts|tsx|js|jsx)$/.test(entry.name) && !entry.name.endsWith('.d.ts')) {
+                     files.push(fullPath);
+                 }
+             }
+         }
+     }
+     catch {
+         // Ignore permission errors
+     }
+ }
+ /**
+  * Extract code blocks from a file using sliding window
+  */
+ async function extractCodeBlocks(filePath, minLines, minTokens) {
+     try {
+         const content = await fs.readFile(filePath, 'utf-8');
+         const lines = content.split('\n');
+         const blocks = [];
+         // Use sliding window approach
+         for (let start = 0; start <= lines.length - minLines; start++) {
+             // Try different block sizes
+             for (let size = minLines; size <= Math.min(minLines * 3, lines.length - start); size++) {
+                 const blockLines = lines.slice(start, start + size);
+                 const blockContent = blockLines.join('\n');
+                 // Normalize content for comparison
+                 const normalized = normalizeCode(blockContent);
+                 // Count tokens (simplified: split on whitespace and punctuation)
+                 const tokens = countTokens(normalized);
+                 if (tokens < minTokens)
+                     continue;
+                 // Skip if mostly empty/comments
+                 const significantLines = blockLines.filter((l) => l.trim() && !l.trim().startsWith('//') && !l.trim().startsWith('*'));
+                 if (significantLines.length < minLines / 2)
+                     continue;
+                 const hash = crypto.createHash('md5').update(normalized).digest('hex');
+                 blocks.push({
+                     hash,
+                     path: filePath,
+                     startLine: start + 1,
+                     endLine: start + size,
+                     content: blockContent,
+                     tokens,
+                 });
+             }
+         }
+         return blocks;
+     }
+     catch {
+         return [];
+     }
+ }
+ /**
+  * Normalize code for comparison (remove variable names, whitespace differences)
+  */
+ function normalizeCode(code) {
+     return (code
+         // Remove comments
+         .replace(/\/\/.*$/gm, '')
+         .replace(/\/\*[\s\S]*?\*\//g, '')
+         // Normalize whitespace
+         .replace(/\s+/g, ' ')
+         // Remove string literals (replace with placeholder)
+         .replace(/"[^"]*"/g, '""')
+         .replace(/'[^']*'/g, "''")
+         .replace(/`[^`]*`/g, '``')
+         // Normalize numbers
+         .replace(/\b\d+\b/g, '0')
+         .trim());
+ }
+ /**
+  * Count tokens in code
+  */
+ function countTokens(code) {
+     // Split on whitespace and common punctuation
+     return code.split(/[\s{}()[\];,.:]+/).filter((t) => t.length > 0).length;
+ }
+ /**
+  * Truncate sample to first N lines
+  */
+ function truncateSample(content, maxLines) {
+     const lines = content.split('\n');
+     if (lines.length <= maxLines)
+         return content;
+     return lines.slice(0, maxLines).join('\n') + '\n...';
+ }
+ /**
+  * Create customizable findDuplicates tool
+  */
+ export function createFindDuplicatesTool(options) {
+     return defineTool({
+         name: options?.name ?? 'find_duplicates',
+         description: options?.description ?? TOOL_DESCRIPTION,
+         inputSchema: TOOL_INPUT_SCHEMA,
+         execute: async (input) => {
+             const modifiedInput = {
+                 ...input,
+                 minLines: input.minLines ?? options?.defaultMinLines,
+                 maxFiles: input.maxFiles ?? options?.defaultMaxFiles,
+             };
+             return executeFindDuplicates(modifiedInput);
+         },
+     });
+ }
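
For orientation, here is a minimal usage sketch of the tool above. It is not part of the published diff: it assumes the package root re-exports findDuplicatesTool and that the Tool object returned by defineTool exposes the execute function it was given; the input fields mirror TOOL_INPUT_SCHEMA.

import { findDuplicatesTool } from '@compilr-dev/agents-coding-ts';

async function main() {
    // Scan ./src for blocks of at least 8 lines and 60 normalized tokens
    // that repeat across two or more files (the defaults are 6 and 50).
    const result = await findDuplicatesTool.execute({
        path: './src',
        minLines: 8,
        minTokens: 60,
        maxFiles: 200,
    });
    // Per executeFindDuplicates above, a successful result carries `duplicates`
    // (up to 20 groups, largest first) and `stats` (filesAnalyzed,
    // duplicateGroups, totalDuplicateLines, percentageDuplicate).
    console.log(JSON.stringify(result, null, 2));
}

main().catch(console.error);
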
@@ -0,0 +1,21 @@
+ /**
+  * findImplementations Tool
+  *
+  * Find classes that implement an interface or extend an abstract class.
+  * Useful for understanding interface usage and finding concrete implementations.
+  */
+ import type { Tool } from '@compilr-dev/agents';
+ import type { FindImplementationsInput } from './types.js';
+ /**
+  * findImplementations tool - Find implementations of interfaces/abstract classes
+  */
+ export declare const findImplementationsTool: Tool<FindImplementationsInput>;
+ /**
+  * Create customizable findImplementations tool
+  */
+ export declare function createFindImplementationsTool(options?: {
+     name?: string;
+     description?: string;
+     defaultScope?: string;
+     defaultMaxFiles?: number;
+ }): Tool<FindImplementationsInput>;
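
As with the previous file, a brief, hypothetical sketch of the factory declared above: the option names come from the .d.ts, while the import path, the meaning of defaultScope, and the shape of FindImplementationsInput are assumptions.

import { createFindImplementationsTool } from '@compilr-dev/agents-coding-ts';

// Build a renamed instance with narrower defaults, per the declared options
// (name, description, defaultScope, defaultMaxFiles). defaultScope is typed
// as a plain string in the .d.ts; a directory path is assumed here.
export const findServiceImplementationsTool = createFindImplementationsTool({
    name: 'find_service_implementations',
    description: 'Find concrete classes implementing a given service interface.',
    defaultScope: './src',
    defaultMaxFiles: 50,
});
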