@aiready/context-analyzer 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +24 -0
- package/CONTRIBUTING.md +134 -0
- package/LICENSE +21 -0
- package/README.md +412 -0
- package/dist/chunk-K6U64EL3.mjs +517 -0
- package/dist/chunk-T6ZCOPPI.mjs +538 -0
- package/dist/cli.d.mts +1 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +942 -0
- package/dist/cli.mjs +387 -0
- package/dist/index.d.mts +79 -0
- package/dist/index.d.ts +79 -0
- package/dist/index.js +563 -0
- package/dist/index.mjs +8 -0
- package/package.json +72 -0
- package/src/__tests__/analyzer.test.ts +175 -0
- package/src/analyzer.ts +426 -0
- package/src/cli.ts +451 -0
- package/src/index.ts +396 -0
- package/src/types.ts +104 -0
- package/tsconfig.json +8 -0
package/src/__tests__/analyzer.test.ts
ADDED
@@ -0,0 +1,175 @@
import { describe, it, expect } from 'vitest';
import {
  buildDependencyGraph,
  calculateImportDepth,
  getTransitiveDependencies,
  calculateContextBudget,
  detectCircularDependencies,
  calculateCohesion,
  calculateFragmentation,
} from '../analyzer';

describe('buildDependencyGraph', () => {
  it('should build a basic dependency graph', () => {
    const files = [
      {
        file: 'a.ts',
        content: `
export const a = 1;
`,
      },
      {
        file: 'b.ts',
        content: `
import { a } from './a';
export const b = a + 1;
`,
      },
    ];

    const graph = buildDependencyGraph(files);

    expect(graph.nodes.size).toBe(2);
    expect(graph.edges.get('b.ts')?.has('./a')).toBe(true);
  });
});

describe('calculateImportDepth', () => {
  it('should calculate import depth correctly', () => {
    const files = [
      { file: 'a.ts', content: 'export const a = 1;' },
      { file: 'b.ts', content: 'import { a } from "./a";\nexport const b = a;' },
      { file: 'c.ts', content: 'import { b } from "./b";\nexport const c = b;' },
    ];

    const graph = buildDependencyGraph(files);

    expect(calculateImportDepth('a.ts', graph)).toBe(0);
    expect(calculateImportDepth('b.ts', graph)).toBe(1);
    expect(calculateImportDepth('c.ts', graph)).toBe(2);
  });

  it('should handle circular dependencies gracefully', () => {
    const files = [
      { file: 'a.ts', content: 'import { b } from "./b";\nexport const a = 1;' },
      { file: 'b.ts', content: 'import { a } from "./a";\nexport const b = 2;' },
    ];

    const graph = buildDependencyGraph(files);

    // Should not infinite loop
    const depth = calculateImportDepth('a.ts', graph);
    expect(depth).toBeGreaterThanOrEqual(0);
  });
});

describe('getTransitiveDependencies', () => {
  it('should get all transitive dependencies', () => {
    const files = [
      { file: 'a.ts', content: 'export const a = 1;' },
      { file: 'b.ts', content: 'import { a } from "./a";\nexport const b = a;' },
      { file: 'c.ts', content: 'import { b } from "./b";\nexport const c = b;' },
    ];

    const graph = buildDependencyGraph(files);
    const deps = getTransitiveDependencies('c.ts', graph);

    expect(deps).toContain('./b');
    expect(deps).toContain('./a');
    expect(deps.length).toBe(2);
  });
});

describe('calculateContextBudget', () => {
  it('should calculate total token cost including dependencies', () => {
    const files = [
      { file: 'a.ts', content: 'export const a = 1;'.repeat(10) }, // ~40 tokens
      { file: 'b.ts', content: 'import { a } from "./a";\nexport const b = a;'.repeat(10) }, // ~60 tokens
    ];

    const graph = buildDependencyGraph(files);
    const budget = calculateContextBudget('b.ts', graph);

    // Should include both files' tokens
    expect(budget).toBeGreaterThan(80);
  });
});

describe('detectCircularDependencies', () => {
  it('should detect circular dependencies', () => {
    const files = [
      { file: 'a.ts', content: 'import { b } from "./b";\nexport const a = 1;' },
      { file: 'b.ts', content: 'import { a } from "./a";\nexport const b = 2;' },
    ];

    const graph = buildDependencyGraph(files);
    const cycles = detectCircularDependencies(graph);

    expect(cycles.length).toBeGreaterThan(0);
  });

  it('should return empty for no circular dependencies', () => {
    const files = [
      { file: 'a.ts', content: 'export const a = 1;' },
      { file: 'b.ts', content: 'import { a } from "./a";\nexport const b = a;' },
    ];

    const graph = buildDependencyGraph(files);
    const cycles = detectCircularDependencies(graph);

    expect(cycles.length).toBe(0);
  });
});

describe('calculateCohesion', () => {
  it('should return 1 for single export', () => {
    const exports = [{ name: 'foo', type: 'function' as const, inferredDomain: 'user' }];
    expect(calculateCohesion(exports)).toBe(1);
  });

  it('should return high cohesion for related exports', () => {
    const exports = [
      { name: 'getUser', type: 'function' as const, inferredDomain: 'user' },
      { name: 'updateUser', type: 'function' as const, inferredDomain: 'user' },
      { name: 'deleteUser', type: 'function' as const, inferredDomain: 'user' },
    ];

    const cohesion = calculateCohesion(exports);
    expect(cohesion).toBeGreaterThan(0.9);
  });

  it('should return low cohesion for mixed exports', () => {
    const exports = [
      { name: 'getUser', type: 'function' as const, inferredDomain: 'user' },
      { name: 'getOrder', type: 'function' as const, inferredDomain: 'order' },
      { name: 'parseConfig', type: 'function' as const, inferredDomain: 'config' },
    ];

    const cohesion = calculateCohesion(exports);
    expect(cohesion).toBeLessThan(0.5);
  });
});

describe('calculateFragmentation', () => {
  it('should return 0 for single file', () => {
    const files = ['src/user/user.ts'];
    expect(calculateFragmentation(files, 'user')).toBe(0);
  });

  it('should return 0 for files in same directory', () => {
    const files = ['src/user/get.ts', 'src/user/update.ts'];
    expect(calculateFragmentation(files, 'user')).toBe(0);
  });

  it('should return high fragmentation for scattered files', () => {
    const files = [
      'src/user/get.ts',
      'src/api/user.ts',
      'src/services/user.ts',
      'src/helpers/user.ts',
    ];

    const fragmentation = calculateFragmentation(files, 'user');
    expect(fragmentation).toBeGreaterThan(0.8);
  });
});
package/src/analyzer.ts
ADDED
@@ -0,0 +1,426 @@
import { estimateTokens } from '@aiready/core';
import type {
  ContextAnalysisResult,
  DependencyGraph,
  DependencyNode,
  ExportInfo,
  ModuleCluster,
} from './types';

interface FileContent {
  file: string;
  content: string;
}

/**
 * Build a dependency graph from file contents
 */
export function buildDependencyGraph(
  files: FileContent[]
): DependencyGraph {
  const nodes = new Map<string, DependencyNode>();
  const edges = new Map<string, Set<string>>();

  // First pass: Create nodes
  for (const { file, content } of files) {
    const imports = extractImportsFromContent(content);
    const exports = extractExports(content);
    const tokenCost = estimateTokens(content);
    const linesOfCode = content.split('\n').length;

    nodes.set(file, {
      file,
      imports,
      exports,
      tokenCost,
      linesOfCode,
    });

    edges.set(file, new Set(imports));
  }

  return { nodes, edges };
}
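
// Illustrative shape (mirrors the first test in analyzer.test.ts): for
// a.ts and b.ts, where b.ts contains `import { a } from './a'`, nodes is
// keyed by file path ('a.ts', 'b.ts') while edges.get('b.ts') holds the
// raw specifier './a'; specifiers are stored as written, not resolved
// back to file paths.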

/**
 * Extract imports from file content using regex
 * Simple implementation - could be improved with AST parsing
 */
function extractImportsFromContent(content: string): string[] {
  const imports: string[] = [];

  // Match various import patterns
  const patterns = [
    /import\s+.*?\s+from\s+['"](.+?)['"]/g, // import ... from '...'
    /import\s+['"](.+?)['"]/g, // import '...'
    /require\(['"](.+?)['"]\)/g, // require('...')
  ];

  for (const pattern of patterns) {
    let match;
    while ((match = pattern.exec(content)) !== null) {
      const importPath = match[1];
      if (importPath && !importPath.startsWith('@') && !importPath.startsWith('node:')) {
        // Only include relative/local imports
        imports.push(importPath);
      }
    }
  }

  return [...new Set(imports)]; // Deduplicate
}
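
// Illustrative (hypothetical input): for a file containing
//   import { a } from './a';
//   import '@scope/pkg';
//   const fs = require('node:fs');
// the result is ['./a']; scoped ('@...') and 'node:' specifiers are
// filtered out, while bare names such as 'react' would pass the filter.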

/**
 * Calculate the maximum depth of import tree for a file
 */
export function calculateImportDepth(
  file: string,
  graph: DependencyGraph,
  visited = new Set<string>(),
  depth = 0
): number {
  if (visited.has(file)) {
    return depth; // Circular dependency, return current depth
  }

  const dependencies = graph.edges.get(file);
  if (!dependencies || dependencies.size === 0) {
    return depth;
  }

  visited.add(file);
  let maxDepth = depth;

  for (const dep of dependencies) {
    const depDepth = calculateImportDepth(dep, graph, visited, depth + 1);
    maxDepth = Math.max(maxDepth, depDepth);
  }

  visited.delete(file);
  return maxDepth;
}
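
// Illustrative: a file with no outgoing edges has depth 0, and each hop
// through a specifier that is itself a key in graph.edges adds 1. The file
// is added to `visited` before recursing and removed afterwards, so a
// cycle returns the depth reached so far instead of recursing forever.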

/**
 * Get all transitive dependencies for a file
 */
export function getTransitiveDependencies(
  file: string,
  graph: DependencyGraph,
  visited = new Set<string>()
): string[] {
  if (visited.has(file)) {
    return [];
  }

  visited.add(file);
  const dependencies = graph.edges.get(file);
  if (!dependencies || dependencies.size === 0) {
    return [];
  }

  const allDeps: string[] = [];
  for (const dep of dependencies) {
    allDeps.push(dep);
    allDeps.push(...getTransitiveDependencies(dep, graph, visited));
  }

  return [...new Set(allDeps)]; // Deduplicate
}
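
// Illustrative: the walk follows edge values (import specifiers), so it
// only continues through specifiers that also appear as keys in
// graph.edges; everything reachable is flattened into one deduplicated
// list.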

/**
 * Calculate total context budget (tokens needed to understand this file)
 */
export function calculateContextBudget(
  file: string,
  graph: DependencyGraph
): number {
  const node = graph.nodes.get(file);
  if (!node) return 0;

  let totalTokens = node.tokenCost;
  const deps = getTransitiveDependencies(file, graph);

  for (const dep of deps) {
    const depNode = graph.nodes.get(dep);
    if (depNode) {
      totalTokens += depNode.tokenCost;
    }
  }

  return totalTokens;
}
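
// Worked example (illustrative numbers): a file costing 120 tokens with
// resolvable transitive dependencies costing 40 and 60 tokens has a budget
// of 120 + 40 + 60 = 220 tokens; specifiers with no matching node are
// skipped.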

/**
 * Detect circular dependencies
 */
export function detectCircularDependencies(
  graph: DependencyGraph
): string[][] {
  const cycles: string[][] = [];
  const visited = new Set<string>();
  const recursionStack = new Set<string>();

  function dfs(file: string, path: string[]): void {
    if (recursionStack.has(file)) {
      // Found a cycle
      const cycleStart = path.indexOf(file);
      if (cycleStart !== -1) {
        cycles.push([...path.slice(cycleStart), file]);
      }
      return;
    }

    if (visited.has(file)) {
      return;
    }

    visited.add(file);
    recursionStack.add(file);
    path.push(file);

    const dependencies = graph.edges.get(file);
    if (dependencies) {
      for (const dep of dependencies) {
        dfs(dep, [...path]);
      }
    }

    recursionStack.delete(file);
  }

  for (const file of graph.nodes.keys()) {
    if (!visited.has(file)) {
      dfs(file, []);
    }
  }

  return cycles;
}
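
// Illustrative: this is the standard DFS cycle check; `recursionStack`
// tracks the path currently being explored and `visited` prunes nodes seen
// on earlier walks. For a two-node cycle x -> y -> x the recorded cycle is
// ['x', 'y', 'x']: the path slice from the first occurrence of the
// repeated node, plus that node once more to close the loop.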

/**
 * Calculate cohesion score (how related are exports in a file)
 * Uses entropy: low entropy = high cohesion
 */
export function calculateCohesion(exports: ExportInfo[]): number {
  if (exports.length === 0) return 1;
  if (exports.length === 1) return 1; // Single export = perfect cohesion

  const domains = exports.map((e) => e.inferredDomain || 'unknown');
  const domainCounts = new Map<string, number>();

  for (const domain of domains) {
    domainCounts.set(domain, (domainCounts.get(domain) || 0) + 1);
  }

  // Calculate Shannon entropy
  const total = domains.length;
  let entropy = 0;

  for (const count of domainCounts.values()) {
    const p = count / total;
    if (p > 0) {
      entropy -= p * Math.log2(p);
    }
  }

  // Normalize to 0-1 (higher = better cohesion)
  const maxEntropy = Math.log2(total);
  return maxEntropy > 0 ? 1 - entropy / maxEntropy : 1;
}
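
// Worked example (illustrative): for domains ['user', 'user', 'order'] the
// probabilities are 2/3 and 1/3, so
//   entropy = -(2/3)*log2(2/3) - (1/3)*log2(1/3) ≈ 0.918
//   maxEntropy = log2(3) ≈ 1.585
//   cohesion ≈ 1 - 0.918 / 1.585 ≈ 0.42
// Three exports sharing one domain give entropy 0, hence cohesion 1, which
// is what the high- and low-cohesion tests in analyzer.test.ts expect.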

/**
 * Calculate fragmentation score (how scattered is a domain)
 */
export function calculateFragmentation(
  files: string[],
  domain: string
): number {
  if (files.length <= 1) return 0; // Single file = no fragmentation

  // Calculate how many different directories contain these files
  const directories = new Set(files.map((f) => f.split('/').slice(0, -1).join('/')));

  // Fragmentation = (unique directories - 1) / (total files - 1)
  // 0 = all in same dir, 1 = all in different dirs
  return (directories.size - 1) / (files.length - 1);
}
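
// Worked example (illustrative): three files spread over two directories
// give (2 - 1) / (3 - 1) = 0.5; the four scattered test files, each in its
// own directory, give (4 - 1) / (4 - 1) = 1.0, satisfying the
// toBeGreaterThan(0.8) expectation in analyzer.test.ts.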

/**
 * Group files by domain to detect module clusters
 */
export function detectModuleClusters(
  graph: DependencyGraph
): ModuleCluster[] {
  const domainMap = new Map<string, string[]>();

  // Group files by their primary domain
  for (const [file, node] of graph.nodes.entries()) {
    const domains = node.exports.map((e) => e.inferredDomain || 'unknown');
    const primaryDomain = domains[0] || 'unknown';

    if (!domainMap.has(primaryDomain)) {
      domainMap.set(primaryDomain, []);
    }
    domainMap.get(primaryDomain)!.push(file);
  }

  const clusters: ModuleCluster[] = [];

  for (const [domain, files] of domainMap.entries()) {
    if (files.length < 2) continue; // Skip single-file domains

    const totalTokens = files.reduce((sum, file) => {
      const node = graph.nodes.get(file);
      return sum + (node?.tokenCost || 0);
    }, 0);

    const fragmentationScore = calculateFragmentation(files, domain);

    const avgCohesion =
      files.reduce((sum, file) => {
        const node = graph.nodes.get(file);
        return sum + (node ? calculateCohesion(node.exports) : 0);
      }, 0) / files.length;

    // Generate consolidation plan
    const targetFiles = Math.max(1, Math.ceil(files.length / 3)); // Aim to reduce by ~66%
    const consolidationPlan = generateConsolidationPlan(
      domain,
      files,
      targetFiles
    );

    clusters.push({
      domain,
      files,
      totalTokens,
      fragmentationScore,
      avgCohesion,
      suggestedStructure: {
        targetFiles,
        consolidationPlan,
      },
    });
  }

  // Sort by fragmentation score (most fragmented first)
  return clusters.sort((a, b) => b.fragmentationScore - a.fragmentationScore);
}
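
// Illustrative: a file's primary domain is just the inferred domain of its
// first export, so six user-flavoured files scattered across six
// directories would surface as one cluster with fragmentationScore 1.0 and
// targetFiles = ceil(6 / 3) = 2.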

/**
 * Extract export information from file content
 * TODO: Use proper AST parsing for better accuracy
 */
function extractExports(content: string): ExportInfo[] {
  const exports: ExportInfo[] = [];

  // Simple regex-based extraction (improve with AST later)
  const patterns = [
    /export\s+function\s+(\w+)/g,
    /export\s+class\s+(\w+)/g,
    /export\s+const\s+(\w+)/g,
    /export\s+type\s+(\w+)/g,
    /export\s+interface\s+(\w+)/g,
    /export\s+default/g,
  ];

  const types: ExportInfo['type'][] = [
    'function',
    'class',
    'const',
    'type',
    'interface',
    'default',
  ];

  patterns.forEach((pattern, index) => {
    let match;
    while ((match = pattern.exec(content)) !== null) {
      const name = match[1] || 'default';
      const type = types[index];
      const inferredDomain = inferDomain(name);

      exports.push({ name, type, inferredDomain });
    }
  });

  return exports;
}
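
// Illustrative: `export const getUser = ...` yields
// { name: 'getUser', type: 'const', inferredDomain: 'user' }, and a bare
// `export default` is recorded under the name 'default'.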

/**
 * Infer domain from export name
 * Uses common naming patterns
 */
function inferDomain(name: string): string {
  const lower = name.toLowerCase();

  // Common domain keywords
  const domainKeywords = [
    'user',
    'auth',
    'order',
    'product',
    'payment',
    'cart',
    'invoice',
    'customer',
    'admin',
    'api',
    'util',
    'helper',
    'config',
    'service',
    'repository',
    'controller',
    'model',
    'view',
  ];

  for (const keyword of domainKeywords) {
    if (lower.includes(keyword)) {
      return keyword;
    }
  }

  return 'unknown';
}
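
// Illustrative: 'updateUser' -> 'user' and 'parseConfig' -> 'config', while
// a name matching no keyword (e.g. 'main') falls back to 'unknown'.
// Matching is first hit in list order, so 'userService' resolves to 'user'
// rather than 'service'.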

/**
 * Generate consolidation plan for fragmented modules
 */
function generateConsolidationPlan(
  domain: string,
  files: string[],
  targetFiles: number
): string[] {
  const plan: string[] = [];

  if (files.length <= targetFiles) {
    return [`No consolidation needed for ${domain}`];
  }

  plan.push(
    `Consolidate ${files.length} ${domain} files into ${targetFiles} cohesive file(s):`
  );

  // Group by directory
  const dirGroups = new Map<string, string[]>();
  for (const file of files) {
    const dir = file.split('/').slice(0, -1).join('/');
    if (!dirGroups.has(dir)) {
      dirGroups.set(dir, []);
    }
    dirGroups.get(dir)!.push(file);
  }

  plan.push(`1. Create unified ${domain} module file`);
  plan.push(
    `2. Move related functionality from ${files.length} scattered files`
  );
  plan.push(`3. Update imports in dependent files`);
  plan.push(
    `4. Remove old files after consolidation (verify with tests first)`
  );

  return plan;
}