@aiready/context-analyzer 0.1.0 → 0.1.2
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +7 -7
- package/.turbo/turbo-test.log +36 -0
- package/README.md +47 -0
- package/dist/chunk-72QC5QUS.mjs +549 -0
- package/dist/chunk-EH3PMNZQ.mjs +569 -0
- package/dist/chunk-N6XBOOVA.mjs +564 -0
- package/dist/cli.js +24 -10
- package/dist/cli.mjs +24 -10
- package/package.json +2 -2
- package/src/__tests__/analyzer.test.ts +9 -9
- package/src/cli.ts +32 -10
package/.turbo/turbo-build.log
CHANGED
@@ -1,6 +1,6 @@
 
 
-> @aiready/context-analyzer@0.1.
+> @aiready/context-analyzer@0.1.2 build /Users/pengcao/projects/aiready/packages/context-analyzer
 > tsup src/index.ts src/cli.ts --format cjs,esm --dts
 
 CLI Building entry: src/cli.ts, src/index.ts
@@ -9,15 +9,15 @@
 CLI Target: es2020
 CJS Build start
 ESM Build start
-ESM dist/index.mjs 124.00 B
-ESM dist/cli.mjs 12.61 KB
 ESM dist/chunk-T6ZCOPPI.mjs 17.25 KB
-ESM
-[
+ESM dist/cli.mjs 13.39 KB
+ESM dist/index.mjs 124.00 B
+ESM ⚡️ Build success in 57ms
+CJS dist/cli.js 32.65 KB
 CJS dist/index.js 18.33 KB
-CJS ⚡️ Build success in
+CJS ⚡️ Build success in 57ms
 DTS Build start
-DTS ⚡️ Build success in
+DTS ⚡️ Build success in 624ms
 DTS dist/cli.d.ts 20.00 B
 DTS dist/index.d.ts 2.14 KB
 DTS dist/cli.d.mts 20.00 B
package/.turbo/turbo-test.log
ADDED
@@ -0,0 +1,36 @@
+
+
+> @aiready/context-analyzer@0.1.0 test /Users/pengcao/projects/aiready/packages/context-analyzer
+> vitest run
+
+
+ RUN  v2.1.9 /Users/pengcao/projects/aiready/packages/context-analyzer
+
+ ✓ src/__tests__/analyzer.test.ts (13)
+   ✓ buildDependencyGraph (1)
+     ✓ should build a basic dependency graph
+   ✓ calculateImportDepth (2)
+     ✓ should calculate import depth correctly
+     ✓ should handle circular dependencies gracefully
+   ✓ getTransitiveDependencies (1)
+     ✓ should get all transitive dependencies
+   ✓ calculateContextBudget (1)
+     ✓ should calculate total token cost including dependencies
+   ✓ detectCircularDependencies (2)
+     ✓ should detect circular dependencies
+     ✓ should return empty for no circular dependencies
+   ✓ calculateCohesion (3)
+     ✓ should return 1 for single export
+     ✓ should return high cohesion for related exports
+     ✓ should return low cohesion for mixed exports
+   ✓ calculateFragmentation (3)
+     ✓ should return 0 for single file
+     ✓ should return 0 for files in same directory
+     ✓ should return high fragmentation for scattered files
+
+ Test Files  1 passed (1)
+      Tests  13 passed (13)
+   Start at  03:16:28
+   Duration  314ms (transform 75ms, setup 0ms, collect 86ms, tests 4ms, environment 0ms, prepare 38ms)
+
+
package/README.md
CHANGED
@@ -85,6 +85,27 @@ aiready-context ./src --output html --output-file report.html
 aiready-context ./src --exclude "**/test/**,**/*.test.ts"
 ```
 
+### Configuration
+
+Create an `aiready.json` or `aiready.config.json` file in your project root:
+
+```json
+{
+  "scan": {
+    "include": ["**/*.{ts,tsx,js,jsx}"],
+    "exclude": ["**/test/**", "**/*.test.*"]
+  },
+  "tools": {
+    "context-analyzer": {
+      "maxDepth": 4,
+      "maxContextBudget": 8000,
+      "minCohesion": 0.7,
+      "includeNodeModules": false
+    }
+  }
+}
+```
+
 ### Sample Output
 
 ```bash
@@ -236,6 +257,32 @@ for (const result of results) {
 --output-file <path> # Output file path (for json/html)
 ```
 
+### Default Exclusions
+
+By default, these patterns are excluded (unless `--include-node-modules` is used):
+```bash
+# Dependencies (excluded by default, override with --include-node-modules)
+**/node_modules/**
+
+# Build outputs
+**/dist/**, **/build/**, **/out/**, **/output/**, **/target/**, **/bin/**, **/obj/**
+
+# Framework-specific build dirs
+**/.next/**, **/.nuxt/**, **/.vuepress/**, **/.cache/**, **/.turbo/**
+
+# Test and coverage
+**/coverage/**, **/.nyc_output/**, **/.jest/**
+
+# Version control and IDE
+**/.git/**, **/.svn/**, **/.hg/**, **/.vscode/**, **/.idea/**, **/*.swp, **/*.swo
+
+# Build artifacts and minified files
+**/*.min.js, **/*.min.css, **/*.bundle.js, **/*.tsbuildinfo
+
+# Logs and temporary files
+**/logs/**, **/*.log, **/.DS_Store
+```
+
 ### API Options
 
 ```typescript
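
The new configuration keys mirror the options destructured by the `analyzeContext` export in the bundled chunk shown further down. As a minimal programmatic sketch (the `include`/`exclude` keys are assumptions about the scan options forwarded to `@aiready/core`; only `maxDepth`, `maxContextBudget`, `minCohesion`, and `includeNodeModules` appear verbatim in this diff):

```typescript
import { analyzeContext } from '@aiready/context-analyzer';

// Programmatic equivalent of the aiready.json example above.
// `include`/`exclude` are assumed pass-through scan options, not confirmed by this diff.
const results = await analyzeContext({
  include: ['**/*.{ts,tsx,js,jsx}'],
  exclude: ['**/test/**', '**/*.test.*'],
  maxDepth: 4,
  maxContextBudget: 8000,
  minCohesion: 0.7,
  includeNodeModules: false,
});
```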
package/dist/chunk-72QC5QUS.mjs
ADDED
@@ -0,0 +1,549 @@
+// src/index.ts
+import { scanFiles, readFileContent } from "@aiready/core";
+
+// src/analyzer.ts
+import { estimateTokens } from "@aiready/core";
+function buildDependencyGraph(files) {
+  const nodes = /* @__PURE__ */ new Map();
+  const edges = /* @__PURE__ */ new Map();
+  for (const { file, content } of files) {
+    const imports = extractImportsFromContent(content);
+    const exports = extractExports(content);
+    const tokenCost = estimateTokens(content);
+    const linesOfCode = content.split("\n").length;
+    nodes.set(file, {
+      file,
+      imports,
+      exports,
+      tokenCost,
+      linesOfCode
+    });
+    edges.set(file, new Set(imports));
+  }
+  return { nodes, edges };
+}
+function extractImportsFromContent(content) {
+  const imports = [];
+  const patterns = [
+    /import\s+.*?\s+from\s+['"](.+?)['"]/g,
+    // import ... from '...'
+    /import\s+['"](.+?)['"]/g,
+    // import '...'
+    /require\(['"](.+?)['"]\)/g
+    // require('...')
+  ];
+  for (const pattern of patterns) {
+    let match;
+    while ((match = pattern.exec(content)) !== null) {
+      const importPath = match[1];
+      if (importPath && !importPath.startsWith("@") && !importPath.startsWith("node:")) {
+        imports.push(importPath);
+      }
+    }
+  }
+  return [...new Set(imports)];
+}
+function calculateImportDepth(file, graph, visited = /* @__PURE__ */ new Set()) {
+  if (visited.has(file)) {
+    return 0;
+  }
+  const dependencies = graph.edges.get(file);
+  if (!dependencies || dependencies.size === 0) {
+    return 0;
+  }
+  visited.add(file);
+  let maxDepth = 0;
+  for (const dep of dependencies) {
+    const depDepth = calculateImportDepth(dep, graph, visited);
+    maxDepth = Math.max(maxDepth, depDepth + 1);
+  }
+  visited.delete(file);
+  return maxDepth;
+}
+function getTransitiveDependencies(file, graph, visited = /* @__PURE__ */ new Set(), collected = /* @__PURE__ */ new Set()) {
+  if (visited.has(file)) {
+    return [];
+  }
+  visited.add(file);
+  const dependencies = graph.edges.get(file);
+  if (!dependencies || dependencies.size === 0) {
+    visited.delete(file);
+    return [];
+  }
+  const allDeps = [];
+  for (const dep of dependencies) {
+    if (!collected.has(dep)) {
+      collected.add(dep);
+      allDeps.push(dep);
+    }
+    const transitiveDeps = getTransitiveDependencies(dep, graph, visited, collected);
+    for (const transDep of transitiveDeps) {
+      if (!collected.has(transDep)) {
+        collected.add(transDep);
+        allDeps.push(transDep);
+      }
+    }
+  }
+  visited.delete(file);
+  return allDeps;
+}
+function calculateContextBudget(file, graph) {
+  const node = graph.nodes.get(file);
+  if (!node) return 0;
+  let totalTokens = node.tokenCost;
+  const deps = getTransitiveDependencies(file, graph);
+  for (const dep of deps) {
+    const depNode = graph.nodes.get(dep);
+    if (depNode) {
+      totalTokens += depNode.tokenCost;
+    }
+  }
+  return totalTokens;
+}
+function detectCircularDependencies(graph) {
+  const cycles = [];
+  const visited = /* @__PURE__ */ new Set();
+  const recursionStack = /* @__PURE__ */ new Set();
+  function dfs(file, path) {
+    if (recursionStack.has(file)) {
+      const cycleStart = path.indexOf(file);
+      if (cycleStart !== -1) {
+        cycles.push([...path.slice(cycleStart), file]);
+      }
+      return;
+    }
+    if (visited.has(file)) {
+      return;
+    }
+    visited.add(file);
+    recursionStack.add(file);
+    path.push(file);
+    const dependencies = graph.edges.get(file);
+    if (dependencies) {
+      for (const dep of dependencies) {
+        dfs(dep, [...path]);
+      }
+    }
+    recursionStack.delete(file);
+  }
+  for (const file of graph.nodes.keys()) {
+    if (!visited.has(file)) {
+      dfs(file, []);
+    }
+  }
+  return cycles;
+}
+function calculateCohesion(exports) {
+  if (exports.length === 0) return 1;
+  if (exports.length === 1) return 1;
+  const domains = exports.map((e) => e.inferredDomain || "unknown");
+  const domainCounts = /* @__PURE__ */ new Map();
+  for (const domain of domains) {
+    domainCounts.set(domain, (domainCounts.get(domain) || 0) + 1);
+  }
+  const total = domains.length;
+  let entropy = 0;
+  for (const count of domainCounts.values()) {
+    const p = count / total;
+    if (p > 0) {
+      entropy -= p * Math.log2(p);
+    }
+  }
+  const maxEntropy = Math.log2(total);
+  return maxEntropy > 0 ? 1 - entropy / maxEntropy : 1;
+}
+function calculateFragmentation(files, domain) {
+  if (files.length <= 1) return 0;
+  const directories = new Set(files.map((f) => f.split("/").slice(0, -1).join("/")));
+  return (directories.size - 1) / (files.length - 1);
+}
+function detectModuleClusters(graph) {
+  const domainMap = /* @__PURE__ */ new Map();
+  for (const [file, node] of graph.nodes.entries()) {
+    const domains = node.exports.map((e) => e.inferredDomain || "unknown");
+    const primaryDomain = domains[0] || "unknown";
+    if (!domainMap.has(primaryDomain)) {
+      domainMap.set(primaryDomain, []);
+    }
+    domainMap.get(primaryDomain).push(file);
+  }
+  const clusters = [];
+  for (const [domain, files] of domainMap.entries()) {
+    if (files.length < 2) continue;
+    const totalTokens = files.reduce((sum, file) => {
+      const node = graph.nodes.get(file);
+      return sum + (node?.tokenCost || 0);
+    }, 0);
+    const fragmentationScore = calculateFragmentation(files, domain);
+    const avgCohesion = files.reduce((sum, file) => {
+      const node = graph.nodes.get(file);
+      return sum + (node ? calculateCohesion(node.exports) : 0);
+    }, 0) / files.length;
+    const targetFiles = Math.max(1, Math.ceil(files.length / 3));
+    const consolidationPlan = generateConsolidationPlan(
+      domain,
+      files,
+      targetFiles
+    );
+    clusters.push({
+      domain,
+      files,
+      totalTokens,
+      fragmentationScore,
+      avgCohesion,
+      suggestedStructure: {
+        targetFiles,
+        consolidationPlan
+      }
+    });
+  }
+  return clusters.sort((a, b) => b.fragmentationScore - a.fragmentationScore);
+}
+function extractExports(content) {
+  const exports = [];
+  const patterns = [
+    /export\s+function\s+(\w+)/g,
+    /export\s+class\s+(\w+)/g,
+    /export\s+const\s+(\w+)/g,
+    /export\s+type\s+(\w+)/g,
+    /export\s+interface\s+(\w+)/g,
+    /export\s+default/g
+  ];
+  const types = [
+    "function",
+    "class",
+    "const",
+    "type",
+    "interface",
+    "default"
+  ];
+  patterns.forEach((pattern, index) => {
+    let match;
+    while ((match = pattern.exec(content)) !== null) {
+      const name = match[1] || "default";
+      const type = types[index];
+      const inferredDomain = inferDomain(name);
+      exports.push({ name, type, inferredDomain });
+    }
+  });
+  return exports;
+}
+function inferDomain(name) {
+  const lower = name.toLowerCase();
+  const domainKeywords = [
+    "user",
+    "auth",
+    "order",
+    "product",
+    "payment",
+    "cart",
+    "invoice",
+    "customer",
+    "admin",
+    "api",
+    "util",
+    "helper",
+    "config",
+    "service",
+    "repository",
+    "controller",
+    "model",
+    "view"
+  ];
+  for (const keyword of domainKeywords) {
+    if (lower.includes(keyword)) {
+      return keyword;
+    }
+  }
+  return "unknown";
+}
+function generateConsolidationPlan(domain, files, targetFiles) {
+  const plan = [];
+  if (files.length <= targetFiles) {
+    return [`No consolidation needed for ${domain}`];
+  }
+  plan.push(
+    `Consolidate ${files.length} ${domain} files into ${targetFiles} cohesive file(s):`
+  );
+  const dirGroups = /* @__PURE__ */ new Map();
+  for (const file of files) {
+    const dir = file.split("/").slice(0, -1).join("/");
+    if (!dirGroups.has(dir)) {
+      dirGroups.set(dir, []);
+    }
+    dirGroups.get(dir).push(file);
+  }
+  plan.push(`1. Create unified ${domain} module file`);
+  plan.push(
+    `2. Move related functionality from ${files.length} scattered files`
+  );
+  plan.push(`3. Update imports in dependent files`);
+  plan.push(
+    `4. Remove old files after consolidation (verify with tests first)`
+  );
+  return plan;
+}
+
+// src/index.ts
+async function analyzeContext(options) {
+  const {
+    maxDepth = 5,
+    maxContextBudget = 1e4,
+    minCohesion = 0.6,
+    maxFragmentation = 0.5,
+    focus = "all",
+    includeNodeModules = false,
+    ...scanOptions
+  } = options;
+  const files = await scanFiles({
+    ...scanOptions,
+    exclude: includeNodeModules ? scanOptions.exclude : [...scanOptions.exclude || [], "**/node_modules/**"]
+  });
+  const fileContents = await Promise.all(
+    files.map(async (file) => ({
+      file,
+      content: await readFileContent(file)
+    }))
+  );
+  const graph = buildDependencyGraph(fileContents);
+  const circularDeps = detectCircularDependencies(graph);
+  const clusters = detectModuleClusters(graph);
+  const fragmentationMap = /* @__PURE__ */ new Map();
+  for (const cluster of clusters) {
+    for (const file of cluster.files) {
+      fragmentationMap.set(file, cluster.fragmentationScore);
+    }
+  }
+  const results = [];
+  for (const { file } of fileContents) {
+    const node = graph.nodes.get(file);
+    if (!node) continue;
+    const importDepth = focus === "depth" || focus === "all" ? calculateImportDepth(file, graph) : 0;
+    const dependencyList = focus === "depth" || focus === "all" ? getTransitiveDependencies(file, graph) : [];
+    const contextBudget = focus === "all" ? calculateContextBudget(file, graph) : node.tokenCost;
+    const cohesionScore = focus === "cohesion" || focus === "all" ? calculateCohesion(node.exports) : 1;
+    const fragmentationScore = fragmentationMap.get(file) || 0;
+    const relatedFiles = [];
+    for (const cluster of clusters) {
+      if (cluster.files.includes(file)) {
+        relatedFiles.push(...cluster.files.filter((f) => f !== file));
+        break;
+      }
+    }
+    const { severity, issues, recommendations, potentialSavings } = analyzeIssues({
+      file,
+      importDepth,
+      contextBudget,
+      cohesionScore,
+      fragmentationScore,
+      maxDepth,
+      maxContextBudget,
+      minCohesion,
+      maxFragmentation,
+      circularDeps
+    });
+    const domains = [
+      ...new Set(node.exports.map((e) => e.inferredDomain || "unknown"))
+    ];
+    results.push({
+      file,
+      tokenCost: node.tokenCost,
+      linesOfCode: node.linesOfCode,
+      importDepth,
+      dependencyCount: dependencyList.length,
+      dependencyList,
+      circularDeps: circularDeps.filter((cycle) => cycle.includes(file)),
+      cohesionScore,
+      domains,
+      exportCount: node.exports.length,
+      contextBudget,
+      fragmentationScore,
+      relatedFiles,
+      severity,
+      issues,
+      recommendations,
+      potentialSavings
+    });
+  }
+  return results.sort((a, b) => {
+    const severityOrder = { critical: 0, major: 1, minor: 2, info: 3 };
+    const severityDiff = severityOrder[a.severity] - severityOrder[b.severity];
+    if (severityDiff !== 0) return severityDiff;
+    return b.contextBudget - a.contextBudget;
+  });
+}
+function generateSummary(results) {
+  if (results.length === 0) {
+    return {
+      totalFiles: 0,
+      totalTokens: 0,
+      avgContextBudget: 0,
+      maxContextBudget: 0,
+      avgImportDepth: 0,
+      maxImportDepth: 0,
+      deepFiles: [],
+      avgFragmentation: 0,
+      fragmentedModules: [],
+      avgCohesion: 0,
+      lowCohesionFiles: [],
+      criticalIssues: 0,
+      majorIssues: 0,
+      minorIssues: 0,
+      totalPotentialSavings: 0,
+      topExpensiveFiles: []
+    };
+  }
+  const totalFiles = results.length;
+  const totalTokens = results.reduce((sum, r) => sum + r.tokenCost, 0);
+  const totalContextBudget = results.reduce(
+    (sum, r) => sum + r.contextBudget,
+    0
+  );
+  const avgContextBudget = totalContextBudget / totalFiles;
+  const maxContextBudget = Math.max(...results.map((r) => r.contextBudget));
+  const avgImportDepth = results.reduce((sum, r) => sum + r.importDepth, 0) / totalFiles;
+  const maxImportDepth = Math.max(...results.map((r) => r.importDepth));
+  const deepFiles = results.filter((r) => r.importDepth >= 5).map((r) => ({ file: r.file, depth: r.importDepth })).sort((a, b) => b.depth - a.depth).slice(0, 10);
+  const avgFragmentation = results.reduce((sum, r) => sum + r.fragmentationScore, 0) / totalFiles;
+  const moduleMap = /* @__PURE__ */ new Map();
+  for (const result of results) {
+    for (const domain of result.domains) {
+      if (!moduleMap.has(domain)) {
+        moduleMap.set(domain, []);
+      }
+      moduleMap.get(domain).push(result);
+    }
+  }
+  const fragmentedModules = [];
+  for (const [domain, files] of moduleMap.entries()) {
+    if (files.length < 2) continue;
+    const fragmentationScore = files.reduce((sum, f) => sum + f.fragmentationScore, 0) / files.length;
+    if (fragmentationScore < 0.3) continue;
+    const totalTokens2 = files.reduce((sum, f) => sum + f.tokenCost, 0);
+    const avgCohesion2 = files.reduce((sum, f) => sum + f.cohesionScore, 0) / files.length;
+    const targetFiles = Math.max(1, Math.ceil(files.length / 3));
+    fragmentedModules.push({
+      domain,
+      files: files.map((f) => f.file),
+      totalTokens: totalTokens2,
+      fragmentationScore,
+      avgCohesion: avgCohesion2,
+      suggestedStructure: {
+        targetFiles,
+        consolidationPlan: [
+          `Consolidate ${files.length} ${domain} files into ${targetFiles} cohesive file(s)`,
+          `Current token cost: ${totalTokens2.toLocaleString()}`,
+          `Estimated savings: ${Math.floor(totalTokens2 * 0.3).toLocaleString()} tokens (30%)`
+        ]
+      }
+    });
+  }
+  fragmentedModules.sort((a, b) => b.fragmentationScore - a.fragmentationScore);
+  const avgCohesion = results.reduce((sum, r) => sum + r.cohesionScore, 0) / totalFiles;
+  const lowCohesionFiles = results.filter((r) => r.cohesionScore < 0.6).map((r) => ({ file: r.file, score: r.cohesionScore })).sort((a, b) => a.score - b.score).slice(0, 10);
+  const criticalIssues = results.filter((r) => r.severity === "critical").length;
+  const majorIssues = results.filter((r) => r.severity === "major").length;
+  const minorIssues = results.filter((r) => r.severity === "minor").length;
+  const totalPotentialSavings = results.reduce(
+    (sum, r) => sum + r.potentialSavings,
+    0
+  );
+  const topExpensiveFiles = results.sort((a, b) => b.contextBudget - a.contextBudget).slice(0, 10).map((r) => ({
+    file: r.file,
+    contextBudget: r.contextBudget,
+    severity: r.severity
+  }));
+  return {
+    totalFiles,
+    totalTokens,
+    avgContextBudget,
+    maxContextBudget,
+    avgImportDepth,
+    maxImportDepth,
+    deepFiles,
+    avgFragmentation,
+    fragmentedModules: fragmentedModules.slice(0, 10),
+    avgCohesion,
+    lowCohesionFiles,
+    criticalIssues,
+    majorIssues,
+    minorIssues,
+    totalPotentialSavings,
+    topExpensiveFiles
+  };
+}
+function analyzeIssues(params) {
+  const {
+    file,
+    importDepth,
+    contextBudget,
+    cohesionScore,
+    fragmentationScore,
+    maxDepth,
+    maxContextBudget,
+    minCohesion,
+    maxFragmentation,
+    circularDeps
+  } = params;
+  const issues = [];
+  const recommendations = [];
+  let severity = "info";
+  let potentialSavings = 0;
+  if (circularDeps.length > 0) {
+    severity = "critical";
+    issues.push(
+      `Part of ${circularDeps.length} circular dependency chain(s)`
+    );
+    recommendations.push("Break circular dependencies by extracting interfaces or using dependency injection");
+    potentialSavings += contextBudget * 0.2;
+  }
+  if (importDepth > maxDepth * 1.5) {
+    severity = severity === "critical" ? "critical" : "critical";
+    issues.push(`Import depth ${importDepth} exceeds limit by 50%`);
+    recommendations.push("Flatten dependency tree or use facade pattern");
+    potentialSavings += contextBudget * 0.3;
+  } else if (importDepth > maxDepth) {
+    severity = severity === "critical" ? "critical" : "major";
+    issues.push(`Import depth ${importDepth} exceeds recommended maximum ${maxDepth}`);
+    recommendations.push("Consider reducing dependency depth");
+    potentialSavings += contextBudget * 0.15;
+  }
+  if (contextBudget > maxContextBudget * 1.5) {
+    severity = severity === "critical" ? "critical" : "critical";
+    issues.push(`Context budget ${contextBudget.toLocaleString()} tokens is 50% over limit`);
+    recommendations.push("Split into smaller modules or reduce dependency tree");
+    potentialSavings += contextBudget * 0.4;
+  } else if (contextBudget > maxContextBudget) {
+    severity = severity === "critical" || severity === "major" ? severity : "major";
+    issues.push(`Context budget ${contextBudget.toLocaleString()} exceeds ${maxContextBudget.toLocaleString()}`);
+    recommendations.push("Reduce file size or dependencies");
+    potentialSavings += contextBudget * 0.2;
+  }
+  if (cohesionScore < minCohesion * 0.5) {
+    severity = severity === "critical" ? "critical" : "major";
+    issues.push(`Very low cohesion (${(cohesionScore * 100).toFixed(0)}%) - mixed concerns`);
+    recommendations.push("Split file by domain - separate unrelated functionality");
+    potentialSavings += contextBudget * 0.25;
+  } else if (cohesionScore < minCohesion) {
+    severity = severity === "critical" || severity === "major" ? severity : "minor";
+    issues.push(`Low cohesion (${(cohesionScore * 100).toFixed(0)}%)`);
+    recommendations.push("Consider grouping related exports together");
+    potentialSavings += contextBudget * 0.1;
+  }
+  if (fragmentationScore > maxFragmentation) {
+    severity = severity === "critical" || severity === "major" ? severity : "minor";
+    issues.push(`High fragmentation (${(fragmentationScore * 100).toFixed(0)}%) - scattered implementation`);
+    recommendations.push("Consolidate with related files in same domain");
+    potentialSavings += contextBudget * 0.3;
+  }
+  if (issues.length === 0) {
+    issues.push("No significant issues detected");
+    recommendations.push("File is well-structured for AI context usage");
+  }
+  return { severity, issues, recommendations, potentialSavings: Math.floor(potentialSavings) };
+}
+
+export {
+  analyzeContext,
+  generateSummary
+};
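
The new chunk's public surface comes down to two exports, `analyzeContext` and `generateSummary`. For orientation, a rough usage sketch with the defaults copied from the destructuring above (nothing here is taken from official docs): cohesion is `1 - entropy / maxEntropy` over the inferred export domains, so three exports split across `user`, `auth`, `auth` score roughly `1 - 0.918/1.585 ≈ 0.42`, and fragmentation is `(directories - 1) / (files - 1)` within a domain cluster.

```typescript
import { analyzeContext, generateSummary } from '@aiready/context-analyzer';

async function report(): Promise<void> {
  // Threshold options and defaults appear verbatim in the bundled analyzeContext above;
  // any other keys are forwarded to @aiready/core's scanFiles and are not shown in this diff.
  const results = await analyzeContext({
    maxDepth: 5,              // flag files whose import chain exceeds this depth
    maxContextBudget: 10_000, // tokens for the file plus its transitive dependencies
    minCohesion: 0.6,         // 1 - entropy/maxEntropy over inferred export domains
    maxFragmentation: 0.5,    // (directories - 1) / (files - 1) per domain cluster
    focus: 'all',             // 'depth' | 'cohesion' | 'all', per the conditionals above
    includeNodeModules: false,
  });

  // Results arrive pre-sorted by severity, then by context budget (descending).
  const summary = generateSummary(results);
  console.log(`critical: ${summary.criticalIssues}, major: ${summary.majorIssues}`);
  console.log(`potential savings: ${summary.totalPotentialSavings} tokens`);
}

report().catch(console.error);
```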