@aiready/context-analyzer 0.4.5 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 
  
- > @aiready/context-analyzer@0.4.5 build /Users/pengcao/projects/aiready/packages/context-analyzer
+ > @aiready/context-analyzer@0.5.0 build /Users/pengcao/projects/aiready/packages/context-analyzer
  > tsup src/index.ts src/cli.ts --format cjs,esm --dts
 
  CLI Building entry: src/cli.ts, src/index.ts
@@ -9,15 +9,15 @@
  CLI Target: es2020
  CJS Build start
  ESM Build start
- CJS dist/index.js 20.38 KB
- CJS dist/cli.js 39.03 KB
- CJS ⚡️ Build success in 58ms
- ESM dist/chunk-45P4RDYP.mjs 19.24 KB
+ CJS dist/cli.js 39.27 KB
+ CJS dist/index.js 20.62 KB
+ CJS ⚡️ Build success in 42ms
  ESM dist/cli.mjs 18.45 KB
  ESM dist/index.mjs 164.00 B
- ESM ⚡️ Build success in 58ms
+ ESM dist/chunk-NJUW6VED.mjs 19.48 KB
+ ESM ⚡️ Build success in 42ms
  DTS Build start
- DTS ⚡️ Build success in 543ms
+ DTS ⚡️ Build success in 551ms
  DTS dist/cli.d.ts 20.00 B
  DTS dist/index.d.ts 2.44 KB
  DTS dist/cli.d.mts 20.00 B
@@ -1,12 +1,33 @@
 
  
- > @aiready/context-analyzer@0.3.8 test /Users/pengcao/projects/aiready/packages/context-analyzer
+ > @aiready/context-analyzer@0.5.0 test /Users/pengcao/projects/aiready/packages/context-analyzer
  > vitest run
 
 
   RUN  v2.1.9 /Users/pengcao/projects/aiready/packages/context-analyzer
 
- ✓ src/__tests__/analyzer.test.ts (13)
+ [?25l · src/__tests__/analyzer.test.ts (13)
+ · buildDependencyGraph (1)
+ · should build a basic dependency graph
+ · calculateImportDepth (2)
+ · should calculate import depth correctly
+ · should handle circular dependencies gracefully
+ · getTransitiveDependencies (1)
+ · should get all transitive dependencies
+ · calculateContextBudget (1)
+ · should calculate total token cost including dependencies
+ · detectCircularDependencies (2)
+ · should detect circular dependencies
+ · should return empty for no circular dependencies
+ · calculateCohesion (3)
+ · should return 1 for single export
+ · should return high cohesion for related exports
+ · should return low cohesion for mixed exports
+ · calculateFragmentation (3)
+ · should return 0 for single file
+ · should return 0 for files in same directory
+ · should return high fragmentation for scattered files
+ [?25l ✓ src/__tests__/analyzer.test.ts (13)
  ✓ buildDependencyGraph (1)
  ✓ should build a basic dependency graph
  ✓ calculateImportDepth (2)
@@ -30,7 +51,7 @@
 
   Test Files  1 passed (1)
   Tests  13 passed (13)
-  Start at  03:56:27
-  Duration  286ms (transform 65ms, setup 0ms, collect 76ms, tests 3ms, environment 0ms, prepare 45ms)
+  Start at  07:46:59
+  Duration  394ms (transform 66ms, setup 0ms, collect 75ms, tests 41ms, environment 0ms, prepare 42ms)
 
- [?25h
+ [?25h[?25h
package/README.md CHANGED
@@ -67,6 +67,9 @@ npx @aiready/context-analyzer ./src
  # Basic usage
  aiready-context ./src
 
+ # Show more results in console (default: 10)
+ aiready-context ./src --max-results 25
+
  # Focus on specific concerns
  aiready-context ./src --focus fragmentation
  aiready-context ./src --focus cohesion
@@ -75,7 +78,7 @@ aiready-context ./src --focus depth
  # Set thresholds
  aiready-context ./src --max-depth 5 --max-context 10000 --min-cohesion 0.6
 
- # Export to JSON (saved to .aiready/ by default)
+ # Export to JSON for full details (saved to .aiready/ by default)
  aiready-context ./src --output json
 
  # Or specify custom path
@@ -88,6 +91,8 @@ aiready-context ./src --output json --output-file custom-report.json
 
  **Smart defaults automatically adjust based on your repository size** to show ~10 most serious issues.
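
The smart defaults mentioned in that README line are tiered by repository size. The sketch below restates the thresholds from the `getSmartDefaults` implementation bundled later in this diff; it is illustrative, not part of the published README, and the `underFiles` field name is invented here (the implementation simply branches on the number of scanned files).

```ts
// Thresholds chosen by getSmartDefaults based on how many files are scanned.
// Values copied from the bundled 0.5.0 source shown later in this diff.
const smartDefaultTiers = [
  { underFiles: 100, maxDepth: 4, maxContextBudget: 8000, minCohesion: 0.5, maxFragmentation: 0.5 },
  { underFiles: 500, maxDepth: 5, maxContextBudget: 15000, minCohesion: 0.45, maxFragmentation: 0.6 },
  { underFiles: 2000, maxDepth: 7, maxContextBudget: 25000, minCohesion: 0.4, maxFragmentation: 0.7 },
  { underFiles: Infinity, maxDepth: 10, maxContextBudget: 40000, minCohesion: 0.35, maxFragmentation: 0.8 },
];
```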
 
+ > **💡 Tip:** By default, console output shows the top 10 results per category. Use `--max-results <number>` to see more, or use `--output json` to get complete details of all issues.
+
  ### Getting More/Fewer Results
 
  **Want to catch MORE potential issues?** (More sensitive, shows smaller problems)
@@ -132,9 +137,47 @@ aiready-context ./src --max-depth 5 --max-context 15000
  aiready-context ./src --max-depth 8 --max-context 25000 --min-cohesion 0.3
  ```
 
+ ## 📤 Output Options
+
+ ### Console Output (Default)
+
+ Shows a summary with top 10 results per category:
+
+ ```bash
+ # Default - shows top 10 items
+ aiready-context ./src
+
+ # Show more items (e.g., top 25)
+ aiready-context ./src --max-results 25
+
+ # Show all items (use a large number)
+ aiready-context ./src --max-results 999
+ ```
+
+ ### JSON Output
+
+ Get complete details of **all** issues (not limited to 10):
+
+ ```bash
+ # Generate JSON with all issues
+ aiready-context ./src --output json
+
+ # Custom output path
+ aiready-context ./src --output json --output-file reports/analysis.json
+ ```
+
+ ### HTML Report
+
+ Visual report with charts and detailed breakdown:
+
+ ```bash
  # Generate HTML report
  aiready-context ./src --output html --output-file report.html
+ ```
+
+ ### Include/Exclude Patterns
 
+ ```bash
  # Include/exclude patterns
  aiready-context ./src --exclude "**/test/**,**/*.test.ts"
  ```
@@ -154,12 +197,29 @@ Create an `aiready.json` or `aiready.config.json` file in your project root:
  "maxDepth": 4,
  "maxContextBudget": 8000,
  "minCohesion": 0.7,
- "includeNodeModules": false
+ "maxFragmentation": 0.6,
+ "focus": "all",
+ "maxResults": 10
  }
+ },
+ "output": {
+ "format": "console"
  }
  }
  ```
 
+ **Configuration Options:**
+
+ | Option | Type | Default | Description |
+ |--------|------|---------|-------------|
+ | `maxDepth` | number | `5` | Max acceptable import depth |
+ | `maxContextBudget` | number | `10000` | Max acceptable token budget |
+ | `minCohesion` | number | `0.6` | Min acceptable cohesion score (0-1) |
+ | `maxFragmentation` | number | `0.5` | Max acceptable fragmentation (0-1) |
+ | `focus` | string | `'all'` | Focus: `'fragmentation'`, `'cohesion'`, `'depth'`, `'all'` |
+ | `maxResults` | number | `10` | Max results per category in console |
+ | `includeNodeModules` | boolean | `false` | Include node_modules in analysis |
+
  ### Sample Output
 
  ```bash
@@ -0,0 +1,610 @@
+ // src/index.ts
+ import { scanFiles, readFileContent } from "@aiready/core";
+
+ // src/analyzer.ts
+ import { estimateTokens } from "@aiready/core";
+ function buildDependencyGraph(files) {
+ const nodes = /* @__PURE__ */ new Map();
+ const edges = /* @__PURE__ */ new Map();
+ for (const { file, content } of files) {
+ const imports = extractImportsFromContent(content);
+ const exports = extractExports(content);
+ const tokenCost = estimateTokens(content);
+ const linesOfCode = content.split("\n").length;
+ nodes.set(file, {
+ file,
+ imports,
+ exports,
+ tokenCost,
+ linesOfCode
+ });
+ edges.set(file, new Set(imports));
+ }
+ return { nodes, edges };
+ }
+ function extractImportsFromContent(content) {
+ const imports = [];
+ const patterns = [
+ /import\s+.*?\s+from\s+['"](.+?)['"]/g,
+ // import ... from '...'
+ /import\s+['"](.+?)['"]/g,
+ // import '...'
+ /require\(['"](.+?)['"]\)/g
+ // require('...')
+ ];
+ for (const pattern of patterns) {
+ let match;
+ while ((match = pattern.exec(content)) !== null) {
+ const importPath = match[1];
+ if (importPath && !importPath.startsWith("@") && !importPath.startsWith("node:")) {
+ imports.push(importPath);
+ }
+ }
+ }
+ return [...new Set(imports)];
+ }
+ function calculateImportDepth(file, graph, visited = /* @__PURE__ */ new Set(), depth = 0) {
+ if (visited.has(file)) {
+ return depth;
+ }
+ const dependencies = graph.edges.get(file);
+ if (!dependencies || dependencies.size === 0) {
+ return depth;
+ }
+ visited.add(file);
+ let maxDepth = depth;
+ for (const dep of dependencies) {
+ const depDepth = calculateImportDepth(dep, graph, visited, depth + 1);
+ maxDepth = Math.max(maxDepth, depDepth);
+ }
+ visited.delete(file);
+ return maxDepth;
+ }
+ function getTransitiveDependencies(file, graph, visited = /* @__PURE__ */ new Set()) {
+ if (visited.has(file)) {
+ return [];
+ }
+ visited.add(file);
+ const dependencies = graph.edges.get(file);
+ if (!dependencies || dependencies.size === 0) {
+ return [];
+ }
+ const allDeps = [];
+ for (const dep of dependencies) {
+ allDeps.push(dep);
+ allDeps.push(...getTransitiveDependencies(dep, graph, visited));
+ }
+ return [...new Set(allDeps)];
+ }
+ function calculateContextBudget(file, graph) {
+ const node = graph.nodes.get(file);
+ if (!node) return 0;
+ let totalTokens = node.tokenCost;
+ const deps = getTransitiveDependencies(file, graph);
+ for (const dep of deps) {
+ const depNode = graph.nodes.get(dep);
+ if (depNode) {
+ totalTokens += depNode.tokenCost;
+ }
+ }
+ return totalTokens;
+ }
+ function detectCircularDependencies(graph) {
+ const cycles = [];
+ const visited = /* @__PURE__ */ new Set();
+ const recursionStack = /* @__PURE__ */ new Set();
+ function dfs(file, path) {
+ if (recursionStack.has(file)) {
+ const cycleStart = path.indexOf(file);
+ if (cycleStart !== -1) {
+ cycles.push([...path.slice(cycleStart), file]);
+ }
+ return;
+ }
+ if (visited.has(file)) {
+ return;
+ }
+ visited.add(file);
+ recursionStack.add(file);
+ path.push(file);
+ const dependencies = graph.edges.get(file);
+ if (dependencies) {
+ for (const dep of dependencies) {
+ dfs(dep, [...path]);
+ }
+ }
+ recursionStack.delete(file);
+ }
+ for (const file of graph.nodes.keys()) {
+ if (!visited.has(file)) {
+ dfs(file, []);
+ }
+ }
+ return cycles;
+ }
+ function calculateCohesion(exports) {
+ if (exports.length === 0) return 1;
+ if (exports.length === 1) return 1;
+ const domains = exports.map((e) => e.inferredDomain || "unknown");
+ const domainCounts = /* @__PURE__ */ new Map();
+ for (const domain of domains) {
+ domainCounts.set(domain, (domainCounts.get(domain) || 0) + 1);
+ }
+ const total = domains.length;
+ let entropy = 0;
+ for (const count of domainCounts.values()) {
+ const p = count / total;
+ if (p > 0) {
+ entropy -= p * Math.log2(p);
+ }
+ }
+ const maxEntropy = Math.log2(total);
+ return maxEntropy > 0 ? 1 - entropy / maxEntropy : 1;
+ }
+ function calculateFragmentation(files, domain) {
+ if (files.length <= 1) return 0;
+ const directories = new Set(files.map((f) => f.split("/").slice(0, -1).join("/")));
+ return (directories.size - 1) / (files.length - 1);
+ }
+ function detectModuleClusters(graph) {
+ const domainMap = /* @__PURE__ */ new Map();
+ for (const [file, node] of graph.nodes.entries()) {
+ const domains = node.exports.map((e) => e.inferredDomain || "unknown");
+ const primaryDomain = domains[0] || "unknown";
+ if (!domainMap.has(primaryDomain)) {
+ domainMap.set(primaryDomain, []);
+ }
+ domainMap.get(primaryDomain).push(file);
+ }
+ const clusters = [];
+ for (const [domain, files] of domainMap.entries()) {
+ if (files.length < 2) continue;
+ const totalTokens = files.reduce((sum, file) => {
+ const node = graph.nodes.get(file);
+ return sum + (node?.tokenCost || 0);
+ }, 0);
+ const fragmentationScore = calculateFragmentation(files, domain);
+ const avgCohesion = files.reduce((sum, file) => {
+ const node = graph.nodes.get(file);
+ return sum + (node ? calculateCohesion(node.exports) : 0);
+ }, 0) / files.length;
+ const targetFiles = Math.max(1, Math.ceil(files.length / 3));
+ const consolidationPlan = generateConsolidationPlan(
+ domain,
+ files,
+ targetFiles
+ );
+ clusters.push({
+ domain,
+ files,
+ totalTokens,
+ fragmentationScore,
+ avgCohesion,
+ suggestedStructure: {
+ targetFiles,
+ consolidationPlan
+ }
+ });
+ }
+ return clusters.sort((a, b) => b.fragmentationScore - a.fragmentationScore);
+ }
+ function extractExports(content) {
+ const exports = [];
+ const patterns = [
+ /export\s+function\s+(\w+)/g,
+ /export\s+class\s+(\w+)/g,
+ /export\s+const\s+(\w+)/g,
+ /export\s+type\s+(\w+)/g,
+ /export\s+interface\s+(\w+)/g,
+ /export\s+default/g
+ ];
+ const types = [
+ "function",
+ "class",
+ "const",
+ "type",
+ "interface",
+ "default"
+ ];
+ patterns.forEach((pattern, index) => {
+ let match;
+ while ((match = pattern.exec(content)) !== null) {
+ const name = match[1] || "default";
+ const type = types[index];
+ const inferredDomain = inferDomain(name);
+ exports.push({ name, type, inferredDomain });
+ }
+ });
+ return exports;
+ }
+ function inferDomain(name) {
+ const lower = name.toLowerCase();
+ const domainKeywords = [
+ "user",
+ "auth",
+ "order",
+ "product",
+ "payment",
+ "cart",
+ "invoice",
+ "customer",
+ "admin",
+ "api",
+ "util",
+ "helper",
+ "config",
+ "service",
+ "repository",
+ "controller",
+ "model",
+ "view"
+ ];
+ for (const keyword of domainKeywords) {
+ if (lower.includes(keyword)) {
+ return keyword;
+ }
+ }
+ return "unknown";
+ }
+ function generateConsolidationPlan(domain, files, targetFiles) {
+ const plan = [];
+ if (files.length <= targetFiles) {
+ return [`No consolidation needed for ${domain}`];
+ }
+ plan.push(
+ `Consolidate ${files.length} ${domain} files into ${targetFiles} cohesive file(s):`
+ );
+ const dirGroups = /* @__PURE__ */ new Map();
+ for (const file of files) {
+ const dir = file.split("/").slice(0, -1).join("/");
+ if (!dirGroups.has(dir)) {
+ dirGroups.set(dir, []);
+ }
+ dirGroups.get(dir).push(file);
+ }
+ plan.push(`1. Create unified ${domain} module file`);
+ plan.push(
+ `2. Move related functionality from ${files.length} scattered files`
+ );
+ plan.push(`3. Update imports in dependent files`);
+ plan.push(
+ `4. Remove old files after consolidation (verify with tests first)`
+ );
+ return plan;
+ }
+
+ // src/index.ts
+ async function getSmartDefaults(directory, userOptions) {
+ const files = await scanFiles({
+ rootDir: directory,
+ include: userOptions.include,
+ exclude: userOptions.exclude
+ });
+ const estimatedBlocks = files.length;
+ let maxDepth;
+ let maxContextBudget;
+ let minCohesion;
+ let maxFragmentation;
+ if (estimatedBlocks < 100) {
+ maxDepth = 4;
+ maxContextBudget = 8e3;
+ minCohesion = 0.5;
+ maxFragmentation = 0.5;
+ } else if (estimatedBlocks < 500) {
+ maxDepth = 5;
+ maxContextBudget = 15e3;
+ minCohesion = 0.45;
+ maxFragmentation = 0.6;
+ } else if (estimatedBlocks < 2e3) {
+ maxDepth = 7;
+ maxContextBudget = 25e3;
+ minCohesion = 0.4;
+ maxFragmentation = 0.7;
+ } else {
+ maxDepth = 10;
+ maxContextBudget = 4e4;
+ minCohesion = 0.35;
+ maxFragmentation = 0.8;
+ }
+ return {
+ maxDepth,
+ maxContextBudget,
+ minCohesion,
+ maxFragmentation,
+ focus: "all",
+ includeNodeModules: false,
+ rootDir: userOptions.rootDir || directory,
+ include: userOptions.include,
+ exclude: userOptions.exclude
+ };
+ }
+ async function analyzeContext(options) {
+ const {
+ maxDepth = 5,
+ maxContextBudget = 1e4,
+ minCohesion = 0.6,
+ maxFragmentation = 0.5,
+ focus = "all",
+ includeNodeModules = false,
+ ...scanOptions
+ } = options;
+ const files = await scanFiles({
+ ...scanOptions,
+ // Only add node_modules to exclude if includeNodeModules is false
+ // The DEFAULT_EXCLUDE already includes node_modules, so this is only needed
+ // if user overrides the default exclude list
+ exclude: includeNodeModules && scanOptions.exclude ? scanOptions.exclude.filter((pattern) => pattern !== "**/node_modules/**") : scanOptions.exclude
+ });
+ const fileContents = await Promise.all(
+ files.map(async (file) => ({
+ file,
+ content: await readFileContent(file)
+ }))
+ );
+ const graph = buildDependencyGraph(fileContents);
+ const circularDeps = detectCircularDependencies(graph);
+ const clusters = detectModuleClusters(graph);
+ const fragmentationMap = /* @__PURE__ */ new Map();
+ for (const cluster of clusters) {
+ for (const file of cluster.files) {
+ fragmentationMap.set(file, cluster.fragmentationScore);
+ }
+ }
+ const results = [];
+ for (const { file } of fileContents) {
+ const node = graph.nodes.get(file);
+ if (!node) continue;
+ const importDepth = focus === "depth" || focus === "all" ? calculateImportDepth(file, graph) : 0;
+ const dependencyList = focus === "depth" || focus === "all" ? getTransitiveDependencies(file, graph) : [];
+ const contextBudget = focus === "all" ? calculateContextBudget(file, graph) : node.tokenCost;
+ const cohesionScore = focus === "cohesion" || focus === "all" ? calculateCohesion(node.exports) : 1;
+ const fragmentationScore = fragmentationMap.get(file) || 0;
+ const relatedFiles = [];
+ for (const cluster of clusters) {
+ if (cluster.files.includes(file)) {
+ relatedFiles.push(...cluster.files.filter((f) => f !== file));
+ break;
+ }
+ }
+ const { severity, issues, recommendations, potentialSavings } = analyzeIssues({
+ file,
+ importDepth,
+ contextBudget,
+ cohesionScore,
+ fragmentationScore,
+ maxDepth,
+ maxContextBudget,
+ minCohesion,
+ maxFragmentation,
+ circularDeps
+ });
+ const domains = [
+ ...new Set(node.exports.map((e) => e.inferredDomain || "unknown"))
+ ];
+ results.push({
+ file,
+ tokenCost: node.tokenCost,
+ linesOfCode: node.linesOfCode,
+ importDepth,
+ dependencyCount: dependencyList.length,
+ dependencyList,
+ circularDeps: circularDeps.filter((cycle) => cycle.includes(file)),
+ cohesionScore,
+ domains,
+ exportCount: node.exports.length,
+ contextBudget,
+ fragmentationScore,
+ relatedFiles,
+ severity,
+ issues,
+ recommendations,
+ potentialSavings
+ });
+ }
+ const issuesOnly = results.filter((r) => r.severity !== "info");
+ const sorted = issuesOnly.sort((a, b) => {
+ const severityOrder = { critical: 0, major: 1, minor: 2, info: 3 };
+ const severityDiff = severityOrder[a.severity] - severityOrder[b.severity];
+ if (severityDiff !== 0) return severityDiff;
+ return b.contextBudget - a.contextBudget;
+ });
+ return sorted.length > 0 ? sorted : results;
+ }
+ function generateSummary(results) {
+ if (results.length === 0) {
+ return {
+ totalFiles: 0,
+ totalTokens: 0,
+ avgContextBudget: 0,
+ maxContextBudget: 0,
+ avgImportDepth: 0,
+ maxImportDepth: 0,
+ deepFiles: [],
+ avgFragmentation: 0,
+ fragmentedModules: [],
+ avgCohesion: 0,
+ lowCohesionFiles: [],
+ criticalIssues: 0,
+ majorIssues: 0,
+ minorIssues: 0,
+ totalPotentialSavings: 0,
+ topExpensiveFiles: []
+ };
+ }
+ const totalFiles = results.length;
+ const totalTokens = results.reduce((sum, r) => sum + r.tokenCost, 0);
+ const totalContextBudget = results.reduce(
+ (sum, r) => sum + r.contextBudget,
+ 0
+ );
+ const avgContextBudget = totalContextBudget / totalFiles;
+ const maxContextBudget = Math.max(...results.map((r) => r.contextBudget));
+ const avgImportDepth = results.reduce((sum, r) => sum + r.importDepth, 0) / totalFiles;
+ const maxImportDepth = Math.max(...results.map((r) => r.importDepth));
+ const deepFiles = results.filter((r) => r.importDepth >= 5).map((r) => ({ file: r.file, depth: r.importDepth })).sort((a, b) => b.depth - a.depth).slice(0, 10);
+ const avgFragmentation = results.reduce((sum, r) => sum + r.fragmentationScore, 0) / totalFiles;
+ const moduleMap = /* @__PURE__ */ new Map();
+ for (const result of results) {
+ for (const domain of result.domains) {
+ if (!moduleMap.has(domain)) {
+ moduleMap.set(domain, []);
+ }
+ moduleMap.get(domain).push(result);
+ }
+ }
+ const fragmentedModules = [];
+ for (const [domain, files] of moduleMap.entries()) {
+ if (files.length < 2) continue;
+ const fragmentationScore = files.reduce((sum, f) => sum + f.fragmentationScore, 0) / files.length;
+ if (fragmentationScore < 0.3) continue;
+ const totalTokens2 = files.reduce((sum, f) => sum + f.tokenCost, 0);
+ const avgCohesion2 = files.reduce((sum, f) => sum + f.cohesionScore, 0) / files.length;
+ const targetFiles = Math.max(1, Math.ceil(files.length / 3));
+ fragmentedModules.push({
+ domain,
+ files: files.map((f) => f.file),
+ totalTokens: totalTokens2,
+ fragmentationScore,
+ avgCohesion: avgCohesion2,
+ suggestedStructure: {
+ targetFiles,
+ consolidationPlan: [
+ `Consolidate ${files.length} ${domain} files into ${targetFiles} cohesive file(s)`,
+ `Current token cost: ${totalTokens2.toLocaleString()}`,
+ `Estimated savings: ${Math.floor(totalTokens2 * 0.3).toLocaleString()} tokens (30%)`
+ ]
+ }
+ });
+ }
+ fragmentedModules.sort((a, b) => b.fragmentationScore - a.fragmentationScore);
+ const avgCohesion = results.reduce((sum, r) => sum + r.cohesionScore, 0) / totalFiles;
+ const lowCohesionFiles = results.filter((r) => r.cohesionScore < 0.6).map((r) => ({ file: r.file, score: r.cohesionScore })).sort((a, b) => a.score - b.score).slice(0, 10);
+ const criticalIssues = results.filter((r) => r.severity === "critical").length;
+ const majorIssues = results.filter((r) => r.severity === "major").length;
+ const minorIssues = results.filter((r) => r.severity === "minor").length;
+ const totalPotentialSavings = results.reduce(
+ (sum, r) => sum + r.potentialSavings,
+ 0
+ );
+ const topExpensiveFiles = results.sort((a, b) => b.contextBudget - a.contextBudget).slice(0, 10).map((r) => ({
+ file: r.file,
+ contextBudget: r.contextBudget,
+ severity: r.severity
+ }));
+ return {
+ totalFiles,
+ totalTokens,
+ avgContextBudget,
+ maxContextBudget,
+ avgImportDepth,
+ maxImportDepth,
+ deepFiles,
+ avgFragmentation,
+ fragmentedModules: fragmentedModules.slice(0, 10),
+ avgCohesion,
+ lowCohesionFiles,
+ criticalIssues,
+ majorIssues,
+ minorIssues,
+ totalPotentialSavings,
+ topExpensiveFiles
+ };
+ }
+ function analyzeIssues(params) {
+ const {
+ file,
+ importDepth,
+ contextBudget,
+ cohesionScore,
+ fragmentationScore,
+ maxDepth,
+ maxContextBudget,
+ minCohesion,
+ maxFragmentation,
+ circularDeps
+ } = params;
+ const issues = [];
+ const recommendations = [];
+ let severity = "info";
+ let potentialSavings = 0;
+ if (circularDeps.length > 0) {
+ severity = "critical";
+ issues.push(
+ `Part of ${circularDeps.length} circular dependency chain(s)`
+ );
+ recommendations.push("Break circular dependencies by extracting interfaces or using dependency injection");
+ potentialSavings += contextBudget * 0.2;
+ }
+ if (importDepth > maxDepth * 1.5) {
+ severity = severity === "critical" ? "critical" : "critical";
+ issues.push(`Import depth ${importDepth} exceeds limit by 50%`);
+ recommendations.push("Flatten dependency tree or use facade pattern");
+ potentialSavings += contextBudget * 0.3;
+ } else if (importDepth > maxDepth) {
+ severity = severity === "critical" ? "critical" : "major";
+ issues.push(`Import depth ${importDepth} exceeds recommended maximum ${maxDepth}`);
+ recommendations.push("Consider reducing dependency depth");
+ potentialSavings += contextBudget * 0.15;
+ }
+ if (contextBudget > maxContextBudget * 1.5) {
+ severity = severity === "critical" ? "critical" : "critical";
+ issues.push(`Context budget ${contextBudget.toLocaleString()} tokens is 50% over limit`);
+ recommendations.push("Split into smaller modules or reduce dependency tree");
+ potentialSavings += contextBudget * 0.4;
+ } else if (contextBudget > maxContextBudget) {
+ severity = severity === "critical" || severity === "major" ? severity : "major";
+ issues.push(`Context budget ${contextBudget.toLocaleString()} exceeds ${maxContextBudget.toLocaleString()}`);
+ recommendations.push("Reduce file size or dependencies");
+ potentialSavings += contextBudget * 0.2;
+ }
+ if (cohesionScore < minCohesion * 0.5) {
+ severity = severity === "critical" ? "critical" : "major";
+ issues.push(`Very low cohesion (${(cohesionScore * 100).toFixed(0)}%) - mixed concerns`);
+ recommendations.push("Split file by domain - separate unrelated functionality");
+ potentialSavings += contextBudget * 0.25;
+ } else if (cohesionScore < minCohesion) {
+ severity = severity === "critical" || severity === "major" ? severity : "minor";
+ issues.push(`Low cohesion (${(cohesionScore * 100).toFixed(0)}%)`);
+ recommendations.push("Consider grouping related exports together");
+ potentialSavings += contextBudget * 0.1;
+ }
+ if (fragmentationScore > maxFragmentation) {
+ severity = severity === "critical" || severity === "major" ? severity : "minor";
+ issues.push(`High fragmentation (${(fragmentationScore * 100).toFixed(0)}%) - scattered implementation`);
+ recommendations.push("Consolidate with related files in same domain");
+ potentialSavings += contextBudget * 0.3;
+ }
+ if (issues.length === 0) {
+ issues.push("No significant issues detected");
+ recommendations.push("File is well-structured for AI context usage");
+ }
+ if (isBuildArtifact(file)) {
+ issues.push("Detected build artifact (bundled/output file)");
+ recommendations.push("Exclude build outputs (e.g., cdk.out, dist, build, .next) from analysis");
+ severity = downgradeSeverity(severity);
+ potentialSavings = 0;
+ }
+ return { severity, issues, recommendations, potentialSavings: Math.floor(potentialSavings) };
+ }
+ function isBuildArtifact(filePath) {
+ const lower = filePath.toLowerCase();
+ return lower.includes("/node_modules/") || lower.includes("/dist/") || lower.includes("/build/") || lower.includes("/out/") || lower.includes("/output/") || lower.includes("/cdk.out/") || lower.includes("/.next/") || /\/asset\.[^/]+\//.test(lower);
+ }
+ function downgradeSeverity(s) {
+ switch (s) {
+ case "critical":
+ return "minor";
+ case "major":
+ return "minor";
+ case "minor":
+ return "info";
+ default:
+ return "info";
+ }
+ }
+
+ export {
+ getSmartDefaults,
+ analyzeContext,
+ generateSummary
+ };
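
The export block above is the public surface of this new chunk. For orientation, a minimal usage sketch, assuming the package is consumed from an ESM or TypeScript module; the option and summary field names come from the bundled source above, while the directory and exclude pattern are placeholders.

```ts
import { analyzeContext, generateSummary, getSmartDefaults } from "@aiready/context-analyzer";

// Derive size-aware thresholds for a directory, then run the analysis with them.
const defaults = await getSmartDefaults("./src", { exclude: ["**/*.test.ts"] });
const results = await analyzeContext({ ...defaults, focus: "all" });

// Roll the per-file results up into the summary that the CLI reporters print.
const summary = generateSummary(results);
console.log(`critical: ${summary.criticalIssues}, potential savings: ${summary.totalPotentialSavings} tokens`);
```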
package/dist/cli.js CHANGED
@@ -314,7 +314,10 @@ async function analyzeContext(options) {
  } = options;
  const files = await (0, import_core2.scanFiles)({
  ...scanOptions,
- exclude: includeNodeModules ? scanOptions.exclude : [...scanOptions.exclude || [], "**/node_modules/**"]
+ // Only add node_modules to exclude if includeNodeModules is false
+ // The DEFAULT_EXCLUDE already includes node_modules, so this is only needed
+ // if user overrides the default exclude list
+ exclude: includeNodeModules && scanOptions.exclude ? scanOptions.exclude.filter((pattern) => pattern !== "**/node_modules/**") : scanOptions.exclude
  });
  const fileContents = await Promise.all(
  files.map(async (file) => ({
package/dist/cli.mjs CHANGED
@@ -2,7 +2,7 @@
  import {
  analyzeContext,
  generateSummary
- } from "./chunk-45P4RDYP.mjs";
+ } from "./chunk-NJUW6VED.mjs";
 
  // src/cli.ts
  import { Command } from "commander";
package/dist/index.js CHANGED
@@ -356,7 +356,10 @@ async function analyzeContext(options) {
  } = options;
  const files = await (0, import_core2.scanFiles)({
  ...scanOptions,
- exclude: includeNodeModules ? scanOptions.exclude : [...scanOptions.exclude || [], "**/node_modules/**"]
+ // Only add node_modules to exclude if includeNodeModules is false
+ // The DEFAULT_EXCLUDE already includes node_modules, so this is only needed
+ // if user overrides the default exclude list
+ exclude: includeNodeModules && scanOptions.exclude ? scanOptions.exclude.filter((pattern) => pattern !== "**/node_modules/**") : scanOptions.exclude
  });
  const fileContents = await Promise.all(
  files.map(async (file) => ({
package/dist/index.mjs CHANGED
@@ -2,7 +2,7 @@ import {
  analyzeContext,
  generateSummary,
  getSmartDefaults
- } from "./chunk-45P4RDYP.mjs";
+ } from "./chunk-NJUW6VED.mjs";
 
  export {
  analyzeContext,
  generateSummary,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aiready/context-analyzer",
- "version": "0.4.5",
+ "version": "0.5.0",
  "description": "AI context window cost analysis - detect fragmented code, deep import chains, and expensive context budgets",
  "main": "./dist/index.js",
  "module": "./dist/index.mjs",
@@ -50,7 +50,7 @@
  "commander": "^12.1.0",
  "chalk": "^5.3.0",
  "prompts": "^2.4.2",
- "@aiready/core": "0.5.5"
+ "@aiready/core": "0.5.6"
  },
  "devDependencies": {
  "@types/node": "^22.10.2",
package/src/index.ts CHANGED
@@ -100,11 +100,15 @@ export async function analyzeContext(
  } = options;
 
  // Scan files
+ // Note: scanFiles now automatically merges user excludes with DEFAULT_EXCLUDE
  const files = await scanFiles({
  ...scanOptions,
- exclude: includeNodeModules
- ? scanOptions.exclude
- : [...(scanOptions.exclude || []), '**/node_modules/**'],
+ // Only add node_modules to exclude if includeNodeModules is false
+ // The DEFAULT_EXCLUDE already includes node_modules, so this is only needed
+ // if user overrides the default exclude list
+ exclude: includeNodeModules && scanOptions.exclude
+ ? scanOptions.exclude.filter(pattern => pattern !== '**/node_modules/**')
+ : scanOptions.exclude,
  });
 
  // Read all file contents
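
The hunk above changes how the exclude list handed to `scanFiles` is computed. A small sketch of the before and after semantics, assuming a hypothetical user-supplied exclude list; per the comments added in this release, `scanFiles` in `@aiready/core` merges user excludes with its `DEFAULT_EXCLUDE`, which already covers `node_modules`.

```ts
// Hypothetical caller input, for illustration only.
const scanOptions = { exclude: ["**/fixtures/**", "**/node_modules/**"] };

// 0.4.5 behavior: node_modules was appended whenever includeNodeModules was false.
const oldExclude = (includeNodeModules: boolean) =>
  includeNodeModules
    ? scanOptions.exclude
    : [...(scanOptions.exclude ?? []), "**/node_modules/**"];

// 0.5.0 behavior: the user's list is passed through as-is; the pattern is only
// stripped when includeNodeModules is true and the caller supplied excludes.
const newExclude = (includeNodeModules: boolean) =>
  includeNodeModules && scanOptions.exclude
    ? scanOptions.exclude.filter((p) => p !== "**/node_modules/**")
    : scanOptions.exclude;
```

With default options the net effect should be unchanged, since `DEFAULT_EXCLUDE` already contains `**/node_modules/**`; the difference shows up only when a caller overrides the exclude list.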