@aiready/context-analyzer 0.9.41 → 0.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/.turbo/turbo-build.log +10 -10
  2. package/.turbo/turbo-test.log +21 -20
  3. package/dist/chunk-4SYIJ7CU.mjs +1538 -0
  4. package/dist/chunk-4XQVYYPC.mjs +1470 -0
  5. package/dist/chunk-5CLU3HYU.mjs +1475 -0
  6. package/dist/chunk-5K73Q3OQ.mjs +1520 -0
  7. package/dist/chunk-6AVS4KTM.mjs +1536 -0
  8. package/dist/chunk-6I4552YB.mjs +1467 -0
  9. package/dist/chunk-6LPITDKG.mjs +1539 -0
  10. package/dist/chunk-AECWO7NQ.mjs +1539 -0
  11. package/dist/chunk-AJC3FR6G.mjs +1509 -0
  12. package/dist/chunk-CVGIDSMN.mjs +1522 -0
  13. package/dist/chunk-DXG5NIYL.mjs +1527 -0
  14. package/dist/chunk-G3CCJCBI.mjs +1521 -0
  15. package/dist/chunk-GFADGYXZ.mjs +1752 -0
  16. package/dist/chunk-GTRIBVS6.mjs +1467 -0
  17. package/dist/chunk-H4HWBQU6.mjs +1530 -0
  18. package/dist/chunk-JH535NPP.mjs +1619 -0
  19. package/dist/chunk-KGFWKSGJ.mjs +1442 -0
  20. package/dist/chunk-N2GQWNFG.mjs +1527 -0
  21. package/dist/chunk-NQA3F2HJ.mjs +1532 -0
  22. package/dist/chunk-NXXQ2U73.mjs +1467 -0
  23. package/dist/chunk-QDGPR3L6.mjs +1518 -0
  24. package/dist/chunk-SAVOSPM3.mjs +1522 -0
  25. package/dist/chunk-SIX4KMF2.mjs +1468 -0
  26. package/dist/chunk-SPAM2YJE.mjs +1537 -0
  27. package/dist/chunk-UG7OPVHB.mjs +1521 -0
  28. package/dist/chunk-VIJTZPBI.mjs +1470 -0
  29. package/dist/chunk-W37E7MW5.mjs +1403 -0
  30. package/dist/chunk-W76FEISE.mjs +1538 -0
  31. package/dist/chunk-WCFQYXQA.mjs +1532 -0
  32. package/dist/chunk-XY77XABG.mjs +1545 -0
  33. package/dist/chunk-YCGDIGOG.mjs +1467 -0
  34. package/dist/cli.js +768 -1160
  35. package/dist/cli.mjs +1 -1
  36. package/dist/index.d.mts +196 -64
  37. package/dist/index.d.ts +196 -64
  38. package/dist/index.js +937 -1209
  39. package/dist/index.mjs +65 -3
  40. package/package.json +2 -2
  41. package/src/__tests__/contract.test.ts +38 -0
  42. package/src/analyzer.ts +143 -2177
  43. package/src/ast-utils.ts +94 -0
  44. package/src/classifier.ts +497 -0
  45. package/src/cluster-detector.ts +100 -0
  46. package/src/defaults.ts +59 -0
  47. package/src/graph-builder.ts +272 -0
  48. package/src/index.ts +30 -519
  49. package/src/metrics.ts +231 -0
  50. package/src/remediation.ts +139 -0
  51. package/src/scoring.ts +12 -34
  52. package/src/semantic-analysis.ts +192 -126
  53. package/src/summary.ts +168 -0
@@ -0,0 +1,59 @@
1
+ import { scanFiles } from '@aiready/core';
2
+ import type { ContextAnalyzerOptions } from './types';
3
+
4
+ /**
5
+ * Generate smart defaults for context analysis based on repository size
6
+ * Automatically tunes thresholds to target ~10 most serious issues
7
+ */
8
+ export async function getSmartDefaults(
9
+ directory: string,
10
+ userOptions: Partial<ContextAnalyzerOptions>
11
+ ): Promise<ContextAnalyzerOptions> {
12
+ // Estimate repository size by scanning files
13
+ const files = await scanFiles({
14
+ rootDir: directory,
15
+ include: userOptions.include,
16
+ exclude: userOptions.exclude,
17
+ });
18
+
19
+ const estimatedBlocks = files.length;
20
+
21
+ let maxDepth: number;
22
+ let maxContextBudget: number;
23
+ let minCohesion: number;
24
+ let maxFragmentation: number;
25
+
26
+ if (estimatedBlocks < 100) {
27
+ maxDepth = 4;
28
+ maxContextBudget = 8000;
29
+ minCohesion = 0.5;
30
+ maxFragmentation = 0.5;
31
+ } else if (estimatedBlocks < 500) {
32
+ maxDepth = 5;
33
+ maxContextBudget = 15000;
34
+ minCohesion = 0.45;
35
+ maxFragmentation = 0.6;
36
+ } else if (estimatedBlocks < 2000) {
37
+ maxDepth = 7;
38
+ maxContextBudget = 25000;
39
+ minCohesion = 0.4;
40
+ maxFragmentation = 0.7;
41
+ } else {
42
+ maxDepth = 10;
43
+ maxContextBudget = 40000;
44
+ minCohesion = 0.35;
45
+ maxFragmentation = 0.8;
46
+ }
47
+
48
+ return {
49
+ maxDepth,
50
+ maxContextBudget,
51
+ minCohesion,
52
+ maxFragmentation,
53
+ focus: 'all',
54
+ includeNodeModules: false,
55
+ rootDir: userOptions.rootDir || directory,
56
+ include: userOptions.include,
57
+ exclude: userOptions.exclude,
58
+ };
59
+ }
@@ -0,0 +1,272 @@
1
+ import { estimateTokens } from '@aiready/core';
2
+ import type { DependencyGraph, DependencyNode } from './types';
3
+ import {
4
+ buildCoUsageMatrix,
5
+ buildTypeGraph,
6
+ inferDomainFromSemantics,
7
+ } from './semantic-analysis';
8
+ import { extractExportsWithAST } from './ast-utils';
9
+
10
+ interface FileContent {
11
+ file: string;
12
+ content: string;
13
+ }
14
+
15
+ /**
16
+ * Auto-detect domain keywords from workspace folder structure
17
+ */
18
+ export function extractDomainKeywordsFromPaths(files: FileContent[]): string[] {
19
+ const folderNames = new Set<string>();
20
+
21
+ for (const { file } of files) {
22
+ const segments = file.split('/');
23
+ const skipFolders = new Set([
24
+ 'src',
25
+ 'lib',
26
+ 'dist',
27
+ 'build',
28
+ 'node_modules',
29
+ 'test',
30
+ 'tests',
31
+ '__tests__',
32
+ 'spec',
33
+ 'e2e',
34
+ 'scripts',
35
+ 'components',
36
+ 'utils',
37
+ 'helpers',
38
+ 'util',
39
+ 'helper',
40
+ 'api',
41
+ 'apis',
42
+ ]);
43
+
44
+ for (const segment of segments) {
45
+ const normalized = segment.toLowerCase();
46
+ if (
47
+ normalized &&
48
+ !skipFolders.has(normalized) &&
49
+ !normalized.includes('.')
50
+ ) {
51
+ folderNames.add(singularize(normalized));
52
+ }
53
+ }
54
+ }
55
+
56
+ return Array.from(folderNames);
57
+ }
58
+
59
+ /**
60
+ * Simple singularization for common English plurals
61
+ */
62
+ function singularize(word: string): string {
63
+ const irregulars: Record<string, string> = {
64
+ people: 'person',
65
+ children: 'child',
66
+ men: 'man',
67
+ women: 'woman',
68
+ };
69
+
70
+ if (irregulars[word]) return irregulars[word];
71
+ if (word.endsWith('ies')) return word.slice(0, -3) + 'y';
72
+ if (word.endsWith('ses')) return word.slice(0, -2);
73
+ if (word.endsWith('s') && word.length > 3) return word.slice(0, -1);
74
+
75
+ return word;
76
+ }
77
+
78
+ /**
79
+ * Build a dependency graph from file contents
80
+ */
81
+ export function buildDependencyGraph(
82
+ files: FileContent[],
83
+ options?: { domainKeywords?: string[] }
84
+ ): DependencyGraph {
85
+ const nodes = new Map<string, DependencyNode>();
86
+ const edges = new Map<string, Set<string>>();
87
+
88
+ const autoDetectedKeywords =
89
+ options?.domainKeywords ?? extractDomainKeywordsFromPaths(files);
90
+
91
+ for (const { file, content } of files) {
92
+ const imports = extractImportsFromContent(content);
93
+ const exports = extractExportsWithAST(
94
+ content,
95
+ file,
96
+ { domainKeywords: autoDetectedKeywords },
97
+ imports
98
+ );
99
+
100
+ const tokenCost = estimateTokens(content);
101
+ const linesOfCode = content.split('\n').length;
102
+
103
+ nodes.set(file, { file, imports, exports, tokenCost, linesOfCode });
104
+ edges.set(file, new Set(imports));
105
+ }
106
+
107
+ const graph: DependencyGraph = { nodes, edges };
108
+ const coUsageMatrix = buildCoUsageMatrix(graph);
109
+ const typeGraph = buildTypeGraph(graph);
110
+
111
+ graph.coUsageMatrix = coUsageMatrix;
112
+ graph.typeGraph = typeGraph;
113
+
114
+ for (const [file, node] of nodes) {
115
+ for (const exp of node.exports) {
116
+ const semanticAssignments = inferDomainFromSemantics(
117
+ file,
118
+ exp.name,
119
+ graph,
120
+ coUsageMatrix,
121
+ typeGraph,
122
+ exp.typeReferences
123
+ );
124
+ exp.domains = semanticAssignments;
125
+ if (semanticAssignments.length > 0) {
126
+ exp.inferredDomain = semanticAssignments[0].domain;
127
+ }
128
+ }
129
+ }
130
+
131
+ return graph;
132
+ }
133
+
134
+ /**
135
+ * Extract imports from file content using regex
136
+ */
137
+ export function extractImportsFromContent(content: string): string[] {
138
+ const imports: string[] = [];
139
+ const patterns = [
140
+ /import\s+.*?\s+from\s+['"](.+?)['"]/g,
141
+ /import\s+['"](.+?)['"]/g,
142
+ /require\(['"](.+?)['"]\)/g,
143
+ ];
144
+
145
+ for (const pattern of patterns) {
146
+ let match;
147
+ while ((match = pattern.exec(content)) !== null) {
148
+ const importPath = match[1];
149
+ if (importPath && !importPath.startsWith('node:')) {
150
+ imports.push(importPath);
151
+ }
152
+ }
153
+ }
154
+
155
+ return [...new Set(imports)];
156
+ }
157
+
158
+ /**
159
+ * Calculate the maximum depth of import tree for a file
160
+ */
161
+ export function calculateImportDepth(
162
+ file: string,
163
+ graph: DependencyGraph,
164
+ visited = new Set<string>(),
165
+ depth = 0
166
+ ): number {
167
+ if (visited.has(file)) return depth;
168
+
169
+ const dependencies = graph.edges.get(file);
170
+ if (!dependencies || dependencies.size === 0) return depth;
171
+
172
+ visited.add(file);
173
+ let maxDepth = depth;
174
+
175
+ for (const dep of dependencies) {
176
+ maxDepth = Math.max(
177
+ maxDepth,
178
+ calculateImportDepth(dep, graph, visited, depth + 1)
179
+ );
180
+ }
181
+
182
+ visited.delete(file);
183
+ return maxDepth;
184
+ }
185
+
186
+ /**
187
+ * Get all transitive dependencies for a file
188
+ */
189
+ export function getTransitiveDependencies(
190
+ file: string,
191
+ graph: DependencyGraph,
192
+ visited = new Set<string>()
193
+ ): string[] {
194
+ if (visited.has(file)) return [];
195
+
196
+ visited.add(file);
197
+ const dependencies = graph.edges.get(file);
198
+ if (!dependencies || dependencies.size === 0) return [];
199
+
200
+ const allDeps: string[] = [];
201
+ for (const dep of dependencies) {
202
+ allDeps.push(dep);
203
+ allDeps.push(...getTransitiveDependencies(dep, graph, visited));
204
+ }
205
+
206
+ return [...new Set(allDeps)];
207
+ }
208
+
209
+ /**
210
+ * Calculate total context budget (tokens needed to understand this file)
211
+ */
212
+ export function calculateContextBudget(
213
+ file: string,
214
+ graph: DependencyGraph
215
+ ): number {
216
+ const node = graph.nodes.get(file);
217
+ if (!node) return 0;
218
+
219
+ let totalTokens = node.tokenCost;
220
+ const deps = getTransitiveDependencies(file, graph);
221
+
222
+ for (const dep of deps) {
223
+ const depNode = graph.nodes.get(dep);
224
+ if (depNode) {
225
+ totalTokens += depNode.tokenCost;
226
+ }
227
+ }
228
+
229
+ return totalTokens;
230
+ }
231
+
232
+ /**
233
+ * Detect circular dependencies
234
+ */
235
+ export function detectCircularDependencies(graph: DependencyGraph): string[][] {
236
+ const cycles: string[][] = [];
237
+ const visited = new Set<string>();
238
+ const recursionStack = new Set<string>();
239
+
240
+ function dfs(file: string, path: string[]): void {
241
+ if (recursionStack.has(file)) {
242
+ const cycleStart = path.indexOf(file);
243
+ if (cycleStart !== -1) {
244
+ cycles.push([...path.slice(cycleStart), file]);
245
+ }
246
+ return;
247
+ }
248
+
249
+ if (visited.has(file)) return;
250
+
251
+ visited.add(file);
252
+ recursionStack.add(file);
253
+ path.push(file);
254
+
255
+ const dependencies = graph.edges.get(file);
256
+ if (dependencies) {
257
+ for (const dep of dependencies) {
258
+ dfs(dep, [...path]);
259
+ }
260
+ }
261
+
262
+ recursionStack.delete(file);
263
+ }
264
+
265
+ for (const file of graph.nodes.keys()) {
266
+ if (!visited.has(file)) {
267
+ dfs(file, []);
268
+ }
269
+ }
270
+
271
+ return cycles;
272
+ }