@aiready/context-analyzer 0.9.36 → 0.9.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +9 -9
- package/.turbo/turbo-lint.log +6 -0
- package/.turbo/turbo-test.log +23 -20
- package/dist/chunk-EBXG2Q5Y.mjs +2059 -0
- package/dist/cli.js +5 -3
- package/dist/cli.mjs +1 -1
- package/dist/index.js +5 -3
- package/dist/index.mjs +1 -1
- package/dist/python-context-TBI5FVFY.mjs +203 -0
- package/package.json +2 -2
- package/src/__tests__/file-classification.test.ts +1 -1
- package/src/analyzer.ts +7 -3
- package/src/analyzers/python-context.ts +1 -0
- package/src/index.ts +5 -5
- package/src/semantic-analysis.ts +2 -1
package/dist/cli.js
CHANGED
|
@@ -180,6 +180,7 @@ async function calculatePythonImportDepth(file, dependencyGraph, visited, depth
|
|
|
180
180
|
return maxDepth;
|
|
181
181
|
}
|
|
182
182
|
function estimateContextBudget(code, imports, dependencyGraph) {
|
|
183
|
+
void dependencyGraph;
|
|
183
184
|
let budget = (0, import_core3.estimateTokens)(code);
|
|
184
185
|
const avgTokensPerDep = 500;
|
|
185
186
|
budget += imports.length * avgTokensPerDep;
|
|
@@ -253,6 +254,7 @@ var import_core = require("@aiready/core");
|
|
|
253
254
|
function buildCoUsageMatrix(graph) {
|
|
254
255
|
const coUsageMatrix = /* @__PURE__ */ new Map();
|
|
255
256
|
for (const [sourceFile, node] of graph.nodes) {
|
|
257
|
+
void sourceFile;
|
|
256
258
|
const imports = node.imports;
|
|
257
259
|
for (let i = 0; i < imports.length; i++) {
|
|
258
260
|
const fileA = imports[i];
|
|
@@ -314,7 +316,7 @@ function inferDomainFromSemantics(file, exportName, graph, coUsageMatrix, typeGr
|
|
|
314
316
|
const assignments = [];
|
|
315
317
|
const domainSignals = /* @__PURE__ */ new Map();
|
|
316
318
|
const coUsages = coUsageMatrix.get(file) || /* @__PURE__ */ new Map();
|
|
317
|
-
const strongCoUsages = Array.from(coUsages.entries()).filter(([
|
|
319
|
+
const strongCoUsages = Array.from(coUsages.entries()).filter(([, count]) => count >= 3).map(([coFile]) => coFile);
|
|
318
320
|
for (const coFile of strongCoUsages) {
|
|
319
321
|
const coNode = graph.nodes.get(coFile);
|
|
320
322
|
if (coNode) {
|
|
@@ -430,10 +432,10 @@ function singularize(word) {
|
|
|
430
432
|
}
|
|
431
433
|
return word;
|
|
432
434
|
}
|
|
433
|
-
function buildDependencyGraph(files) {
|
|
435
|
+
function buildDependencyGraph(files, options) {
|
|
434
436
|
const nodes = /* @__PURE__ */ new Map();
|
|
435
437
|
const edges = /* @__PURE__ */ new Map();
|
|
436
|
-
const autoDetectedKeywords = extractDomainKeywordsFromPaths(files);
|
|
438
|
+
const autoDetectedKeywords = options?.domainKeywords ?? extractDomainKeywordsFromPaths(files);
|
|
437
439
|
void import_core.calculateImportSimilarity;
|
|
438
440
|
for (const { file, content } of files) {
|
|
439
441
|
const imports = extractImportsFromContent(content);
|
package/dist/cli.mjs
CHANGED
package/dist/index.js
CHANGED
|
@@ -180,6 +180,7 @@ async function calculatePythonImportDepth(file, dependencyGraph, visited, depth
|
|
|
180
180
|
return maxDepth;
|
|
181
181
|
}
|
|
182
182
|
function estimateContextBudget(code, imports, dependencyGraph) {
|
|
183
|
+
void dependencyGraph;
|
|
183
184
|
let budget = (0, import_core3.estimateTokens)(code);
|
|
184
185
|
const avgTokensPerDep = 500;
|
|
185
186
|
budget += imports.length * avgTokensPerDep;
|
|
@@ -267,6 +268,7 @@ var import_core = require("@aiready/core");
|
|
|
267
268
|
function buildCoUsageMatrix(graph) {
|
|
268
269
|
const coUsageMatrix = /* @__PURE__ */ new Map();
|
|
269
270
|
for (const [sourceFile, node] of graph.nodes) {
|
|
271
|
+
void sourceFile;
|
|
270
272
|
const imports = node.imports;
|
|
271
273
|
for (let i = 0; i < imports.length; i++) {
|
|
272
274
|
const fileA = imports[i];
|
|
@@ -347,7 +349,7 @@ function inferDomainFromSemantics(file, exportName, graph, coUsageMatrix, typeGr
|
|
|
347
349
|
const assignments = [];
|
|
348
350
|
const domainSignals = /* @__PURE__ */ new Map();
|
|
349
351
|
const coUsages = coUsageMatrix.get(file) || /* @__PURE__ */ new Map();
|
|
350
|
-
const strongCoUsages = Array.from(coUsages.entries()).filter(([
|
|
352
|
+
const strongCoUsages = Array.from(coUsages.entries()).filter(([, count]) => count >= 3).map(([coFile]) => coFile);
|
|
351
353
|
for (const coFile of strongCoUsages) {
|
|
352
354
|
const coNode = graph.nodes.get(coFile);
|
|
353
355
|
if (coNode) {
|
|
@@ -509,10 +511,10 @@ function singularize(word) {
|
|
|
509
511
|
}
|
|
510
512
|
return word;
|
|
511
513
|
}
|
|
512
|
-
function buildDependencyGraph(files) {
|
|
514
|
+
function buildDependencyGraph(files, options) {
|
|
513
515
|
const nodes = /* @__PURE__ */ new Map();
|
|
514
516
|
const edges = /* @__PURE__ */ new Map();
|
|
515
|
-
const autoDetectedKeywords = extractDomainKeywordsFromPaths(files);
|
|
517
|
+
const autoDetectedKeywords = options?.domainKeywords ?? extractDomainKeywordsFromPaths(files);
|
|
516
518
|
void import_core.calculateImportSimilarity;
|
|
517
519
|
for (const { file, content } of files) {
|
|
518
520
|
const imports = extractImportsFromContent(content);
|
package/dist/index.mjs
CHANGED
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
// src/analyzers/python-context.ts
|
|
2
|
+
import { getParser, estimateTokens } from "@aiready/core";
|
|
3
|
+
import { resolve, relative, dirname, join } from "path";
|
|
4
|
+
import fs from "fs";
|
|
5
|
+
async function analyzePythonContext(files, rootDir) {
|
|
6
|
+
const results = [];
|
|
7
|
+
const parser = getParser("dummy.py");
|
|
8
|
+
if (!parser) {
|
|
9
|
+
console.warn("Python parser not available");
|
|
10
|
+
return results;
|
|
11
|
+
}
|
|
12
|
+
const pythonFiles = files.filter((f) => f.toLowerCase().endsWith(".py"));
|
|
13
|
+
void relative;
|
|
14
|
+
void join;
|
|
15
|
+
const dependencyGraph = await buildPythonDependencyGraph(
|
|
16
|
+
pythonFiles,
|
|
17
|
+
rootDir
|
|
18
|
+
);
|
|
19
|
+
for (const file of pythonFiles) {
|
|
20
|
+
try {
|
|
21
|
+
const code = await fs.promises.readFile(file, "utf-8");
|
|
22
|
+
const result = parser.parse(code, file);
|
|
23
|
+
const imports = result.imports.map((imp) => ({
|
|
24
|
+
source: imp.source,
|
|
25
|
+
specifiers: imp.specifiers,
|
|
26
|
+
isRelative: imp.source.startsWith("."),
|
|
27
|
+
resolvedPath: resolvePythonImport(file, imp.source, rootDir)
|
|
28
|
+
}));
|
|
29
|
+
const exports = result.exports.map((exp) => ({
|
|
30
|
+
name: exp.name,
|
|
31
|
+
type: exp.type
|
|
32
|
+
}));
|
|
33
|
+
const linesOfCode = code.split("\n").length;
|
|
34
|
+
const importDepth = await calculatePythonImportDepth(
|
|
35
|
+
file,
|
|
36
|
+
dependencyGraph,
|
|
37
|
+
/* @__PURE__ */ new Set()
|
|
38
|
+
);
|
|
39
|
+
const contextBudget = estimateContextBudget(
|
|
40
|
+
code,
|
|
41
|
+
imports,
|
|
42
|
+
dependencyGraph
|
|
43
|
+
);
|
|
44
|
+
const cohesion = calculatePythonCohesion(exports, imports);
|
|
45
|
+
const circularDependencies = detectCircularDependencies(
|
|
46
|
+
file,
|
|
47
|
+
dependencyGraph
|
|
48
|
+
);
|
|
49
|
+
results.push({
|
|
50
|
+
file,
|
|
51
|
+
importDepth,
|
|
52
|
+
contextBudget,
|
|
53
|
+
cohesion,
|
|
54
|
+
imports,
|
|
55
|
+
exports,
|
|
56
|
+
metrics: {
|
|
57
|
+
linesOfCode,
|
|
58
|
+
importCount: imports.length,
|
|
59
|
+
exportCount: exports.length,
|
|
60
|
+
circularDependencies
|
|
61
|
+
}
|
|
62
|
+
});
|
|
63
|
+
} catch (error) {
|
|
64
|
+
console.warn(`Failed to analyze ${file}:`, error);
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
return results;
|
|
68
|
+
}
|
|
69
|
+
async function buildPythonDependencyGraph(files, rootDir) {
|
|
70
|
+
const graph = /* @__PURE__ */ new Map();
|
|
71
|
+
const parser = getParser("dummy.py");
|
|
72
|
+
if (!parser) return graph;
|
|
73
|
+
for (const file of files) {
|
|
74
|
+
try {
|
|
75
|
+
const code = await fs.promises.readFile(file, "utf-8");
|
|
76
|
+
const result = parser.parse(code, file);
|
|
77
|
+
const dependencies = /* @__PURE__ */ new Set();
|
|
78
|
+
for (const imp of result.imports) {
|
|
79
|
+
const resolved = resolvePythonImport(file, imp.source, rootDir);
|
|
80
|
+
if (resolved && files.includes(resolved)) {
|
|
81
|
+
dependencies.add(resolved);
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
graph.set(file, dependencies);
|
|
85
|
+
} catch (error) {
|
|
86
|
+
void error;
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
return graph;
|
|
90
|
+
}
|
|
91
|
+
function resolvePythonImport(fromFile, importPath, rootDir) {
|
|
92
|
+
const dir = dirname(fromFile);
|
|
93
|
+
if (importPath.startsWith(".")) {
|
|
94
|
+
const parts = importPath.split(".");
|
|
95
|
+
let upCount = 0;
|
|
96
|
+
while (parts[0] === "") {
|
|
97
|
+
upCount++;
|
|
98
|
+
parts.shift();
|
|
99
|
+
}
|
|
100
|
+
let targetDir = dir;
|
|
101
|
+
for (let i = 0; i < upCount - 1; i++) {
|
|
102
|
+
targetDir = dirname(targetDir);
|
|
103
|
+
}
|
|
104
|
+
const modulePath = parts.join("/");
|
|
105
|
+
const possiblePaths = [
|
|
106
|
+
resolve(targetDir, `${modulePath}.py`),
|
|
107
|
+
resolve(targetDir, modulePath, "__init__.py")
|
|
108
|
+
];
|
|
109
|
+
for (const path of possiblePaths) {
|
|
110
|
+
if (fs.existsSync(path)) {
|
|
111
|
+
return path;
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
} else {
|
|
115
|
+
const modulePath = importPath.replace(/\./g, "/");
|
|
116
|
+
const possiblePaths = [
|
|
117
|
+
resolve(rootDir, `${modulePath}.py`),
|
|
118
|
+
resolve(rootDir, modulePath, "__init__.py")
|
|
119
|
+
];
|
|
120
|
+
for (const path of possiblePaths) {
|
|
121
|
+
if (fs.existsSync(path)) {
|
|
122
|
+
return path;
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
return void 0;
|
|
127
|
+
}
|
|
128
|
+
async function calculatePythonImportDepth(file, dependencyGraph, visited, depth = 0) {
|
|
129
|
+
if (visited.has(file)) {
|
|
130
|
+
return depth;
|
|
131
|
+
}
|
|
132
|
+
visited.add(file);
|
|
133
|
+
const dependencies = dependencyGraph.get(file) || /* @__PURE__ */ new Set();
|
|
134
|
+
if (dependencies.size === 0) {
|
|
135
|
+
return depth;
|
|
136
|
+
}
|
|
137
|
+
let maxDepth = depth;
|
|
138
|
+
for (const dep of dependencies) {
|
|
139
|
+
const depDepth = await calculatePythonImportDepth(
|
|
140
|
+
dep,
|
|
141
|
+
dependencyGraph,
|
|
142
|
+
new Set(visited),
|
|
143
|
+
depth + 1
|
|
144
|
+
);
|
|
145
|
+
maxDepth = Math.max(maxDepth, depDepth);
|
|
146
|
+
}
|
|
147
|
+
return maxDepth;
|
|
148
|
+
}
|
|
149
|
+
function estimateContextBudget(code, imports, dependencyGraph) {
|
|
150
|
+
void dependencyGraph;
|
|
151
|
+
let budget = estimateTokens(code);
|
|
152
|
+
const avgTokensPerDep = 500;
|
|
153
|
+
budget += imports.length * avgTokensPerDep;
|
|
154
|
+
return budget;
|
|
155
|
+
}
|
|
156
|
+
function calculatePythonCohesion(exports, imports) {
|
|
157
|
+
if (exports.length === 0) return 1;
|
|
158
|
+
const exportCount = exports.length;
|
|
159
|
+
const importCount = imports.length;
|
|
160
|
+
let cohesion = 1;
|
|
161
|
+
if (exportCount > 10) {
|
|
162
|
+
cohesion *= 0.6;
|
|
163
|
+
} else if (exportCount > 5) {
|
|
164
|
+
cohesion *= 0.8;
|
|
165
|
+
}
|
|
166
|
+
if (exportCount > 0) {
|
|
167
|
+
const ratio = importCount / exportCount;
|
|
168
|
+
if (ratio > 2) {
|
|
169
|
+
cohesion *= 1.1;
|
|
170
|
+
} else if (ratio < 0.5) {
|
|
171
|
+
cohesion *= 0.9;
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
return Math.min(1, Math.max(0, cohesion));
|
|
175
|
+
}
|
|
176
|
+
function detectCircularDependencies(file, dependencyGraph) {
|
|
177
|
+
const circular = [];
|
|
178
|
+
const visited = /* @__PURE__ */ new Set();
|
|
179
|
+
const recursionStack = /* @__PURE__ */ new Set();
|
|
180
|
+
function dfs(current, path) {
|
|
181
|
+
if (recursionStack.has(current)) {
|
|
182
|
+
const cycleStart = path.indexOf(current);
|
|
183
|
+
const cycle = path.slice(cycleStart).concat([current]);
|
|
184
|
+
circular.push(cycle.join(" \u2192 "));
|
|
185
|
+
return;
|
|
186
|
+
}
|
|
187
|
+
if (visited.has(current)) {
|
|
188
|
+
return;
|
|
189
|
+
}
|
|
190
|
+
visited.add(current);
|
|
191
|
+
recursionStack.add(current);
|
|
192
|
+
const dependencies = dependencyGraph.get(current) || /* @__PURE__ */ new Set();
|
|
193
|
+
for (const dep of dependencies) {
|
|
194
|
+
dfs(dep, [...path, current]);
|
|
195
|
+
}
|
|
196
|
+
recursionStack.delete(current);
|
|
197
|
+
}
|
|
198
|
+
dfs(file, []);
|
|
199
|
+
return [...new Set(circular)];
|
|
200
|
+
}
|
|
201
|
+
export {
|
|
202
|
+
analyzePythonContext
|
|
203
|
+
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@aiready/context-analyzer",
|
|
3
|
-
"version": "0.9.36",
|
|
3
|
+
"version": "0.9.40",
|
|
4
4
|
"description": "AI context window cost analysis - detect fragmented code, deep import chains, and expensive context budgets",
|
|
5
5
|
"main": "./dist/index.js",
|
|
6
6
|
"module": "./dist/index.mjs",
|
|
@@ -49,7 +49,7 @@
|
|
|
49
49
|
"commander": "^14.0.0",
|
|
50
50
|
"chalk": "^5.3.0",
|
|
51
51
|
"prompts": "^2.4.2",
|
|
52
|
-
"@aiready/core": "0.9.
|
|
52
|
+
"@aiready/core": "0.9.37"
|
|
53
53
|
},
|
|
54
54
|
"devDependencies": {
|
|
55
55
|
"@types/node": "^24.0.0",
|
|
@@ -5,7 +5,7 @@ import {
|
|
|
5
5
|
adjustCohesionForClassification,
|
|
6
6
|
getClassificationRecommendations,
|
|
7
7
|
} from '../analyzer';
|
|
8
|
-
import type { DependencyNode
|
|
8
|
+
import type { DependencyNode } from '../types';
|
|
9
9
|
|
|
10
10
|
describe('file classification', () => {
|
|
11
11
|
const createNode = (overrides: Partial<DependencyNode>): DependencyNode => ({
|
package/src/analyzer.ts
CHANGED
|
@@ -102,12 +102,16 @@ function singularize(word: string): string {
|
|
|
102
102
|
/**
|
|
103
103
|
* Build a dependency graph from file contents
|
|
104
104
|
*/
|
|
105
|
-
export function buildDependencyGraph(
|
|
105
|
+
export function buildDependencyGraph(
|
|
106
|
+
files: FileContent[],
|
|
107
|
+
options?: { domainKeywords?: string[] }
|
|
108
|
+
): DependencyGraph {
|
|
106
109
|
const nodes = new Map<string, DependencyNode>();
|
|
107
110
|
const edges = new Map<string, Set<string>>();
|
|
108
111
|
|
|
109
|
-
// Auto-detect domain keywords from workspace folder structure
|
|
110
|
-
const autoDetectedKeywords =
|
|
112
|
+
// Auto-detect domain keywords from workspace folder structure (allow override)
|
|
113
|
+
const autoDetectedKeywords =
|
|
114
|
+
options?.domainKeywords ?? extractDomainKeywordsFromPaths(files);
|
|
111
115
|
|
|
112
116
|
// Some imported helpers are optional for future features; reference to avoid lint warnings
|
|
113
117
|
void calculateImportSimilarity;
|
package/src/index.ts
CHANGED
|
@@ -299,11 +299,11 @@ export async function analyzeContext(
|
|
|
299
299
|
maxFragmentation,
|
|
300
300
|
circularDeps,
|
|
301
301
|
});
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
302
|
+
// Some returned fields are not needed for the Python mapping here
|
|
303
|
+
void severity;
|
|
304
|
+
void issues;
|
|
305
|
+
void recommendations;
|
|
306
|
+
void potentialSavings;
|
|
307
307
|
|
|
308
308
|
// Get domains from exports
|
|
309
309
|
const domains = [
|
package/src/semantic-analysis.ts
CHANGED
|
@@ -17,6 +17,7 @@ export function buildCoUsageMatrix(
|
|
|
17
17
|
|
|
18
18
|
// For each file, track which other files are imported alongside it
|
|
19
19
|
for (const [sourceFile, node] of graph.nodes) {
|
|
20
|
+
void sourceFile;
|
|
20
21
|
const imports = node.imports;
|
|
21
22
|
|
|
22
23
|
// For each pair of imports in this file, increment their co-usage count
|
|
@@ -149,7 +150,7 @@ export function inferDomainFromSemantics(
|
|
|
149
150
|
// 1. Check co-usage patterns
|
|
150
151
|
const coUsages = coUsageMatrix.get(file) || new Map();
|
|
151
152
|
const strongCoUsages = Array.from(coUsages.entries())
|
|
152
|
-
.filter(([
|
|
153
|
+
.filter(([, count]) => count >= 3)
|
|
153
154
|
.map(([coFile]) => coFile);
|
|
154
155
|
|
|
155
156
|
// Extract domains from frequently co-imported files
|