@aiready/context-analyzer 0.21.26 → 0.21.27
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +32 -25
- package/.turbo/turbo-test.log +51 -87
- package/coverage/clover.xml +392 -1878
- package/coverage/coverage-final.json +15 -19
- package/coverage/index.html +48 -63
- package/coverage/src/analyzers/index.html +21 -21
- package/coverage/src/analyzers/python-context.ts.html +96 -96
- package/coverage/src/ast-utils.ts.html +34 -109
- package/coverage/src/classifier.ts.html +104 -104
- package/coverage/src/classify/classification-patterns.ts.html +1 -1
- package/coverage/src/classify/file-classifiers.ts.html +1 -1
- package/coverage/src/classify/index.html +1 -1
- package/coverage/src/cluster-detector.ts.html +72 -72
- package/coverage/src/defaults.ts.html +1 -1
- package/coverage/src/graph-builder.ts.html +131 -131
- package/coverage/src/index.html +101 -116
- package/coverage/src/index.ts.html +2 -2
- package/coverage/src/issue-analyzer.ts.html +32 -83
- package/coverage/src/mapper.ts.html +20 -2
- package/coverage/src/metrics.ts.html +127 -130
- package/coverage/src/orchestrator.ts.html +13 -13
- package/coverage/src/provider.ts.html +19 -19
- package/coverage/src/remediation.ts.html +59 -59
- package/coverage/src/report/console-report.ts.html +2 -2
- package/coverage/src/report/html-report.ts.html +60 -84
- package/coverage/src/report/index.html +7 -7
- package/coverage/src/report/interactive-setup.ts.html +1 -1
- package/coverage/src/scoring.ts.html +62 -62
- package/coverage/src/semantic/co-usage.ts.html +1 -1
- package/coverage/src/semantic/consolidation.ts.html +1 -1
- package/coverage/src/semantic/domain-inference.ts.html +1 -1
- package/coverage/src/semantic/index.html +1 -1
- package/coverage/src/semantic/type-graph.ts.html +1 -1
- package/coverage/src/summary.ts.html +67 -67
- package/coverage/src/types.ts.html +1 -1
- package/coverage/src/utils/dependency-graph-utils.ts.html +41 -41
- package/coverage/src/utils/index.html +21 -21
- package/coverage/src/utils/string-utils.ts.html +1 -1
- package/dist/chunk-22ZO4EKZ.mjs +1297 -0
- package/dist/chunk-4U4LDWGF.mjs +360 -0
- package/dist/chunk-BA7QGUHN.mjs +1722 -0
- package/dist/chunk-BCEZGRXI.mjs +1297 -0
- package/dist/chunk-BQCISA2F.mjs +91 -0
- package/dist/chunk-EMYD7NS6.mjs +137 -0
- package/dist/chunk-EWFR366Y.mjs +1740 -0
- package/dist/chunk-FO6YT6RG.mjs +1751 -0
- package/dist/chunk-J3SZQZNU.mjs +221 -0
- package/dist/chunk-OZE3FVZT.mjs +1089 -0
- package/dist/chunk-WHB7QI7N.mjs +91 -0
- package/dist/cli-action-CXIHOVAC.mjs +95 -0
- package/dist/cli-action-SA7SCYNV.mjs +95 -0
- package/dist/cli-action-YAJOJCXJ.mjs +95 -0
- package/dist/cli.js +688 -566
- package/dist/cli.mjs +4 -88
- package/dist/index.js +889 -773
- package/dist/index.mjs +21 -14
- package/dist/orchestrator-3L3NAZYP.mjs +10 -0
- package/dist/orchestrator-MONOZHVW.mjs +10 -0
- package/dist/orchestrator-ZR7JSKWI.mjs +10 -0
- package/dist/summary-7PZVW72O.mjs +7 -0
- package/dist/summary-LKUCJAIS.mjs +7 -0
- package/package.json +2 -2
- package/src/__tests__/analyzer.test.ts +1 -1
- package/src/__tests__/enhanced-cohesion.test.ts +4 -1
- package/src/__tests__/orchestrator.test.ts +19 -4
- package/src/__tests__/python-context.test.ts +6 -0
- package/src/__tests__/report/html-report.test.ts +8 -2
- package/src/ast-utils.ts +1 -26
- package/src/cli-definition.ts +4 -2
- package/src/issue-analyzer.ts +4 -19
- package/src/metrics.ts +1 -2
- package/src/provider.ts +4 -4
- package/src/report/html-report.ts +43 -59
- package/coverage/dist/chunk-64U3PNO3.mjs.html +0 -367
- package/coverage/dist/chunk-J3MUOWHC.mjs.html +0 -5326
- package/coverage/dist/index.html +0 -146
- package/coverage/dist/index.mjs.html +0 -1396
- package/coverage/src/analyzer.ts.html +0 -88
|
@@ -0,0 +1,360 @@
|
|
|
1
|
+
// src/utils/string-utils.ts
/**
 * Best-effort English singularization of a single (lower-case) word.
 * Checks a small table of irregular plurals first, then applies the common
 * suffix rules: "-ies" -> "-y", "-ses" -> drop the final "es"' trailing "s",
 * and a generic trailing "-s" (only for words longer than 3 characters, so
 * e.g. "bus" is left alone). Unrecognized words are returned unchanged.
 *
 * @param {string} word - word to singularize
 * @returns {string} singular form, or the input unchanged
 */
function singularize(word) {
  const irregulars = {
    people: "person",
    children: "child",
    men: "man",
    women: "woman"
  };
  // Object.hasOwn guards against inherited Object.prototype keys: with a
  // plain truthiness check, word === "constructor" would return a function.
  if (Object.hasOwn(irregulars, word)) return irregulars[word];
  if (word.endsWith("ies")) return word.slice(0, -3) + "y";
  if (word.endsWith("ses")) return word.slice(0, -2);
  if (word.endsWith("s") && word.length > 3) return word.slice(0, -1);
  return word;
}
|
|
15
|
+
|
|
16
|
+
// src/semantic/domain-inference.ts
/**
 * Score how confidently a domain can be assigned given which evidence
 * signals fired. Each signal contributes a fixed weight; the weights sum
 * to 1, so the result lies in [0, 1].
 *
 * @param {{coUsage: boolean, typeReference: boolean, exportName: boolean,
 *          importPath: boolean, folderStructure: boolean}} signals
 * @returns {number} accumulated confidence in [0, 1]
 */
function calculateDomainConfidence(signals) {
  const weights = {
    coUsage: 0.35,
    typeReference: 0.3,
    exportName: 0.15,
    importPath: 0.1,
    folderStructure: 0.1
  };
  let confidence = 0;
  // Accumulate in weight-declaration order (matches the original's
  // fixed sequence of additions).
  for (const [signal, weight] of Object.entries(weights)) {
    if (signals[signal]) confidence += weight;
  }
  return confidence;
}
|
|
33
|
+
/**
 * Infer candidate business domains for `file` using semantic evidence:
 * strong co-usage with other files (co-usage count >= 3) and shared type
 * references. Domains already inferred on neighbouring files' exports
 * accumulate boolean signals per domain, which are scored via
 * calculateDomainConfidence; only candidates scoring >= 0.3 are kept.
 *
 * @param {string} file - path of the file being classified
 * @param {string} exportName - export under consideration (not used in the signal gathering here)
 * @param {{nodes: Map<string, {exports: Array<{inferredDomain?: string}>}>}} graph
 * @param {Map<string, Map<string, number>>} coUsageMatrix - file -> (file -> co-usage count)
 * @param {Map<string, Iterable<string>>} typeGraph - type name -> files referencing that type
 * @param {Iterable<string>|undefined} exportTypeRefs - type names referenced by the export
 * @returns {Array<{domain: string, confidence: number, signals: object}>}
 *   candidate assignments sorted by confidence, highest first
 */
function inferDomainFromSemantics(file, exportName, graph, coUsageMatrix, typeGraph, exportTypeRefs) {
  const domainSignals = /* @__PURE__ */ new Map();
  // Fetch-or-create the signal record for a domain (all signals start false).
  // Extracted to remove the init block that was duplicated in both loops.
  const signalsFor = (domain) => {
    let signals = domainSignals.get(domain);
    if (!signals) {
      signals = {
        coUsage: false,
        typeReference: false,
        exportName: false,
        importPath: false,
        folderStructure: false
      };
      domainSignals.set(domain, signals);
    }
    return signals;
  };
  // Set `signalKey` for every known domain exported by `otherFile`.
  const markDomainsOf = (otherFile, signalKey) => {
    const node = graph.nodes.get(otherFile);
    if (!node) return;
    for (const exp of node.exports) {
      if (exp.inferredDomain && exp.inferredDomain !== "unknown") {
        signalsFor(exp.inferredDomain)[signalKey] = true;
      }
    }
  };
  // Signal 1: files frequently used together with this one (count >= 3).
  const coUsages = coUsageMatrix.get(file) || /* @__PURE__ */ new Map();
  for (const [coFile, count] of coUsages.entries()) {
    if (count >= 3) markDomainsOf(coFile, "coUsage");
  }
  // Signal 2: other files that reference the same types as this export.
  if (exportTypeRefs) {
    for (const typeRef of exportTypeRefs) {
      const filesWithType = typeGraph.get(typeRef);
      if (!filesWithType) continue;
      for (const typeFile of filesWithType) {
        if (typeFile === file) continue;
        markDomainsOf(typeFile, "typeReference");
      }
    }
  }
  // Keep only candidates whose weighted confidence clears the 0.3 threshold.
  const assignments = [];
  for (const [domain, signals] of domainSignals) {
    const confidence = calculateDomainConfidence(signals);
    if (confidence >= 0.3) assignments.push({ domain, confidence, signals });
  }
  assignments.sort((a, b) => b.confidence - a.confidence);
  return assignments;
}
|
|
93
|
+
/**
 * Regex-based fallback export scanner. Finds `export function/class/const/
 * type/interface/default` declarations in `content` and infers a domain for
 * each exported name. Only the first binding of a multi-declarator
 * `export const a = 1, b = 2` is captured.
 *
 * @param {string} content - source text to scan
 * @param {string} filePath - path of the file (used for domain inference)
 * @param {object|undefined} domainOptions - options forwarded to inferDomain
 * @param {string[]|undefined} fileImports - import paths of the file
 * @returns {Array<{name: string, type: string, inferredDomain: string}>}
 */
function extractExports(content, filePath, domainOptions, fileImports) {
  // Each matcher is paired with the export kind it detects.
  const exportMatchers = [
    [/export\s+function\s+(\w+)/g, "function"],
    [/export\s+class\s+(\w+)/g, "class"],
    [/export\s+const\s+(\w+)/g, "const"],
    [/export\s+type\s+(\w+)/g, "type"],
    [/export\s+interface\s+(\w+)/g, "interface"],
    [/export\s+default/g, "default"]
  ];
  const exports = [];
  for (const [pattern, type] of exportMatchers) {
    let match;
    while ((match = pattern.exec(content)) !== null) {
      // The `export default` matcher has no capture group; label it "default".
      const name = match[1] || "default";
      const inferredDomain = inferDomain(
        name,
        filePath,
        domainOptions,
        fileImports
      );
      exports.push({ name, type, inferredDomain });
    }
  }
  return exports;
}
|
|
127
|
+
/**
 * Heuristically infer a business domain for an exported symbol.
 * Matching is attempted in decreasing order of specificity:
 *   1. a whole token of the (camelCase-split) name equals a keyword;
 *   2. the lower-cased name contains a keyword as a substring;
 *   3. an import-path segment matches a keyword (singularized, exact,
 *      or substring);
 *   4. a file-path segment matches a keyword (singularized or exact only —
 *      no substring check in this pass, unlike pass 3);
 * otherwise "unknown". Custom keywords from domainOptions take priority
 * over the built-in defaults within each pass.
 *
 * @param {string} name - exported symbol name
 * @param {string|undefined} filePath - path of the defining file
 * @param {{domainKeywords?: string[]}|undefined} domainOptions
 * @param {string[]|undefined} fileImports - import specifiers of the file
 * @returns {string} matched keyword, or "unknown"
 */
function inferDomain(name, filePath, domainOptions, fileImports) {
  const lower = name.toLowerCase();
  // Bug fix: split camelCase boundaries BEFORE lowercasing. The original
  // lowercased first, so `([a-z0-9])([A-Z])` could never match and the
  // token pass never saw individual camelCase words.
  const tokens = Array.from(
    new Set(
      name
        .replace(/([a-z0-9])([A-Z])/g, "$1 $2")
        .toLowerCase()
        .replace(/[^a-z0-9]+/gi, " ")
        .split(" ")
        .filter(Boolean)
    )
  );
  const defaultKeywords = [
    "authentication",
    "authorization",
    "payment",
    "invoice",
    "customer",
    "product",
    "order",
    "cart",
    "user",
    "admin",
    "repository",
    "controller",
    "service",
    "config",
    "model",
    "view",
    "auth"
  ];
  const domainKeywords = domainOptions?.domainKeywords?.length ? [...domainOptions.domainKeywords, ...defaultKeywords] : defaultKeywords;
  // Pass 1: exact token match (most specific).
  for (const keyword of domainKeywords) {
    if (tokens.includes(keyword)) return keyword;
  }
  // Pass 2: keyword embedded anywhere in the lower-cased name.
  for (const keyword of domainKeywords) {
    if (lower.includes(keyword)) return keyword;
  }
  // Pass 3: import-path segments, singularized / exact / substring.
  if (fileImports) {
    for (const importPath of fileImports) {
      for (const segment of importPath.split("/")) {
        const segLower = segment.toLowerCase();
        const singularSegment = singularize(segLower);
        for (const keyword of domainKeywords) {
          if (singularSegment === keyword || segLower === keyword || segLower.includes(keyword))
            return keyword;
        }
      }
    }
  }
  // Pass 4: file-path segments, singularized / exact only.
  if (filePath) {
    for (const segment of filePath.split("/")) {
      const segLower = segment.toLowerCase();
      const singularSegment = singularize(segLower);
      for (const keyword of domainKeywords) {
        if (singularSegment === keyword || segLower === keyword) return keyword;
      }
    }
  }
  return "unknown";
}
|
|
185
|
+
|
|
186
|
+
// src/metrics.ts
|
|
187
|
+
import { calculateImportSimilarity } from "@aiready/core";
|
|
188
|
+
|
|
189
|
+
// src/ast-utils.ts
|
|
190
|
+
import { parseFileExports } from "@aiready/core";
|
|
191
|
+
/**
 * Extract exports from a source file via AST parsing, falling back to the
 * regex-based extractExports when parsing throws or yields no exports
 * (empty results are trusted as-is for test files).
 *
 * @param {string} content - source text to parse
 * @param {string} filePath - path of the file
 * @param {object|undefined} domainOptions - options forwarded to inferDomain
 * @param {string[]|undefined} fileImports - import paths of the file
 * @returns {Promise<Array<object>>} exports enriched with inferred domains
 */
async function extractExportsWithAST(content, filePath, domainOptions, fileImports) {
  try {
    const { exports: astExports } = await parseFileExports(content, filePath);
    const emptyNonTestResult = astExports.length === 0 && !isTestFile(filePath);
    if (emptyNonTestResult) {
      // The parser found nothing in a production file — retry with regexes.
      return extractExports(content, filePath, domainOptions, fileImports);
    }
    const enrich = (exp) => ({
      name: exp.name,
      type: exp.type,
      inferredDomain: inferDomain(
        exp.name,
        filePath,
        domainOptions,
        fileImports
      ),
      imports: exp.imports,
      dependencies: exp.dependencies,
      typeReferences: exp.typeReferences
    });
    return astExports.map(enrich);
  } catch {
    // Parser errors (e.g. unsupported syntax) degrade to the regex scanner.
    return extractExports(content, filePath, domainOptions, fileImports);
  }
}
|
|
214
|
+
/**
 * Check whether a path looks like test/mock/fixture code rather than
 * production source, by case-insensitive substring matching of common
 * naming conventions.
 *
 * @param {string} filePath
 * @returns {boolean} true if any test-file marker is present in the path
 */
function isTestFile(filePath) {
  const lower = filePath.toLowerCase();
  const markers = [
    ".test.",
    ".spec.",
    "/__tests__/",
    "/tests/",
    "/test/",
    "test-",
    "-test",
    "/__mocks__/",
    "/mocks/",
    "/fixtures/",
    ".mock.",
    ".fixture.",
    "/test-utils/"
  ];
  return markers.some((marker) => lower.includes(marker));
}
|
|
218
|
+
|
|
219
|
+
// src/metrics.ts
/**
 * Score how cohesive a file's exports are, in [0, 1] (1 = fully cohesive).
 * Two evidence sources are combined:
 *   - 1 minus the normalized Shannon entropy of the exports' inferred
 *     domains (fewer, more concentrated domains = higher score);
 *   - the average pairwise import similarity between exports, when import
 *     data exists (weighted 60/40 against the domain score).
 * Trivial cases short-circuit to 1: zero/one export, test files, and a
 * single known domain when no custom weights are configured.
 *
 * @param {Array<{inferredDomain?: string, imports?: string[], dependencies?: string[]}>} exports
 * @param {string|undefined} filePath - used to exempt test files
 * @param {{weights?: object}|undefined} options
 * @returns {number} cohesion score in [0, 1]
 */
function calculateEnhancedCohesion(exports, filePath, options) {
  if (exports.length <= 1) return 1;
  if (filePath && isTestFile(filePath)) return 1;
  // Tally how many exports fall into each inferred domain.
  const domains = exports.map((exp) => exp.inferredDomain || "unknown");
  const domainCounts = /* @__PURE__ */ new Map();
  for (const domain of domains) {
    domainCounts.set(domain, (domainCounts.get(domain) || 0) + 1);
  }
  // A single, known domain is perfectly cohesive unless custom weights ask
  // for the full computation.
  const singleKnownDomain = domainCounts.size === 1 && domains[0] !== "unknown";
  if (singleKnownDomain && !options?.weights) return 1;
  // Domain score: 1 minus entropy normalized by log2(max(2, #domains)).
  let domainEntropy = 0;
  for (const count of domainCounts.values()) {
    const prob = count / exports.length;
    if (prob > 0) domainEntropy -= prob * Math.log2(prob);
  }
  const maxEntropy = Math.log2(Math.max(2, domainCounts.size));
  const domainScore = 1 - domainEntropy / maxEntropy;
  // Import score: mean pairwise similarity over pairs that have import data.
  let importScoreTotal = 0;
  let pairsWithData = 0;
  let anyImportData = false;
  for (let i = 0; i < exports.length; i++) {
    for (let j = i + 1; j < exports.length; j++) {
      const importsA = exports[i].imports;
      const importsB = exports[j].imports;
      if (!importsA && !importsB) continue;
      anyImportData = true;
      importScoreTotal += calculateImportSimilarity(
        { ...exports[i], imports: importsA || [] },
        { ...exports[j], imports: importsB || [] }
      );
      pairsWithData++;
    }
  }
  const avgImportScore = pairsWithData > 0 ? importScoreTotal / pairsWithData : 0;
  let score = anyImportData ? domainScore * 0.4 + avgImportScore * 0.6 : domainScore;
  // Floor: don't report exactly 0 when import data existed but nothing matched.
  if (anyImportData && score === 0 && domainScore === 0) {
    score = 0.1;
  }
  // Small bonus when any export declares internal dependencies.
  const hasStructuralLinks = exports.some(
    (exp) => exp.dependencies && exp.dependencies.length > 0
  );
  if (hasStructuralLinks) {
    score = Math.min(1, score + 0.1);
  }
  // Shortcut kept from the original: a homogeneous domain set with no import
  // data and no custom weights always scores 1.
  if (!options?.weights && !anyImportData && domainCounts.size === 1) return 1;
  return score;
}
|
|
274
|
+
/**
 * Cohesion score in [0, 1] for a file based on how concentrated its
 * co-usage is: a file always co-used with the same partner scores 1; one
 * whose co-usage spreads evenly across many files approaches 0. Computed
 * as 1 minus the normalized entropy of the co-usage count distribution.
 * Missing or empty co-usage data defaults to 1.
 *
 * @param {string} file
 * @param {Map<string, Map<string, number>>|undefined} coUsageMatrix
 * @returns {number} cohesion score in [0, 1]
 */
function calculateStructuralCohesionFromCoUsage(file, coUsageMatrix) {
  if (!coUsageMatrix) return 1;
  const coUsages = coUsageMatrix.get(file);
  if (!coUsages || coUsages.size === 0) return 1;
  const counts = Array.from(coUsages.values());
  const total = counts.reduce((sum, count) => sum + count, 0);
  if (total === 0) return 1;
  const probs = counts.filter((count) => count > 0).map((count) => count / total);
  // A single co-usage partner is maximally concentrated.
  if (probs.length <= 1) return 1;
  const entropy = probs.reduce((acc, prob) => acc - prob * Math.log2(prob), 0);
  const maxEntropy = Math.log2(probs.length);
  return maxEntropy > 0 ? 1 - entropy / maxEntropy : 1;
}
|
|
293
|
+
/**
 * Measure how scattered a set of files is across directories, in [0, 1]
 * (0 = single file or all in one directory). The default scale is linear
 * in the number of distinct parent directories; options.useLogScale
 * switches to a log ratio. A shared-import ratio above 0.5 discounts the
 * score by up to 20%.
 *
 * @param {string[]} files - file paths belonging to the domain
 * @param {string} domain - domain name (unused; kept for call compatibility)
 * @param {{useLogScale?: boolean, logBase?: number, sharedImportRatio?: number}|undefined} options
 * @returns {number} fragmentation score
 */
function calculateFragmentation(files, domain, options) {
  if (files.length <= 1) return 0;
  const parentDir = (file) => file.split("/").slice(0, -1).join("/");
  const uniqueDirs = new Set(files.map(parentDir)).size;
  let score;
  if (options?.useLogScale) {
    const base = options.logBase || Math.E;
    score = uniqueDirs <= 1
      ? 0
      : Math.log(uniqueDirs) / Math.log(base) / (Math.log(files.length) / Math.log(base));
  } else {
    score = (uniqueDirs - 1) / (files.length - 1);
  }
  if (options?.sharedImportRatio && options.sharedImportRatio > 0.5) {
    const discount = (options.sharedImportRatio - 0.5) * 0.4;
    score = score * (1 - discount);
  }
  return score;
}
|
|
306
|
+
/**
 * Normalized Shannon entropy of the directory distribution of `files`,
 * in [0, 1]: 0 when all files share one directory (or the list is empty or
 * missing), approaching 1 when files spread evenly across directories.
 *
 * @param {string[]|undefined} files
 * @returns {number} entropy in [0, 1]
 */
function calculatePathEntropy(files) {
  if (!files || files.length === 0) return 0;
  const dirCounts = /* @__PURE__ */ new Map();
  for (const file of files) {
    // Root-level files fall back to "." as their directory key.
    const dir = file.split("/").slice(0, -1).join("/") || ".";
    dirCounts.set(dir, (dirCounts.get(dir) || 0) + 1);
  }
  const counts = [...dirCounts.values()];
  if (counts.length <= 1) return 0;
  const total = counts.reduce((sum, value) => sum + value, 0);
  const entropy = counts.reduce((acc, count) => {
    const prob = count / total;
    return acc - prob * Math.log2(prob);
  }, 0);
  const maxEntropy = Math.log2(counts.length);
  return maxEntropy > 0 ? entropy / maxEntropy : 0;
}
|
|
324
|
+
/**
 * Average pairwise directory distance between files, in [0, 1]: 0 when
 * every pair shares its full path prefix, approaching 1 when pairs share
 * no ancestors. Per pair, distance is
 * 1 - (shared leading segments / deeper path's segment count).
 *
 * @param {string[]|undefined} files
 * @returns {number} mean normalized distance
 */
function calculateDirectoryDistance(files) {
  if (!files || files.length <= 1) return 0;
  const split = (pathStr) => pathStr.split("/").filter(Boolean);
  // Count leading segments two paths have in common.
  const sharedPrefixLength = (segsA, segsB) => {
    const limit = Math.min(segsA.length, segsB.length);
    let depth = 0;
    while (depth < limit && segsA[depth] === segsB[depth]) depth++;
    return depth;
  };
  let totalNormalized = 0;
  let comparisons = 0;
  for (let i = 0; i < files.length; i++) {
    const segsA = split(files[i]);
    for (let j = i + 1; j < files.length; j++) {
      const segsB = split(files[j]);
      const shared = sharedPrefixLength(segsA, segsB);
      const maxDepth = Math.max(segsA.length, segsB.length);
      totalNormalized += 1 - (maxDepth > 0 ? shared / maxDepth : 0);
      comparisons++;
    }
  }
  return comparisons > 0 ? totalNormalized / comparisons : 0;
}
|
|
347
|
+
|
|
348
|
+
// Public surface of this chunk: string utilities, domain inference, export
// extraction, and cohesion/fragmentation metrics re-exported for the
// package's entry points.
export {
  singularize,
  calculateDomainConfidence,
  inferDomainFromSemantics,
  extractExports,
  inferDomain,
  extractExportsWithAST,
  calculateEnhancedCohesion,
  calculateStructuralCohesionFromCoUsage,
  calculateFragmentation,
  calculatePathEntropy,
  calculateDirectoryDistance
};
|