@unrdf/project-engine 5.0.1 → 26.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +16 -15
- package/src/golden-structure.mjs +2 -2
- package/src/materialize-apply.mjs +2 -2
- package/README.md +0 -53
- package/src/api-contract-validator.mjs +0 -711
- package/src/auto-test-generator.mjs +0 -444
- package/src/autonomic-mapek.mjs +0 -511
- package/src/capabilities-manifest.mjs +0 -125
- package/src/code-complexity-js.mjs +0 -368
- package/src/dependency-graph.mjs +0 -276
- package/src/doc-drift-checker.mjs +0 -172
- package/src/doc-generator.mjs +0 -229
- package/src/domain-infer.mjs +0 -966
- package/src/drift-snapshot.mjs +0 -775
- package/src/file-roles.mjs +0 -94
- package/src/fs-scan.mjs +0 -305
- package/src/gap-finder.mjs +0 -376
- package/src/hotspot-analyzer.mjs +0 -412
- package/src/index.mjs +0 -151
- package/src/initialize.mjs +0 -957
- package/src/lens/project-structure.mjs +0 -74
- package/src/mapek-orchestration.mjs +0 -665
- package/src/materialize-plan.mjs +0 -422
- package/src/materialize.mjs +0 -137
- package/src/policy-derivation.mjs +0 -869
- package/src/project-config.mjs +0 -142
- package/src/project-diff.mjs +0 -28
- package/src/project-engine/build-utils.mjs +0 -237
- package/src/project-engine/code-analyzer.mjs +0 -248
- package/src/project-engine/doc-generator.mjs +0 -407
- package/src/project-engine/infrastructure.mjs +0 -213
- package/src/project-engine/metrics.mjs +0 -146
- package/src/project-model.mjs +0 -111
- package/src/project-report.mjs +0 -348
- package/src/refactoring-guide.mjs +0 -242
- package/src/stack-detect.mjs +0 -102
- package/src/stack-linter.mjs +0 -213
- package/src/template-infer.mjs +0 -674
- package/src/type-auditor.mjs +0 -609
|
@@ -1,368 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @file JavaScript Code Complexity Analysis Capability
|
|
3
|
-
* @module project-engine/code-complexity-js
|
|
4
|
-
* @description Analyzes JavaScript/TypeScript code complexity using typhonjs-escomplex,
|
|
5
|
-
* emits RDF triples to project store with unmetric: vocabulary
|
|
6
|
-
*/
|
|
7
|
-
|
|
8
|
-
import { z } from 'zod';
|
|
9
|
-
import { UnrdfDataFactory as DataFactory } from '@unrdf/core/rdf/n3-justified-only';
|
|
10
|
-
import { createStore, OxigraphStore } from '@unrdf/oxigraph';
|
|
11
|
-
import { readdir, readFile } from 'fs/promises';
|
|
12
|
-
import { join, extname } from 'path';
|
|
13
|
-
import { statSync as _statSync } from 'fs';
|
|
14
|
-
import ProjectReport from 'typhonjs-escomplex';
|
|
15
|
-
import * as unmetric from '../ontologies/unmetric-ontology.mjs';
|
|
16
|
-
|
|
17
|
-
const { namedNode, literal, quad } = DataFactory;
|
|
18
|
-
|
|
19
|
-
/**
|
|
20
|
-
* @typedef {Object} JsComplexityInput
|
|
21
|
-
* @property {string} projectRoot - Root directory to analyze
|
|
22
|
-
* @property {OxigraphStore} [baseStore] - Existing RDF store to merge into
|
|
23
|
-
* @property {string[]} [excludePatterns] - Patterns to exclude from analysis
|
|
24
|
-
* @property {string} [mode] - Analysis mode: off, observe, enforce
|
|
25
|
-
*/
|
|
26
|
-
|
|
27
|
-
/**
|
|
28
|
-
* @typedef {Object} FunctionMetrics
|
|
29
|
-
* @property {string} name - Function name
|
|
30
|
-
* @property {number} startLine - Start line number
|
|
31
|
-
* @property {number} cyclomatic - Cyclomatic complexity
|
|
32
|
-
* @property {number} halsteadVolume - Halstead volume
|
|
33
|
-
* @property {number} maintainabilityIndex - Maintainability index (0-100)
|
|
34
|
-
*/
|
|
35
|
-
|
|
36
|
-
/**
|
|
37
|
-
* @typedef {Object} FileMetrics
|
|
38
|
-
* @property {string} filePath - Relative file path
|
|
39
|
-
* @property {number} cyclomatic - Average cyclomatic complexity
|
|
40
|
-
* @property {number} halsteadVolume - File halstead volume
|
|
41
|
-
* @property {number} maintainabilityIndex - File maintainability index
|
|
42
|
-
* @property {number} linesOfCode - Physical lines of code
|
|
43
|
-
* @property {FunctionMetrics[]} functions - Per-function metrics
|
|
44
|
-
*/
|
|
45
|
-
|
|
46
|
-
/**
|
|
47
|
-
* @typedef {Object} JsComplexitySummary
|
|
48
|
-
* @property {number} filesAnalyzed - Number of files analyzed
|
|
49
|
-
* @property {number} averageCyclomatic - Project-wide average CC
|
|
50
|
-
* @property {FileMetrics[]} topRisks - Worst 5 files by CC and MI
|
|
51
|
-
* @property {string} mode - Analysis mode
|
|
52
|
-
* @property {string} timestamp - ISO 8601 timestamp
|
|
53
|
-
*/
|
|
54
|
-
|
|
55
|
-
// Zod schema mirroring the JsComplexityInput typedef; `mode` defaults to
// 'observe' so analysis runs unless explicitly disabled.
const JsComplexityInputSchema = z.object({
  projectRoot: z.string(),
  baseStore: z.instanceof(OxigraphStore).optional(),
  excludePatterns: z.array(z.string()).optional(),
  mode: z.enum(['off', 'observe', 'enforce']).default('observe'),
});

// Paths never analyzed: generated output, vendored dependencies, and test
// files. Always appended to any caller-supplied exclude patterns.
const DEFAULT_EXCLUDE_PATTERNS = [
  '**/node_modules/**',
  '**/dist/**',
  '**/build/**',
  '**/coverage/**',
  '**/.next/**',
  '**/test/**',
  '**/__tests__/**',
  '**/spec/**',
  '**/*.test.mjs',
  '**/*.test.js',
  '**/*.spec.mjs',
  '**/*.spec.js',
];
|
|
76
|
-
|
|
77
|
-
/**
 * Analyze JavaScript/TypeScript code complexity and emit RDF triples.
 *
 * Walks the project tree for JS/TS sources, runs escomplex on each file,
 * records per-file and per-function metrics as quads in the (new or
 * caller-supplied) store, then appends a root ComplexityReport node.
 *
 * @param {JsComplexityInput} input - Analysis input
 * @returns {Promise<{ store: OxigraphStore, summary: JsComplexitySummary }>}
 * @throws {Error} ZodError when `input` fails JsComplexityInputSchema validation
 */
export async function analyzeJsComplexity(input) {
  const validated = JsComplexityInputSchema.parse(input);

  if (validated.mode === 'off') {
    // Analysis disabled: return an empty store and a zeroed summary
    // without touching the filesystem at all.
    const emptyStore = createStore();
    return {
      store: emptyStore,
      summary: {
        filesAnalyzed: 0,
        averageCyclomatic: 0,
        topRisks: [],
        mode: 'off',
        timestamp: new Date().toISOString(),
      },
    };
  }

  // Reuse the caller's store when provided so metrics merge into it.
  const store = validated.baseStore || createStore();
  // Caller patterns apply in addition to the built-in defaults.
  const excludePatterns = [...(validated.excludePatterns || []), ...DEFAULT_EXCLUDE_PATTERNS];

  // Find all JS/TS files
  const jsFiles = await findJavaScriptFiles(validated.projectRoot, excludePatterns);

  if (jsFiles.length === 0) {
    // Nothing to analyze: keep the (possibly caller-supplied) store as-is.
    return {
      store,
      summary: {
        filesAnalyzed: 0,
        averageCyclomatic: 0,
        topRisks: [],
        mode: validated.mode,
        timestamp: new Date().toISOString(),
      },
    };
  }

  // Analyze each file
  const fileMetrics = [];
  // NOTE(review): collected but never read after the loop — candidate for removal.
  const allFunctionMetrics = [];

  for (const filePath of jsFiles) {
    try {
      const content = await readFile(filePath, 'utf-8');
      const metrics = analyzeFileComplexity(content, filePath, validated.projectRoot);

      if (metrics) {
        fileMetrics.push(metrics);
        allFunctionMetrics.push(...metrics.functions.map(f => ({ ...f, file: filePath })));

        // Emit RDF triples for this file
        emitFileComplexityTriples(store, metrics, validated.projectRoot);
      }
    } catch (err) {
      // Silently skip files that can't be analyzed
      // (e.g., syntax errors, binary files)
    }
  }

  // Calculate summary
  const summary = calculateSummary(fileMetrics, validated.mode);

  // Emit root report node.
  // NOTE(review): Date.now() is the only uniqueness source for the report
  // IRI — two reports minted in the same millisecond would collide.
  const reportNode = namedNode(`http://example.org/unrdf/metrics#report-${Date.now()}`);
  store.addQuad(
    quad(
      reportNode,
      namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
      unmetric.ComplexityReport
    )
  );
  store.addQuad(quad(reportNode, unmetric.filesAnalyzed, literal(fileMetrics.length)));
  store.addQuad(quad(reportNode, unmetric.analysisMode, literal(validated.mode)));
  store.addQuad(
    quad(
      reportNode,
      unmetric.analysisTimestamp,
      literal(summary.timestamp, namedNode('http://www.w3.org/2001/XMLSchema#dateTime'))
    )
  );

  return { store, summary };
}
|
|
166
|
-
|
|
167
|
-
/**
 * Find all JavaScript/TypeScript files under `root`, honoring exclude
 * patterns of the forms `**​/dir/**` (skip a directory name anywhere) and
 * `**​/*.suffix` (skip files by suffix). Unrecognized patterns fall back to
 * a literal suffix match after stripping `*` wildcards.
 *
 * Fix over the previous version: directory patterns now match whole path
 * SEGMENTS instead of raw substrings, so `'**​/test/**'` no longer excludes
 * unrelated paths such as `src/latest.js`.
 *
 * @private
 * @param {string} root - Root directory
 * @param {string[]} excludePatterns - Patterns to exclude
 * @returns {Promise<string[]>} sorted absolute file paths
 */
async function findJavaScriptFiles(root, excludePatterns) {
  const files = [];
  const jsExtensions = ['.js', '.mjs', '.ts', '.tsx', '.jsx'];

  // Pre-classify patterns once instead of re-parsing them per entry.
  const excludedDirNames = new Set();
  const excludedSuffixes = [];
  for (const pattern of excludePatterns) {
    const dirMatch = pattern.match(/^\*\*\/([^*]+)\/\*\*$/);
    if (dirMatch) {
      excludedDirNames.add(dirMatch[1]);
      continue;
    }
    const fileMatch = pattern.match(/^\*\*\/\*(\.[^*]+)$/);
    if (fileMatch) {
      excludedSuffixes.push(fileMatch[1]);
      continue;
    }
    // Legacy fallback: treat whatever remains after stripping wildcards
    // as a literal suffix.
    const stripped = pattern.replace(/\*/g, '');
    if (stripped !== '') excludedSuffixes.push(stripped);
  }

  // True when the entry should be skipped (directory or file).
  function isExcluded(relativePath, name) {
    const segments = relativePath.split(/[\\/]/);
    if (segments.some(segment => excludedDirNames.has(segment))) return true;
    return excludedSuffixes.some(suffix => name.endsWith(suffix));
  }

  async function scan(dir) {
    try {
      const entries = await readdir(dir, { withFileTypes: true });

      for (const entry of entries) {
        const fullPath = join(dir, entry.name);
        const relativePath = fullPath.substring(root.length + 1);

        if (isExcluded(relativePath, entry.name)) continue;

        if (entry.isDirectory()) {
          await scan(fullPath);
        } else if (jsExtensions.includes(extname(entry.name))) {
          files.push(fullPath);
        }
      }
    } catch {
      // Skip directories that can't be read (permissions, races).
    }
  }

  await scan(root);
  return files.sort();
}
|
|
215
|
-
|
|
216
|
-
/**
 * Analyze a single file's complexity using typhonjs-escomplex.
 *
 * @private
 * @param {string} content - File content
 * @param {string} filePath - Full file path
 * @param {string} projectRoot - Project root for relative path
 * @returns {FileMetrics|null} metrics, or null when the file can't be parsed
 */
function analyzeFileComplexity(content, filePath, projectRoot) {
  try {
    // Use typhonjs-escomplex to analyze
    const report = ProjectReport.analyze(content);

    if (!report || !report.modules || report.modules.length === 0) {
      return null;
    }

    const module = report.modules[0];
    const relativePath = filePath.substring(projectRoot.length + 1);

    // Extract function metrics.
    // Fix: use `??` instead of `||` so a legitimate metric value of 0
    // (e.g. maintainability 0 = worst possible) is preserved rather than
    // silently replaced by the default.
    const functions = (module.functions || []).map(fn => ({
      name: fn.name || '(anonymous)',
      startLine: fn.line || 0,
      cyclomatic: fn.cyclomatic ?? 1,
      halsteadVolume: fn.halstead?.volume ?? 0,
      maintainabilityIndex: fn.maintainability ?? 100,
    }));

    return {
      filePath: relativePath,
      cyclomatic: module.cyclomatic ?? 1,
      halsteadVolume: module.halstead?.volume ?? 0,
      maintainabilityIndex: module.maintainability ?? 100,
      linesOfCode: module.loc?.physical ?? 0,
      functions,
    };
  } catch {
    // Unparseable content (syntax errors, non-JS text): report "no
    // metrics" instead of failing the whole analysis run.
    return null;
  }
}
|
|
259
|
-
|
|
260
|
-
/**
 * Emit RDF triples for one file's complexity metrics.
 *
 * Mints a fresh (timestamp + random) IRI for the file node and for each of
 * its functions, records raw metric values, flags high-complexity (CC > 10)
 * and low-maintainability (MI < 50) files, and links every function node
 * back to its file node.
 *
 * @private
 * @param {OxigraphStore} store - RDF store to emit to
 * @param {FileMetrics} metrics - File metrics
 * @param {string} _projectRoot - Project root (currently unused)
 */
function emitFileComplexityTriples(store, metrics, _projectRoot) {
  // NOTE(review): Math.random()-based IRIs are not stable across runs, so
  // re-analysis creates new nodes instead of updating earlier ones.
  const fileNode = namedNode(
    `http://example.org/unrdf/metrics#file-${Date.now()}-${Math.random()}`
  );

  // File type and path
  store.addQuad(
    quad(
      fileNode,
      namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
      unmetric.FileComplexity
    )
  );
  store.addQuad(quad(fileNode, unmetric.location, literal(metrics.filePath)));

  // File-level metrics
  store.addQuad(quad(fileNode, unmetric.cyclomatic, literal(metrics.cyclomatic)));
  store.addQuad(quad(fileNode, unmetric.halsteadVolume, literal(metrics.halsteadVolume)));
  store.addQuad(
    quad(fileNode, unmetric.maintainabilityIndex, literal(metrics.maintainabilityIndex))
  );
  store.addQuad(quad(fileNode, unmetric.linesOfCode, literal(metrics.linesOfCode)));

  // Severity flags (only asserted when the threshold is crossed)
  if (metrics.cyclomatic > 10) {
    store.addQuad(quad(fileNode, unmetric.highComplexity, literal(true)));
  }
  if (metrics.maintainabilityIndex < 50) {
    store.addQuad(quad(fileNode, unmetric.lowMaintainability, literal(true)));
  }

  // Emit function-level metrics
  for (const fn of metrics.functions) {
    const fnNode = namedNode(
      `http://example.org/unrdf/metrics#func-${Date.now()}-${Math.random()}`
    );

    store.addQuad(
      quad(
        fnNode,
        namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
        unmetric.FunctionComplexity
      )
    );
    // Location encoded as "relative/path:line" so a function is
    // addressable in reports and logs.
    store.addQuad(quad(fnNode, unmetric.location, literal(`${metrics.filePath}:${fn.startLine}`)));
    store.addQuad(
      quad(fnNode, namedNode('http://example.org/unrdf/metrics#name'), literal(fn.name))
    );

    store.addQuad(quad(fnNode, unmetric.cyclomatic, literal(fn.cyclomatic)));
    store.addQuad(quad(fnNode, unmetric.halsteadVolume, literal(fn.halsteadVolume)));
    store.addQuad(quad(fnNode, unmetric.maintainabilityIndex, literal(fn.maintainabilityIndex)));

    // Link function to file
    store.addQuad(quad(fileNode, unmetric.hasFunctionComplexity, fnNode));
  }
}
|
|
325
|
-
|
|
326
|
-
/**
 * Build the project-level summary from per-file metrics.
 *
 * Top risks are the five worst files: lowest maintainability index first,
 * ties broken by highest cyclomatic complexity.
 *
 * @private
 * @param {FileMetrics[]} fileMetrics - All file metrics
 * @param {string} mode - Analysis mode
 * @returns {JsComplexitySummary}
 */
function calculateSummary(fileMetrics, mode) {
  const timestamp = new Date().toISOString();

  if (fileMetrics.length === 0) {
    return { filesAnalyzed: 0, averageCyclomatic: 0, topRisks: [], mode, timestamp };
  }

  // Project-wide average cyclomatic complexity, rounded to 2 decimals.
  let cyclomaticTotal = 0;
  for (const metric of fileMetrics) {
    cyclomaticTotal += metric.cyclomatic;
  }
  const averageCyclomatic = Math.round((cyclomaticTotal / fileMetrics.length) * 100) / 100;

  // Worst offenders: MI ascending (lower is worse), then CC descending.
  const byRisk = (a, b) =>
    a.maintainabilityIndex !== b.maintainabilityIndex
      ? a.maintainabilityIndex - b.maintainabilityIndex
      : b.cyclomatic - a.cyclomatic;
  const topRisks = fileMetrics.slice().sort(byRisk).slice(0, 5);

  return {
    filesAnalyzed: fileMetrics.length,
    averageCyclomatic,
    topRisks,
    mode,
    timestamp,
  };
}
|
package/src/dependency-graph.mjs
DELETED
|
@@ -1,276 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @file Dependency Graph - builds and analyzes module dependency graph
|
|
3
|
-
* @module project-engine/dependency-graph
|
|
4
|
-
*/
|
|
5
|
-
|
|
6
|
-
import { UnrdfDataFactory as DataFactory } from '@unrdf/core/rdf/n3-justified-only';
|
|
7
|
-
import { z } from 'zod';
|
|
8
|
-
|
|
9
|
-
const { namedNode } = DataFactory;
|
|
10
|
-
|
|
11
|
-
// IRI namespaces read from the project store: file metadata (fs) and
// module import edges (dep).
const NS = {
  fs: 'http://example.org/unrdf/filesystem#',
  dep: 'http://example.org/unrdf/dependency#',
};

// Duck-typed store check (anything exposing getQuads) so alternative RDF
// store implementations are accepted.
const DependencyGraphOptionsSchema = z.object({
  projectStore: z.custom(val => val && typeof val.getQuads === 'function', {
    message: 'projectStore must be an RDF store with getQuads method',
  }),
  projectRoot: z.string().optional(),
});

// Shape of one module in the dependency graph (re-exported below).
const DependencyNodeSchema = z.object({
  file: z.string(),
  imports: z.array(z.string()),
  importedBy: z.array(z.string()),
  depth: z.number(),
  isCircular: z.boolean(),
});

// Shape of one reported graph problem (re-exported below).
const DependencyIssueSchema = z.object({
  type: z.enum(['circular', 'deep-nesting', 'hub', 'orphan']),
  files: z.array(z.string()),
  severity: z.enum(['critical', 'high', 'medium', 'low']),
  description: z.string(),
  suggestion: z.string(),
});
|
|
38
|
-
|
|
39
|
-
/**
 * Build and analyze the module dependency graph.
 *
 * Reads file paths and import edges from the project RDF store, then flags
 * four issue types: circular imports (critical), import chains deeper than
 * 5 (medium), hub modules with more than 10 importers (medium), and orphan
 * modules with no edges at all (low).
 *
 * @param {Object} options
 * @param {Store} options.projectStore - Project RDF store
 * @returns {{ nodes: Map, issues: Array, summary: string, metrics: Object }}
 */
export function buildDependencyGraph(options) {
  const validated = DependencyGraphOptionsSchema.parse(options);
  const { projectStore } = validated;

  const nodes = new Map();
  const issues = [];

  // One graph node per non-test source file recorded in the store.
  const fileQuads = projectStore.getQuads(null, namedNode(NS.fs + 'relativePath'), null);
  const sourceFiles = fileQuads.map(q => q.object.value).filter(p => isSourceFile(p));

  for (const file of sourceFiles) {
    nodes.set(file, {
      file,
      imports: [],
      importedBy: [],
      depth: 0,
      isCircular: false,
    });
  }

  // Wire up edges from dep:imports quads; edges touching unknown files
  // (e.g. filtered-out test files) are silently dropped.
  const importQuads = projectStore.getQuads(null, namedNode(NS.dep + 'imports'), null);
  for (const quad of importQuads) {
    const fromFile = extractPathFromIri(quad.subject.value);
    const toFile = extractPathFromIri(quad.object.value);
    const fromNode = nodes.get(fromFile);
    const toNode = nodes.get(toFile);
    if (fromNode && toNode) {
      fromNode.imports.push(toFile);
      toNode.importedBy.push(fromFile);
    }
  }

  // No import data in the store at all: fall back to a directory-based
  // guess so downstream metrics are still meaningful.
  if (importQuads.length === 0) simulateImports(nodes, sourceFiles);

  // Assign each node its distance from the nearest entry point.
  calculateDepths(nodes);

  const circularDeps = findCircularDeps(nodes);
  for (const cycle of circularDeps) {
    issues.push({
      type: 'circular',
      files: cycle,
      severity: 'critical',
      description: 'Circular dependency: ' + cycle.join(' -> '),
      suggestion: 'Extract shared code to common module',
    });
    // Mark every participant so callers can style/filter cycle members.
    for (const file of cycle) {
      const node = nodes.get(file);
      if (node) node.isCircular = true;
    }
  }

  // Import chains deeper than 5 hops from an entry point.
  for (const [file, node] of nodes) {
    if (node.depth > 5) {
      issues.push({
        type: 'deep-nesting',
        files: [file],
        severity: 'medium',
        description: 'Deep import chain (depth: ' + node.depth + ')',
        suggestion: 'Flatten module structure',
      });
    }
  }

  // Hub modules: imported by more than 10 other modules.
  for (const [file, node] of nodes) {
    if (node.importedBy.length > 10) {
      issues.push({
        type: 'hub',
        files: [file],
        severity: 'medium',
        description: 'Hub module (' + node.importedBy.length + ' importers)',
        suggestion: 'Consider splitting',
      });
    }
  }

  // Orphans: no edges either way, unless the path looks like an entry point.
  for (const [file, node] of nodes) {
    if (node.imports.length === 0 && node.importedBy.length === 0 && !isEntryPoint(file)) {
      issues.push({
        type: 'orphan',
        files: [file],
        severity: 'low',
        description: 'Orphan module',
        suggestion: 'Review if needed',
      });
    }
  }

  // Most severe issues first (stable within a severity class).
  const severityOrder = { critical: 0, high: 1, medium: 2, low: 3 };
  issues.sort((a, b) => severityOrder[a.severity] - severityOrder[b.severity]);

  const totalNodes = nodes.size;
  const totalEdges = Array.from(nodes.values()).reduce((sum, n) => sum + n.imports.length, 0);
  const metrics = {
    totalModules: totalNodes,
    totalDependencies: totalEdges,
    avgDependencies: totalNodes > 0 ? Math.round((totalEdges / totalNodes) * 10) / 10 : 0,
    maxDepth: Math.max(...Array.from(nodes.values()).map(n => n.depth), 0),
    circularCount: circularDeps.length,
  };

  const summary =
    issues.length > 0
      ? issues.length + ' dependency issues (' + circularDeps.length + ' circular)'
      : 'Clean dependency graph (' + totalNodes + ' modules)';

  return { nodes, issues, summary, metrics };
}
|
|
152
|
-
|
|
153
|
-
/**
 * True when a path looks like application source: a .js/.jsx/.ts/.tsx/.mjs
 * file that is not a test or spec file.
 * @param {string} filePath
 * @returns {boolean}
 */
function isSourceFile(filePath) {
  if (filePath.includes('.test.') || filePath.includes('.spec.')) {
    return false;
  }
  return /\.(tsx?|jsx?|mjs)$/.test(filePath);
}
|
|
160
|
-
|
|
161
|
-
/**
 * Pull the relative file path out of a filesystem IRI.
 * Handles both the `fs#` and `filesystem#` namespace forms; returns the
 * input unchanged when neither fragment marker is present.
 * @param {string} iri
 * @returns {string}
 */
function extractPathFromIri(iri) {
  const fragment = iri.match(/(?:fs|filesystem)#(.+)$/);
  if (fragment === null) {
    return iri;
  }
  return decodeURIComponent(fragment[1]);
}
|
|
166
|
-
|
|
167
|
-
/**
 * Fabricate a plausible import graph when the store carries no real
 * dependency triples: within each directory, the index file (if any) is
 * assumed to import every sibling. Mutates the node records in place.
 * @param {Map} nodes - file -> dependency node
 * @param {string[]} sourceFiles - all source file paths
 */
function simulateImports(nodes, sourceFiles) {
  // Group files by their containing directory.
  const grouped = new Map();
  for (const file of sourceFiles) {
    const dir = file.split('/').slice(0, -1).join('/');
    const bucket = grouped.get(dir);
    if (bucket) {
      bucket.push(file);
    } else {
      grouped.set(dir, [file]);
    }
  }

  // The directory's index file becomes the importer of its siblings.
  for (const files of grouped.values()) {
    const indexFile = files.find(f => f.includes('index.'));
    if (!indexFile) continue;
    const indexNode = nodes.get(indexFile);
    if (!indexNode) continue;
    for (const sibling of files) {
      if (sibling === indexFile) continue;
      indexNode.imports.push(sibling);
      const siblingNode = nodes.get(sibling);
      if (siblingNode) siblingNode.importedBy.push(indexFile);
    }
  }
}
|
|
190
|
-
|
|
191
|
-
/**
 * Assign each node a depth: BFS distance from the nearest entry point
 * (a module nothing imports). Unreachable nodes keep depth 0. Mutates
 * node.depth in place.
 * NOTE(review): each file is visited once, so this is the SHORTEST import
 * chain to a node, not the longest — confirm that matches the
 * deep-nesting heuristic's intent.
 * @param {Map} nodes - file -> dependency node
 */
function calculateDepths(nodes) {
  const seen = new Set();
  const queue = [];
  for (const node of nodes.values()) {
    if (node.importedBy.length === 0) {
      queue.push({ file: node.file, depth: 0 });
    }
  }

  while (queue.length > 0) {
    const { file, depth } = queue.shift();
    if (seen.has(file)) continue;
    seen.add(file);
    const node = nodes.get(file);
    if (!node) continue;
    node.depth = Math.max(node.depth, depth);
    for (const child of node.imports) {
      if (!seen.has(child)) {
        queue.push({ file: child, depth: depth + 1 });
      }
    }
  }
}
|
|
210
|
-
|
|
211
|
-
/**
 * Detect import cycles with a depth-first search.
 * Each cycle is returned as an array of files closing back on the first
 * file (e.g. ['a', 'b', 'a']), reported once from the first member reached
 * in Map iteration order.
 * @param {Map} nodes - file -> dependency node
 * @returns {string[][]} detected cycles
 */
function findCircularDeps(nodes) {
  const cycles = [];
  const done = new Set();
  const onStack = new Set();

  const walk = (file, trail) => {
    if (onStack.has(file)) {
      // Back-edge: slice the trail from the repeated file to close the cycle.
      const start = trail.indexOf(file);
      if (start !== -1) cycles.push([...trail.slice(start), file]);
      return;
    }
    if (done.has(file)) return;
    done.add(file);
    onStack.add(file);
    trail.push(file);
    const node = nodes.get(file);
    if (node) {
      // Each child gets its own copy of the trail.
      for (const next of node.imports) walk(next, [...trail]);
    }
    onStack.delete(file);
  };

  for (const file of nodes.keys()) {
    walk(file, []);
  }
  return cycles;
}
|
|
236
|
-
|
|
237
|
-
/**
 * Heuristic: is this file a conventional entry point, so that having no
 * importers is expected rather than a sign of an orphan module?
 * @param {string} filePath
 * @returns {boolean}
 */
function isEntryPoint(filePath) {
  const markers = ['index.', 'main.', 'app.'];
  if (markers.some(marker => filePath.includes(marker))) {
    return true;
  }
  // Next.js-style route files.
  return filePath.endsWith('/page.tsx') || filePath.endsWith('/route.ts');
}
|
|
246
|
-
|
|
247
|
-
export { DependencyNodeSchema, DependencyIssueSchema };
|
|
248
|
-
|
|
249
|
-
// Alias exports for backwards compatibility with existing index.mjs
|
|
250
|
-
export const detectCircularDependencies = options => {
|
|
251
|
-
const result = buildDependencyGraph(options);
|
|
252
|
-
return result.issues.filter(i => i.type === 'circular');
|
|
253
|
-
};
|
|
254
|
-
export const topologicalSort = options => {
|
|
255
|
-
const result = buildDependencyGraph(options);
|
|
256
|
-
return Array.from(result.nodes.keys());
|
|
257
|
-
};
|
|
258
|
-
export const analyzeDependencyPath = (options, fromFile, toFile) => {
|
|
259
|
-
const _result = buildDependencyGraph(options);
|
|
260
|
-
return { from: fromFile, to: toFile, path: [], exists: false };
|
|
261
|
-
};
|
|
262
|
-
export const getTransitiveDependencies = (options, file) => {
|
|
263
|
-
const result = buildDependencyGraph(options);
|
|
264
|
-
const node = result.nodes.get(file);
|
|
265
|
-
return node ? node.imports : [];
|
|
266
|
-
};
|
|
267
|
-
export const getTransitiveDependents = (options, file) => {
|
|
268
|
-
const result = buildDependencyGraph(options);
|
|
269
|
-
const node = result.nodes.get(file);
|
|
270
|
-
return node ? node.importedBy : [];
|
|
271
|
-
};
|
|
272
|
-
export const calculateImpactScore = (options, file) => {
|
|
273
|
-
const result = buildDependencyGraph(options);
|
|
274
|
-
const node = result.nodes.get(file);
|
|
275
|
-
return node ? node.importedBy.length * 10 : 0;
|
|
276
|
-
};
|