arcvision 0.2.3 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +1017 -636
- package/package.json +12 -2
- package/arcvision.context.json +0 -533
- package/docs/blast-radius-implementation.md +0 -76
- package/docs/blast-radius.md +0 -44
- package/output1.json +0 -281
- package/output2.json +0 -281
- package/scan_output.txt +0 -0
- package/schema/arcvision_context_schema_v1.json +0 -84
- package/src/core/blastRadius.js +0 -249
- package/src/core/di-detector.js +0 -202
- package/src/core/method-tracker.js +0 -174
- package/src/core/parser-enhanced.js +0 -73
- package/src/core/parser.js +0 -343
- package/src/core/path-resolver.js +0 -174
- package/src/core/react-nextjs-detector.js +0 -245
- package/src/core/scanner.js +0 -518
- package/src/core/semantic-analyzer.js +0 -204
- package/src/core/tsconfig-utils.js +0 -35
- package/src/core/type-analyzer.js +0 -272
- package/src/core/watcher.js +0 -18
- package/src/engine/context_builder.js +0 -153
- package/src/engine/context_sorter.js +0 -41
- package/src/engine/context_validator.js +0 -75
- package/src/engine/id-generator.js +0 -16
- package/src/index.js +0 -325
- package/src/plugins/express-plugin.js +0 -48
- package/src/plugins/plugin-manager.js +0 -58
- package/src/plugins/react-plugin.js +0 -54
- package/test/determinism-test.js +0 -65
package/src/core/scanner.js
DELETED
|
@@ -1,518 +0,0 @@
|
|
|
1
|
-
const { glob } = require('glob');
|
|
2
|
-
const path = require('path');
|
|
3
|
-
const fs = require('fs');
|
|
4
|
-
const parser = require('./parser-enhanced'); // Use enhanced parser
|
|
5
|
-
const pluginManager = require('../plugins/plugin-manager');
|
|
6
|
-
const { loadTSConfig } = require('./tsconfig-utils');
|
|
7
|
-
const { resolveImport } = require('./path-resolver');
|
|
8
|
-
const { buildReverseDependencyGraph, computeBlastRadius } = require('./blastRadius');
|
|
9
|
-
|
|
10
|
-
// Import new engine modules
|
|
11
|
-
const { buildContext } = require('../engine/context_builder');
|
|
12
|
-
const { validateContext } = require('../engine/context_validator');
|
|
13
|
-
const { sortContext } = require('../engine/context_sorter');
|
|
14
|
-
|
|
15
|
-
// Import semantic analyzer for cross-file analysis
|
|
16
|
-
const { analyzeSemantics } = require('./semantic-analyzer');
|
|
17
|
-
|
|
18
|
-
async function scan(directory) {
|
|
19
|
-
// Normalize helper
|
|
20
|
-
const normalize = p => p.replace(/\\/g, '/');
|
|
21
|
-
|
|
22
|
-
const options = {
|
|
23
|
-
ignore: ['**/node_modules/**', '**/.git/**', '**/dist/**', '**/build/**'],
|
|
24
|
-
cwd: directory,
|
|
25
|
-
absolute: true
|
|
26
|
-
};
|
|
27
|
-
|
|
28
|
-
try {
|
|
29
|
-
// Load plugins
|
|
30
|
-
const pluginDir = path.join(__dirname, '../plugins');
|
|
31
|
-
pluginManager.loadPluginsFromDirectory(pluginDir);
|
|
32
|
-
|
|
33
|
-
const files = await glob('**/*.{js,jsx,ts,tsx,json}', { ...options, ignore: [...options.ignore, '**/*.d.ts', '**/node_modules/**', '**/.git/**', '**/dist/**', '**/build/**', '**/.next/**', '**/coverage/**'] });
|
|
34
|
-
|
|
35
|
-
const architectureMap = {
|
|
36
|
-
nodes: [],
|
|
37
|
-
edges: []
|
|
38
|
-
};
|
|
39
|
-
|
|
40
|
-
const fileMap = new Map();
|
|
41
|
-
let totalImportsFound = 0; // Track total imports found
|
|
42
|
-
|
|
43
|
-
// Process files with plugins
|
|
44
|
-
for (const file of files) {
|
|
45
|
-
try {
|
|
46
|
-
const relativePath = path.relative(directory, file);
|
|
47
|
-
const normalizedRelativePath = normalize(relativePath);
|
|
48
|
-
|
|
49
|
-
// Handle different file types appropriately
|
|
50
|
-
let metadata;
|
|
51
|
-
|
|
52
|
-
if (file.endsWith('.json')) {
|
|
53
|
-
// Handle JSON files separately
|
|
54
|
-
try {
|
|
55
|
-
const content = fs.readFileSync(file, 'utf-8');
|
|
56
|
-
JSON.parse(content); // Validate it's valid JSON
|
|
57
|
-
metadata = {
|
|
58
|
-
id: file,
|
|
59
|
-
imports: [],
|
|
60
|
-
exports: [],
|
|
61
|
-
functions: [],
|
|
62
|
-
apiCalls: [],
|
|
63
|
-
isJson: true
|
|
64
|
-
};
|
|
65
|
-
} catch (jsonError) {
|
|
66
|
-
// If JSON is invalid, skip this file
|
|
67
|
-
console.warn(`⚠️ Invalid JSON in ${file} — file skipped`);
|
|
68
|
-
continue;
|
|
69
|
-
}
|
|
70
|
-
} else {
|
|
71
|
-
// Parse code files
|
|
72
|
-
metadata = parser.parseFile(file);
|
|
73
|
-
}
|
|
74
|
-
|
|
75
|
-
// Process with plugins
|
|
76
|
-
metadata = await pluginManager.processFile(file, metadata);
|
|
77
|
-
|
|
78
|
-
// Count imports found in this file
|
|
79
|
-
if (metadata.imports && Array.isArray(metadata.imports)) {
|
|
80
|
-
totalImportsFound += metadata.imports.length;
|
|
81
|
-
}
|
|
82
|
-
|
|
83
|
-
const node = {
|
|
84
|
-
id: normalizedRelativePath,
|
|
85
|
-
type: 'file',
|
|
86
|
-
metadata: metadata
|
|
87
|
-
};
|
|
88
|
-
|
|
89
|
-
architectureMap.nodes.push(node);
|
|
90
|
-
fileMap.set(normalizedRelativePath, metadata);
|
|
91
|
-
} catch (e) {
|
|
92
|
-
// Log a clear, user-friendly warning message and continue scanning
|
|
93
|
-
console.warn(`⚠️ Failed to process ${file} — file skipped, you should check manually (${e.message})`);
|
|
94
|
-
}
|
|
95
|
-
}
|
|
96
|
-
|
|
97
|
-
// NEW: Perform semantic analysis to build cross-file usage edges
|
|
98
|
-
console.log('Performing semantic analysis...');
|
|
99
|
-
const semanticResults = analyzeSemantics(architectureMap.nodes);
|
|
100
|
-
console.log(`Semantic analysis complete: ${semanticResults.stats.totalSymbols} symbols, ${semanticResults.stats.totalUsages} usage edges`);
|
|
101
|
-
|
|
102
|
-
// Load tsconfig for path resolution
|
|
103
|
-
const tsconfig = loadTSConfig(directory);
|
|
104
|
-
|
|
105
|
-
// Create a mapping of all possible normalized paths that could match imports
|
|
106
|
-
// This includes both the direct file paths and possible variations
|
|
107
|
-
const allPossiblePaths = new Set();
|
|
108
|
-
const pathToNodeIdMap = new Map(); // This maps normalized paths to node IDs
|
|
109
|
-
|
|
110
|
-
architectureMap.nodes.forEach(node => {
|
|
111
|
-
const normalizedPath = normalize(node.id);
|
|
112
|
-
allPossiblePaths.add(normalizedPath);
|
|
113
|
-
pathToNodeIdMap.set(normalizedPath, node.id);
|
|
114
|
-
});
|
|
115
|
-
|
|
116
|
-
// Process imports to create edges
|
|
117
|
-
let unresolvedImports = 0;
|
|
118
|
-
let resolvedImports = 0;
|
|
119
|
-
architectureMap.nodes.forEach(node => {
|
|
120
|
-
if (node.metadata.imports && Array.isArray(node.metadata.imports)) {
|
|
121
|
-
node.metadata.imports.forEach(imp => {
|
|
122
|
-
if (imp.source && typeof imp.source === 'string') {
|
|
123
|
-
let targetFound = false;
|
|
124
|
-
|
|
125
|
-
try {
|
|
126
|
-
// First, check if imp.source directly matches any node path (exact match)
|
|
127
|
-
if (allPossiblePaths.has(imp.source)) {
|
|
128
|
-
architectureMap.edges.push({
|
|
129
|
-
source: normalize(node.id),
|
|
130
|
-
target: imp.source,
|
|
131
|
-
type: imp.type || 'import'
|
|
132
|
-
});
|
|
133
|
-
resolvedImports++;
|
|
134
|
-
targetFound = true;
|
|
135
|
-
}
|
|
136
|
-
|
|
137
|
-
if (!targetFound) {
|
|
138
|
-
// Try to resolve the import path using the resolver
|
|
139
|
-
const resolvedPath = resolveImport(
|
|
140
|
-
imp.source,
|
|
141
|
-
path.join(directory, node.id),
|
|
142
|
-
directory,
|
|
143
|
-
tsconfig
|
|
144
|
-
);
|
|
145
|
-
|
|
146
|
-
if (resolvedPath) {
|
|
147
|
-
// Convert resolved absolute path back to relative path
|
|
148
|
-
const relativeResolvedPath = path.relative(directory, resolvedPath);
|
|
149
|
-
const normalizedResolvedPath = normalize(relativeResolvedPath);
|
|
150
|
-
|
|
151
|
-
// Check if the resolved file exists in our scanned files
|
|
152
|
-
if (allPossiblePaths.has(normalizedResolvedPath)) {
|
|
153
|
-
architectureMap.edges.push({
|
|
154
|
-
source: normalize(node.id),
|
|
155
|
-
target: normalizedResolvedPath,
|
|
156
|
-
type: imp.type || 'import'
|
|
157
|
-
});
|
|
158
|
-
resolvedImports++;
|
|
159
|
-
targetFound = true;
|
|
160
|
-
}
|
|
161
|
-
}
|
|
162
|
-
}
|
|
163
|
-
|
|
164
|
-
if (!targetFound) {
|
|
165
|
-
// As a fallback, check if the import path could match by simple relative path calculation
|
|
166
|
-
// For example, if node is 'src/core/scanner.js' and import is './parser.js',
|
|
167
|
-
// it should resolve to 'src/core/parser.js'
|
|
168
|
-
try {
|
|
169
|
-
const baseDir = path.dirname(path.join(directory, node.id));
|
|
170
|
-
const calculatedAbsolutePath = path.resolve(baseDir, imp.source);
|
|
171
|
-
const calculatedRelativePath = path.relative(directory, calculatedAbsolutePath);
|
|
172
|
-
const calculatedNormalizedPath = normalize(calculatedRelativePath);
|
|
173
|
-
|
|
174
|
-
if (allPossiblePaths.has(calculatedNormalizedPath)) {
|
|
175
|
-
architectureMap.edges.push({
|
|
176
|
-
source: normalize(node.id),
|
|
177
|
-
target: calculatedNormalizedPath,
|
|
178
|
-
type: imp.type || 'import'
|
|
179
|
-
});
|
|
180
|
-
resolvedImports++;
|
|
181
|
-
targetFound = true;
|
|
182
|
-
}
|
|
183
|
-
} catch (e) {
|
|
184
|
-
console.warn(`⚠️ Path calculation failed for import '${imp.source}' in file '${node.id}': ${e.message}`);
|
|
185
|
-
// If path calculation fails, continue to unresolved
|
|
186
|
-
}
|
|
187
|
-
}
|
|
188
|
-
|
|
189
|
-
if (!targetFound) {
|
|
190
|
-
// Another fallback: if import starts with ./ or ../, try to find a match
|
|
191
|
-
// by appending common extensions to the import path
|
|
192
|
-
if (imp.source.startsWith('./') || imp.source.startsWith('../')) {
|
|
193
|
-
// Try various extensions that might match
|
|
194
|
-
const extensions = ['', '.js', '.ts', '.jsx', '.tsx', '.mjs', '.cjs'];
|
|
195
|
-
|
|
196
|
-
for (const ext of extensions) {
|
|
197
|
-
let testPath = imp.source;
|
|
198
|
-
if (!imp.source.endsWith(ext)) {
|
|
199
|
-
testPath = imp.source + ext;
|
|
200
|
-
}
|
|
201
|
-
|
|
202
|
-
// Calculate the relative path from the project directory
|
|
203
|
-
try {
|
|
204
|
-
const baseDir = path.dirname(path.join(directory, node.id));
|
|
205
|
-
const calculatedAbsolutePath = path.resolve(baseDir, testPath);
|
|
206
|
-
const calculatedRelativePath = path.relative(directory, calculatedAbsolutePath);
|
|
207
|
-
const calculatedNormalizedPath = normalize(calculatedRelativePath);
|
|
208
|
-
|
|
209
|
-
if (allPossiblePaths.has(calculatedNormalizedPath)) {
|
|
210
|
-
architectureMap.edges.push({
|
|
211
|
-
source: normalize(node.id),
|
|
212
|
-
target: calculatedNormalizedPath,
|
|
213
|
-
type: imp.type || 'import'
|
|
214
|
-
});
|
|
215
|
-
resolvedImports++;
|
|
216
|
-
targetFound = true;
|
|
217
|
-
break; // Found a match, exit the loop
|
|
218
|
-
}
|
|
219
|
-
} catch (e) {
|
|
220
|
-
console.warn(`⚠️ Extension path calculation failed for import '${imp.source}' with extension '${ext}' in file '${node.id}': ${e.message}`);
|
|
221
|
-
// Continue to next extension if path calculation fails
|
|
222
|
-
}
|
|
223
|
-
}
|
|
224
|
-
}
|
|
225
|
-
}
|
|
226
|
-
|
|
227
|
-
if (!targetFound) {
|
|
228
|
-
// Additional fallback: Check for index files when importing a directory
|
|
229
|
-
if (imp.source.startsWith('./') || imp.source.startsWith('../')) {
|
|
230
|
-
try {
|
|
231
|
-
const baseDir = path.dirname(path.join(directory, node.id));
|
|
232
|
-
const calculatedAbsolutePath = path.resolve(baseDir, imp.source);
|
|
233
|
-
const directoryPath = calculatedAbsolutePath;
|
|
234
|
-
|
|
235
|
-
// Check for index files in the directory
|
|
236
|
-
const indexFiles = ['index.js', 'index.ts', 'index.jsx', 'index.tsx', 'index.mjs', 'index.cjs'];
|
|
237
|
-
for (const indexFile of indexFiles) {
|
|
238
|
-
const indexPath = path.join(directoryPath, indexFile);
|
|
239
|
-
const indexRelativePath = path.relative(directory, indexPath);
|
|
240
|
-
const indexNormalizedPath = normalize(indexRelativePath);
|
|
241
|
-
|
|
242
|
-
if (allPossiblePaths.has(indexNormalizedPath)) {
|
|
243
|
-
architectureMap.edges.push({
|
|
244
|
-
source: normalize(node.id),
|
|
245
|
-
target: indexNormalizedPath,
|
|
246
|
-
type: imp.type || 'import'
|
|
247
|
-
});
|
|
248
|
-
resolvedImports++;
|
|
249
|
-
targetFound = true;
|
|
250
|
-
break; // Found a match, exit the loop
|
|
251
|
-
}
|
|
252
|
-
}
|
|
253
|
-
} catch (e) {
|
|
254
|
-
console.warn(`⚠️ Index file check failed for import '${imp.source}' in file '${node.id}': ${e.message}`);
|
|
255
|
-
// Continue if index file check fails
|
|
256
|
-
}
|
|
257
|
-
}
|
|
258
|
-
}
|
|
259
|
-
|
|
260
|
-
if (!targetFound) {
|
|
261
|
-
// Additional fallback: Check for absolute imports that might map to common directories
|
|
262
|
-
// For example, if tsconfig has paths like "@/*": ["src/*"], an import like "@/utils/helper"
|
|
263
|
-
// should resolve to "src/utils/helper.js" or similar
|
|
264
|
-
if (imp.source.startsWith('@') || imp.source.startsWith('/') || !imp.source.startsWith('.')) {
|
|
265
|
-
// Try to match against common source directories
|
|
266
|
-
const commonSourceDirs = ['src', 'app', 'lib', 'components', 'utils', 'services', 'assets'];
|
|
267
|
-
|
|
268
|
-
for (const srcDir of commonSourceDirs) {
|
|
269
|
-
// Try appending to common source directories
|
|
270
|
-
const potentialPath = path.join(srcDir, imp.source);
|
|
271
|
-
const potentialRelativePath = path.relative(directory, path.resolve(directory, potentialPath));
|
|
272
|
-
const potentialNormalizedPath = normalize(potentialRelativePath);
|
|
273
|
-
|
|
274
|
-
if (allPossiblePaths.has(potentialNormalizedPath)) {
|
|
275
|
-
architectureMap.edges.push({
|
|
276
|
-
source: normalize(node.id),
|
|
277
|
-
target: potentialNormalizedPath,
|
|
278
|
-
type: imp.type || 'import'
|
|
279
|
-
});
|
|
280
|
-
resolvedImports++;
|
|
281
|
-
targetFound = true;
|
|
282
|
-
break; // Found a match, exit the loop
|
|
283
|
-
}
|
|
284
|
-
|
|
285
|
-
// Also try with extensions
|
|
286
|
-
const extensions = ['.js', '.ts', '.jsx', '.tsx', '.mjs', '.cjs',
|
|
287
|
-
'index.js', 'index.ts', 'index.jsx', 'index.tsx', 'index.mjs', 'index.cjs'];
|
|
288
|
-
|
|
289
|
-
for (const ext of extensions) {
|
|
290
|
-
let testPath;
|
|
291
|
-
if (ext.startsWith('index')) {
|
|
292
|
-
testPath = path.join(srcDir, imp.source, ext);
|
|
293
|
-
} else {
|
|
294
|
-
testPath = path.join(srcDir, imp.source + ext);
|
|
295
|
-
}
|
|
296
|
-
|
|
297
|
-
const testRelativePath = path.relative(directory, path.resolve(directory, testPath));
|
|
298
|
-
const testNormalizedPath = normalize(testRelativePath);
|
|
299
|
-
|
|
300
|
-
if (allPossiblePaths.has(testNormalizedPath)) {
|
|
301
|
-
architectureMap.edges.push({
|
|
302
|
-
source: normalize(node.id),
|
|
303
|
-
target: testNormalizedPath,
|
|
304
|
-
type: imp.type || 'import'
|
|
305
|
-
});
|
|
306
|
-
resolvedImports++;
|
|
307
|
-
targetFound = true;
|
|
308
|
-
break; // Found a match, exit the loops
|
|
309
|
-
}
|
|
310
|
-
}
|
|
311
|
-
|
|
312
|
-
if (targetFound) break;
|
|
313
|
-
}
|
|
314
|
-
}
|
|
315
|
-
}
|
|
316
|
-
|
|
317
|
-
if (!targetFound) {
|
|
318
|
-
// Additional fallback: Try to handle barrel files (index.js that exports from other files)
|
|
319
|
-
// If import is '@/components/Header' but only '@/components/index.js' exists
|
|
320
|
-
if (imp.source.startsWith('@') || imp.source.startsWith('/')) {
|
|
321
|
-
try {
|
|
322
|
-
// Try to find a directory with an index file that matches
|
|
323
|
-
const parts = imp.source.split('/');
|
|
324
|
-
if (parts.length > 1) {
|
|
325
|
-
// Remove the last part and add index
|
|
326
|
-
const dirParts = parts.slice(0, -1);
|
|
327
|
-
const fileName = parts[parts.length - 1];
|
|
328
|
-
|
|
329
|
-
// Look for patterns like: components/index.js exporting { Header }
|
|
330
|
-
// or components/index.js containing export * from './Header'
|
|
331
|
-
const dirPath = dirParts.join('/') + '/index';
|
|
332
|
-
const indexFiles = ['index.js', 'index.ts', 'index.jsx', 'index.tsx', 'index.mjs', 'index.cjs'];
|
|
333
|
-
|
|
334
|
-
for (const indexFile of indexFiles) {
|
|
335
|
-
const indexPath = path.join(dirParts.join('/'), indexFile);
|
|
336
|
-
const indexRelativePath = path.relative(directory, path.resolve(directory, indexPath));
|
|
337
|
-
const indexNormalizedPath = normalize(indexRelativePath);
|
|
338
|
-
|
|
339
|
-
if (allPossiblePaths.has(indexNormalizedPath)) {
|
|
340
|
-
architectureMap.edges.push({
|
|
341
|
-
source: normalize(node.id),
|
|
342
|
-
target: indexNormalizedPath,
|
|
343
|
-
type: imp.type || 'import'
|
|
344
|
-
});
|
|
345
|
-
resolvedImports++;
|
|
346
|
-
targetFound = true;
|
|
347
|
-
break; // Found a match, exit the loop
|
|
348
|
-
}
|
|
349
|
-
}
|
|
350
|
-
}
|
|
351
|
-
} catch (e) {
|
|
352
|
-
console.warn(`⚠️ Barrel file check failed for import '${imp.source}' in file '${node.id}': ${e.message}`);
|
|
353
|
-
// Continue if barrel file check fails
|
|
354
|
-
}
|
|
355
|
-
}
|
|
356
|
-
}
|
|
357
|
-
|
|
358
|
-
if (!targetFound) {
|
|
359
|
-
// Additional fallback: Check for exact path matches with different capitalization
|
|
360
|
-
// This handles cases where import uses different casing than actual file
|
|
361
|
-
for (const possiblePath of allPossiblePaths) {
|
|
362
|
-
if (possiblePath.toLowerCase() === imp.source.toLowerCase()) {
|
|
363
|
-
architectureMap.edges.push({
|
|
364
|
-
source: normalize(node.id),
|
|
365
|
-
target: possiblePath,
|
|
366
|
-
type: imp.type || 'import'
|
|
367
|
-
});
|
|
368
|
-
resolvedImports++;
|
|
369
|
-
targetFound = true;
|
|
370
|
-
break; // Found a case-insensitive match
|
|
371
|
-
}
|
|
372
|
-
}
|
|
373
|
-
}
|
|
374
|
-
} catch (e) {
|
|
375
|
-
console.error(`❌ Critical error processing import '${imp.source}' in file '${node.id}': ${e.message}`);
|
|
376
|
-
console.error(e.stack);
|
|
377
|
-
// Even if there's an error processing this import, increment unresolved counter
|
|
378
|
-
unresolvedImports++;
|
|
379
|
-
}
|
|
380
|
-
|
|
381
|
-
if (!targetFound) {
|
|
382
|
-
// Track imports that couldn't be matched to existing nodes
|
|
383
|
-
unresolvedImports++;
|
|
384
|
-
}
|
|
385
|
-
}
|
|
386
|
-
});
|
|
387
|
-
}
|
|
388
|
-
});
|
|
389
|
-
console.log('RESOLVED IMPORTS:', resolvedImports);
|
|
390
|
-
console.log('UNRESOLVED IMPORTS:', unresolvedImports);
|
|
391
|
-
console.log('IMPORT EDGES CREATED:', architectureMap.edges.length);
|
|
392
|
-
|
|
393
|
-
// NEW: Add semantic usage edges
|
|
394
|
-
if (semanticResults && semanticResults.usageEdges) {
|
|
395
|
-
semanticResults.usageEdges.forEach(usageEdge => {
|
|
396
|
-
// Only add if both source and target exist in our nodes
|
|
397
|
-
const sourceExists = architectureMap.nodes.some(n => n.id === usageEdge.source);
|
|
398
|
-
const targetExists = architectureMap.nodes.some(n => n.id === usageEdge.target);
|
|
399
|
-
|
|
400
|
-
if (sourceExists && targetExists) {
|
|
401
|
-
architectureMap.edges.push({
|
|
402
|
-
source: normalize(usageEdge.source),
|
|
403
|
-
target: normalize(usageEdge.target),
|
|
404
|
-
type: usageEdge.type // 'function_call', 'constructor_call', 'component_usage'
|
|
405
|
-
});
|
|
406
|
-
}
|
|
407
|
-
});
|
|
408
|
-
}
|
|
409
|
-
console.log('TOTAL EDGES (with usage):', architectureMap.edges.length);
|
|
410
|
-
|
|
411
|
-
// Calculate blast radius for each file
|
|
412
|
-
const reverseGraph = buildReverseDependencyGraph(architectureMap);
|
|
413
|
-
const blastRadiusMap = computeBlastRadius(reverseGraph);
|
|
414
|
-
|
|
415
|
-
// Add blast_radius to each node
|
|
416
|
-
architectureMap.nodes.forEach(node => {
|
|
417
|
-
node.metadata.blast_radius = blastRadiusMap[node.id] || 0;
|
|
418
|
-
});
|
|
419
|
-
|
|
420
|
-
console.log('BEFORE DEDUPLICATION EDGES:', architectureMap.edges.length);
|
|
421
|
-
|
|
422
|
-
// Process edges to ensure they match existing nodes and remove duplicates
|
|
423
|
-
const validEdges = [];
|
|
424
|
-
const edgeSet = new Set(); // To track unique edges
|
|
425
|
-
|
|
426
|
-
architectureMap.edges.forEach(edge => {
|
|
427
|
-
// Normalize both source and target paths for consistent matching
|
|
428
|
-
const normalizedSource = normalize(edge.source);
|
|
429
|
-
const normalizedTarget = normalize(edge.target);
|
|
430
|
-
|
|
431
|
-
// Create a unique key for the edge to avoid duplicates
|
|
432
|
-
const edgeKey = `${normalizedSource}→${normalizedTarget}`;
|
|
433
|
-
|
|
434
|
-
// Check if both source and target nodes exist in our architecture map
|
|
435
|
-
const sourceNodeExists = architectureMap.nodes.some(node => node.id === normalizedSource);
|
|
436
|
-
const targetNodeExists = architectureMap.nodes.some(node => node.id === normalizedTarget);
|
|
437
|
-
|
|
438
|
-
if (sourceNodeExists && targetNodeExists && !edgeSet.has(edgeKey)) {
|
|
439
|
-
edgeSet.add(edgeKey);
|
|
440
|
-
validEdges.push({
|
|
441
|
-
source: normalizedSource,
|
|
442
|
-
target: normalizedTarget,
|
|
443
|
-
type: edge.type
|
|
444
|
-
});
|
|
445
|
-
}
|
|
446
|
-
});
|
|
447
|
-
|
|
448
|
-
console.log('AFTER DEDUPLICATION EDGES:', validEdges.length);
|
|
449
|
-
|
|
450
|
-
// Replace the edges with only valid ones
|
|
451
|
-
architectureMap.edges = validEdges;
|
|
452
|
-
|
|
453
|
-
// Add top blast radius files to the architecture map
|
|
454
|
-
const totalFiles = architectureMap.nodes.length;
|
|
455
|
-
const { computeBlastRadiusWithPercentage, analyzeCriticality } = require('./blastRadius');
|
|
456
|
-
const allFilesWithPercentage = computeBlastRadiusWithPercentage(blastRadiusMap, totalFiles);
|
|
457
|
-
|
|
458
|
-
// Analyze criticality of files
|
|
459
|
-
const criticalityAnalysis = analyzeCriticality(blastRadiusMap, reverseGraph, architectureMap.nodes);
|
|
460
|
-
|
|
461
|
-
// Sort by blast radius descending and take top 3
|
|
462
|
-
const topFiles = allFilesWithPercentage.slice(0, 3);
|
|
463
|
-
|
|
464
|
-
architectureMap.contextSurface = {
|
|
465
|
-
topBlastRadiusFiles: topFiles,
|
|
466
|
-
criticalityAnalysis: criticalityAnalysis.slice(0, 5) // Top 5 most critical files
|
|
467
|
-
};
|
|
468
|
-
|
|
469
|
-
// Build the new context object that conforms to the schema
|
|
470
|
-
let context = buildContext(architectureMap.nodes, architectureMap.edges, {
|
|
471
|
-
directory: directory,
|
|
472
|
-
projectName: path.basename(directory),
|
|
473
|
-
language: 'javascript',
|
|
474
|
-
contextSurface: architectureMap.contextSurface
|
|
475
|
-
});
|
|
476
|
-
|
|
477
|
-
console.log('Validating structural context...');
|
|
478
|
-
|
|
479
|
-
// Validate the context against the schema
|
|
480
|
-
let validation = validateContext(context);
|
|
481
|
-
|
|
482
|
-
if (!validation.valid) {
|
|
483
|
-
console.error('VALIDATION FAILED (Attempt 1)');
|
|
484
|
-
validation.errors.forEach(e => console.error(' -', e));
|
|
485
|
-
|
|
486
|
-
console.log('Re-running generation deterministically...');
|
|
487
|
-
|
|
488
|
-
// Rebuild context deterministically
|
|
489
|
-
context = buildContext(architectureMap.nodes, architectureMap.edges, {
|
|
490
|
-
directory: directory,
|
|
491
|
-
projectName: path.basename(directory),
|
|
492
|
-
language: 'javascript',
|
|
493
|
-
contextSurface: architectureMap.contextSurface
|
|
494
|
-
});
|
|
495
|
-
|
|
496
|
-
validation = validateContext(context);
|
|
497
|
-
|
|
498
|
-
if (!validation.valid) {
|
|
499
|
-
console.error('VALIDATION FAILED (Attempt 2)');
|
|
500
|
-
validation.errors.forEach(e => console.error(' -', e));
|
|
501
|
-
console.error('ABORTING. JSON NOT SAVED.');
|
|
502
|
-
process.exit(1);
|
|
503
|
-
}
|
|
504
|
-
}
|
|
505
|
-
|
|
506
|
-
// Sort the context deterministically
|
|
507
|
-
const sortedContext = sortContext(context);
|
|
508
|
-
|
|
509
|
-
console.log('STRUCTURAL CONTEXT VALIDATED — READY FOR UPLOAD');
|
|
510
|
-
|
|
511
|
-
return sortedContext;
|
|
512
|
-
|
|
513
|
-
} catch (err) {
|
|
514
|
-
throw err;
|
|
515
|
-
}
|
|
516
|
-
}
|
|
517
|
-
|
|
518
|
-
// Public API: the directory scanner entry point.
module.exports = { scan };
|