arcvision 0.2.2 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +1690 -439
- package/package.json +12 -2
- package/arcvision.context.json +0 -7870
- package/docs/blast-radius-implementation.md +0 -76
- package/docs/blast-radius.md +0 -44
- package/output1.json +0 -281
- package/output2.json +0 -281
- package/scan_output.txt +0 -0
- package/schema/arcvision_context_schema_v1.json +0 -84
- package/src/core/blastRadius.js +0 -249
- package/src/core/parser.js +0 -343
- package/src/core/path-resolver.js +0 -174
- package/src/core/scanner.js +0 -492
- package/src/core/tsconfig-utils.js +0 -35
- package/src/core/watcher.js +0 -18
- package/src/engine/context_builder.js +0 -130
- package/src/engine/context_sorter.js +0 -41
- package/src/engine/context_validator.js +0 -75
- package/src/engine/id-generator.js +0 -16
- package/src/index.js +0 -325
- package/src/plugins/express-plugin.js +0 -48
- package/src/plugins/plugin-manager.js +0 -58
- package/src/plugins/react-plugin.js +0 -54
- package/test/determinism-test.js +0 -65
|
@@ -1,35 +0,0 @@
|
|
|
1
|
-
const fs = require('fs');
|
|
2
|
-
const path = require('path');
|
|
3
|
-
|
|
4
|
-
/**
 * Load and parse tsconfig.json (or jsconfig.json) from a project root.
 *
 * tsconfig.json is JSONC, not strict JSON: TypeScript tooling allows line
 * and block comments plus trailing commas. The previous version fed the raw
 * text straight to JSON.parse, so any commented tsconfig failed to load.
 * Comments and trailing commas are now stripped (string-literal aware)
 * before parsing.
 *
 * @param {string} projectRoot - The root directory of the project
 * @returns {Object|null} The compilerOptions from the config file, {} when
 *   the file has no compilerOptions, or null when no config file exists or
 *   the first config found cannot be parsed.
 */
function loadTSConfig(projectRoot) {
  const tsconfigPaths = [
    path.join(projectRoot, 'tsconfig.json'),
    path.join(projectRoot, 'jsconfig.json')
  ];

  for (const tsconfigPath of tsconfigPaths) {
    if (!fs.existsSync(tsconfigPath)) {
      continue;
    }
    try {
      let content = fs.readFileSync(tsconfigPath, 'utf-8');
      // Remove BOM if present (0xFEFF)
      if (content.charCodeAt(0) === 0xFEFF) {
        content = content.slice(1);
      }
      const raw = JSON.parse(stripJsonc(content));
      return raw.compilerOptions || {};
    } catch (error) {
      console.warn(`Warning: Could not parse ${tsconfigPath}:`, error.message);
      // Match previous behavior: a broken first config aborts the search.
      return null;
    }
  }

  return null;
}

/**
 * Convert JSONC text to strict JSON by removing comments and trailing
 * commas. String literals are respected, so "//" inside a value survives.
 * @param {string} text - JSONC source text
 * @returns {string} Strict-JSON equivalent of the input
 */
function stripJsonc(text) {
  let out = '';
  let inString = false;
  let i = 0;
  while (i < text.length) {
    const ch = text[i];
    if (inString) {
      out += ch;
      if (ch === '\\') {
        // Copy the escaped character verbatim (handles \" inside strings).
        out += text[i + 1] || '';
        i += 2;
        continue;
      }
      if (ch === '"') {
        inString = false;
      }
      i += 1;
      continue;
    }
    if (ch === '"') {
      inString = true;
      out += ch;
      i += 1;
      continue;
    }
    if (ch === '/' && text[i + 1] === '/') {
      // Line comment: skip to end of line (keep the newline itself).
      while (i < text.length && text[i] !== '\n') i += 1;
      continue;
    }
    if (ch === '/' && text[i + 1] === '*') {
      // Block comment: skip to the closing delimiter.
      i += 2;
      while (i < text.length && !(text[i] === '*' && text[i + 1] === '/')) i += 1;
      i += 2;
      continue;
    }
    out += ch;
    i += 1;
  }
  return removeTrailingCommas(out);
}

/**
 * Drop commas that directly precede a closing `}` or `]` (ignoring
 * whitespace), outside of string literals.
 * @param {string} text - Comment-free JSON-ish text
 * @returns {string} Text safe for JSON.parse with respect to trailing commas
 */
function removeTrailingCommas(text) {
  let out = '';
  let inString = false;
  for (let i = 0; i < text.length; i += 1) {
    const ch = text[i];
    if (inString) {
      out += ch;
      if (ch === '\\') {
        out += text[i + 1] || '';
        i += 1;
        continue;
      }
      if (ch === '"') inString = false;
      continue;
    }
    if (ch === '"') {
      inString = true;
      out += ch;
      continue;
    }
    if (ch === ',') {
      let j = i + 1;
      while (j < text.length && ' \t\r\n'.includes(text[j])) j += 1;
      if (text[j] === '}' || text[j] === ']') continue; // trailing comma: drop
    }
    out += ch;
  }
  return out;
}
|
|
34
|
-
|
|
35
|
-
module.exports = { loadTSConfig };
|
package/src/core/watcher.js
DELETED
|
@@ -1,18 +0,0 @@
|
|
|
1
|
-
const chokidar = require('chokidar');
|
|
2
|
-
|
|
3
|
-
/**
 * Start a chokidar watcher over a directory and forward file events.
 * Dotfiles and anything under node_modules are ignored; the initial scan
 * does not fire events (ignoreInitial).
 * @param {string} directory - Directory tree to watch.
 * @param {Function} callback - Invoked as callback(eventName, filePath) for
 *   'add', 'change' and 'unlink' events.
 * @returns {Object} The chokidar watcher instance (caller owns its lifetime).
 */
function watch(directory, callback) {
  const watcher = chokidar.watch(directory, {
    ignored: [/(^|[\/\\])\../, '**/node_modules/**'], // ignore dotfiles
    persistent: true,
    ignoreInitial: true
  });

  // Wire the three file-level events through the same forwarding shape.
  for (const eventName of ['add', 'change', 'unlink']) {
    watcher.on(eventName, (filePath) => callback(eventName, filePath));
  }

  return watcher;
}
|
|
17
|
-
|
|
18
|
-
module.exports = { watch };
|
|
@@ -1,130 +0,0 @@
|
|
|
1
|
-
const path = require('path');
|
|
2
|
-
const { stableId } = require('./id-generator');
|
|
3
|
-
|
|
4
|
-
/**
 * Build the context object that conforms to the Arcvision schema.
 *
 * @param {Array} files - List of files from scanner; each entry carries an
 *   `id` (already the normalized relative path) and a `metadata` object.
 * @param {Array} edges - List of edges from scanner ({source, target, type}).
 * @param {Object} options - Options including directory path and project name.
 * @returns {Object} Context object conforming to the schema.
 */
function buildContext(files, edges, options = {}) {
  const {
    directory = '.',
    projectName = 'arcvision',
    language = 'javascript'
  } = options;

  const nodes = files.map(file => buildNode(file));

  // Index nodes by path once so edge resolution is O(1) per lookup.
  // (Previously each edge did two linear nodes.find() scans — O(n*m).)
  const nodesByPath = new Map(nodes.map(node => [node.path, node]));

  const schemaEdges = [];
  for (const edge of edges) {
    const sourceNode = nodesByPath.get(edge.source);
    const targetNode = nodesByPath.get(edge.target);
    // Only edges whose source AND target resolve to known nodes are
    // emitted; edges to external targets (e.g. node_modules packages) are
    // skipped to keep the output schema-compliant.
    if (!sourceNode || !targetNode) {
      continue;
    }
    schemaEdges.push({
      from: sourceNode.id,
      to: targetNode.id,
      relation: normalizeRelation(edge.type)
    });
  }

  // Metrics are computed exactly once (the old code built them, then
  // recomputed/overwrote three of them after the context was assembled).
  const metrics = {
    total_files: nodes.length,
    total_nodes: nodes.length,
    total_edges: schemaEdges.length,
    total_imports: schemaEdges.length,
    files_with_functions: nodes.filter(n => n.role === 'Implementation').length,
    files_with_high_blast_radius: nodes.filter(n => n.blast_radius > 5).length,
    total_dependencies: schemaEdges.length
  };

  return {
    schema_version: '1.0.0',
    generated_at: new Date().toISOString(),
    system: {
      name: projectName,
      root_path: path.resolve(directory),
      language: language
    },
    nodes,
    edges: schemaEdges,
    metrics,
    contextSurface: options.contextSurface || {}
  };
}

/**
 * Build a single schema node from a scanner file entry.
 * @param {Object} file - Scanner file entry ({id, metadata}).
 * @returns {Object} Schema node ({id, type, path, role, dependencies, blast_radius}).
 */
function buildNode(file) {
  const meta = file.metadata || {};
  // file.id is already the normalized relative path.
  const normalizedPath = file.id;

  const hasFunctions = Array.isArray(meta.functions) && meta.functions.length > 0;
  const hasClasses = Array.isArray(meta.classes) && meta.classes.length > 0;
  const hasExports = Array.isArray(meta.exports) && meta.exports.length > 0;
  const hasApiCalls = Array.isArray(meta.apiCalls) && meta.apiCalls.length > 0;

  // Role heuristic: anything with behavior is Implementation; a pure export
  // surface is Interface; everything else is Structure.
  let role = 'Structure';
  if (hasFunctions || hasClasses || hasApiCalls) {
    role = 'Implementation';
  } else if (hasExports) {
    role = 'Interface';
  }

  // Unique import sources, in first-seen order (Set preserves insertion order).
  const uniqueDeps = new Set();
  if (Array.isArray(meta.imports)) {
    for (const imp of meta.imports) {
      if (imp.source && typeof imp.source === 'string') {
        uniqueDeps.add(imp.source);
      }
    }
  }

  return {
    id: stableId(normalizedPath),
    type: 'file',
    path: normalizedPath,
    role: role,
    dependencies: [...uniqueDeps],
    blast_radius: meta.blast_radius || 0
  };
}

/**
 * Map a scanner edge type onto the schema's relation vocabulary.
 * 'calls' / 'owns' / 'depends_on' pass through; every import-like type
 * ('imports', 'require', 'export-from', 'export-all', 'dynamic-import',
 * 'require-assignment') and any unknown type collapses to 'imports',
 * matching the previous behavior.
 * @param {string} type - Raw scanner edge type.
 * @returns {string} One of 'imports' | 'calls' | 'owns' | 'depends_on'.
 */
function normalizeRelation(type) {
  return type === 'calls' || type === 'owns' || type === 'depends_on' ? type : 'imports';
}
|
|
129
|
-
|
|
130
|
-
module.exports = { buildContext };
|
|
@@ -1,41 +0,0 @@
|
|
|
1
|
-
/**
 * Sort the context object deterministically so repeated runs over the same
 * input produce byte-identical output.
 * @param {Object} context - The context object to sort.
 * @returns {Object} A shallow copy of the context with nodes, edges and
 *   metric keys in deterministic order (the input object is not mutated).
 */
function sortContext(context) {
  // Shallow copy so the caller's object stays untouched.
  const result = { ...context };

  // Nodes: ordered by id.
  if (Array.isArray(result.nodes)) {
    result.nodes = [...result.nodes].sort((a, b) => a.id.localeCompare(b.id));
  }

  // Edges: ordered by (from, to, relation).
  if (Array.isArray(result.edges)) {
    result.edges = [...result.edges].sort((a, b) => {
      if (a.from !== b.from) return a.from.localeCompare(b.from);
      if (a.to !== b.to) return a.to.localeCompare(b.to);
      return a.relation.localeCompare(b.relation);
    });
  }

  // Metrics: rebuilt with keys in default (code-point) sort order.
  if (result.metrics && typeof result.metrics === 'object') {
    const orderedEntries = Object.entries(result.metrics).sort(
      ([keyA], [keyB]) => (keyA < keyB ? -1 : keyA > keyB ? 1 : 0)
    );
    result.metrics = Object.fromEntries(orderedEntries);
  }

  return result;
}
|
|
40
|
-
|
|
41
|
-
module.exports = { sortContext };
|
|
@@ -1,75 +0,0 @@
|
|
|
1
|
-
const fs = require('fs');
|
|
2
|
-
const path = require('path');
|
|
3
|
-
const Ajv = require('ajv');
|
|
4
|
-
|
|
5
|
-
// Create AJV instance with options for detailed error reporting
|
|
6
|
-
const ajv = new Ajv({ allErrors: true, strict: false });
|
|
7
|
-
|
|
8
|
-
/**
 * Validate the context object against the Arcvision schema using AJV.
 * @param {Object} context - The context object to validate.
 * @param {string|null} [schemaPath=null] - Path to the schema file
 *   (optional; defaults to searching the standard install layouts).
 * @returns {{valid: boolean, errors?: string[], details?: null}} Validation
 *   result; on failure `errors` holds human-readable messages.
 */
function validateContext(context, schemaPath = null) {
  try {
    // Explicit schema path wins; otherwise search the known layouts.
    const schemaFilePath = schemaPath || locateSchemaFile();

    if (!fs.existsSync(schemaFilePath)) {
      throw new Error(`Schema file not found: ${schemaFilePath}`);
    }

    const schema = JSON.parse(fs.readFileSync(schemaFilePath, 'utf8'));
    const validate = ajv.compile(schema);

    if (validate(context)) {
      return { valid: true };
    }

    // Format AJV errors for human readability.
    const errors = (validate.errors || []).map((err) => {
      const instancePath = err.instancePath || "root";
      const message = err.message || "schema violation";
      return `${instancePath}: ${message}`;
    });
    return { valid: false, errors };
  } catch (error) {
    return {
      valid: false,
      errors: [`Validation error: ${error.message}`],
      details: null
    };
  }
}

/**
 * Find the bundled schema file, trying each layout the package may be
 * installed in (source tree, bundled dist, current working directory).
 * @returns {string} Path of the first existing candidate.
 * @throws {Error} When no candidate exists (caught by validateContext and
 *   surfaced as a validation failure).
 */
function locateSchemaFile() {
  const possiblePaths = [
    path.join(__dirname, '../../schema/arcvision_context_schema_v1.json'), // Standard path
    path.join(__dirname, '../schema/arcvision_context_schema_v1.json'), // Bundled path
    path.join(__dirname, 'schema/arcvision_context_schema_v1.json'), // Alternative bundled path
    path.join(process.cwd(), 'schema/arcvision_context_schema_v1.json'), // Current working directory
  ];
  const found = possiblePaths.find((candidate) => fs.existsSync(candidate));
  if (!found) {
    throw new Error(`Schema file not found in any of the expected locations: ${possiblePaths.join(', ')}`);
  }
  return found;
}
|
|
74
|
-
|
|
75
|
-
module.exports = { validateContext };
|
|
@@ -1,16 +0,0 @@
|
|
|
1
|
-
const crypto = require('crypto');
|
|
2
|
-
|
|
3
|
-
/**
 * Generate a stable, deterministic ID based on the file path.
 * The same path always produces the same ID: the path is canonicalized
 * (forward slashes, lowercase) so Windows and POSIX spellings agree, then
 * hashed.
 * @param {string} relativePath - The relative path to generate an ID for.
 * @returns {string} 16-character deterministic ID (hex prefix of SHA-256).
 */
function stableId(relativePath) {
  const canonical = relativePath.split('\\').join('/').toLowerCase();
  const digest = crypto.createHash('sha256').update(canonical).digest('hex');
  return digest.slice(0, 16);
}
|
|
15
|
-
|
|
16
|
-
module.exports = { stableId };
|
package/src/index.js
DELETED
|
@@ -1,325 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
|
|
3
|
-
const { Command } = require('commander');
|
|
4
|
-
const chalk = require('chalk');
|
|
5
|
-
const path = require('path');
|
|
6
|
-
const fs = require('fs');
|
|
7
|
-
const os = require('os');
|
|
8
|
-
const scanner = require('./core/scanner');
|
|
9
|
-
|
|
10
|
-
// Resolve the CLI version from package.json, tolerating bundled builds
// where the file may not sit next to the entry point.
let version = '1.0.0'; // fallback version
try {
  const packageJsonPath = path.join(__dirname, '../package.json');
  const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
  version = packageJson.version;
} catch (error) {
  // Bundled environments may not ship package.json at the expected path.
  console.warn('Warning: Could not load version from package.json, using default');
}

// User-level token store and backend endpoint (overridable via env).
const CONFIG_FILE = path.join(os.homedir(), '.arcvisionrc');
const API_URL = process.env.ARCVISION_API_URL || 'https://arcvisiondev.vercel.app';
|
|
26
|
-
|
|
27
|
-
// Blast radius analysis
|
|
28
|
-
const { findHighestBlastRadius, getTopBlastRadiusFiles, computeBlastRadiusWithPercentage } = require('./core/blastRadius');
|
|
29
|
-
|
|
30
|
-
/**
 * Extract per-file blast radius from an architecture map and rank the
 * top structural hubs. Supports both the legacy node format (blast radius
 * under node.metadata) and the newer format (summarized in contextSurface).
 * @param {Object} architectureMap - Scanned architecture map ({nodes, contextSurface, ...}).
 * @returns {{topFiles: Array, totalFiles: number}} Top 3 files by blast
 *   radius (with percentages) and the total file count.
 */
function analyzeBlastRadius(architectureMap) {
  const nodes = architectureMap.nodes || [];
  const contextSurface = architectureMap.contextSurface;

  // file path/id -> blast radius
  const blastRadiusMap = {};
  for (const node of nodes) {
    let blastRadius = 0;
    if (node.metadata && node.metadata.blast_radius !== undefined) {
      // Legacy format: blast radius carried on the node itself.
      blastRadius = node.metadata.blast_radius;
    } else if (contextSurface && contextSurface.topBlastRadiusFiles) {
      // New format: look the node up in the contextSurface summary.
      const foundFile = contextSurface.topBlastRadiusFiles.find(
        (f) => f.file === node.path || f.file === node.id
      );
      if (foundFile) {
        blastRadius = foundFile.blastRadius || 0;
      }
    }
    blastRadiusMap[node.path || node.id] = blastRadius;
  }

  const totalFiles = nodes.length;
  const rankedFiles = computeBlastRadiusWithPercentage(blastRadiusMap, totalFiles);

  return {
    topFiles: rankedFiles.slice(0, 3),
    totalFiles: totalFiles
  };
}
|
|
74
|
-
|
|
75
|
-
/**
 * Persist the upload token to the user-level config file (~/.arcvisionrc).
 * Exits the process with code 1 when the file cannot be written.
 * @param {string} token - Upload token to store.
 */
function saveToken(token) {
  try {
    fs.writeFileSync(CONFIG_FILE, JSON.stringify({ token }));
    console.log(chalk.green('✅ Token saved successfully!'));
  } catch (error) {
    const isPermissionError = error.code === 'EACCES';
    if (isPermissionError) {
      console.error(chalk.red('❌ Permission denied: Cannot write to config file.'));
      console.error(chalk.yellow(`Please ensure you have write permissions for: ${CONFIG_FILE}`));
    } else {
      console.error(chalk.red('❌ Failed to save token:'), error.message);
    }
    process.exit(1);
  }
}
|
|
89
|
-
|
|
90
|
-
/**
 * Read the stored upload token from the user-level config file.
 * @returns {string|null|undefined} The token, or null/undefined when the
 *   config file is missing, unreadable, or has no token field.
 */
function getToken() {
  try {
    if (!fs.existsSync(CONFIG_FILE)) {
      return null;
    }
    const config = JSON.parse(fs.readFileSync(CONFIG_FILE, 'utf8'));
    return config.token;
  } catch (error) {
    // Corrupt or unreadable config: report and treat as "no token".
    console.error(chalk.red('❌ Failed to read token configuration:'), error.message);
    console.error(chalk.yellow('Token file may be corrupted. Run `arcvision link <TOKEN>` to reset.'));
    return null;
  }
}
|
|
103
|
-
|
|
104
|
-
// Upload the generated architecture graph to the backend using the stored token.
/**
 * POST the architecture graph to `${API_URL}/api/upload` with the saved
 * bearer token. Prints progress hints at 30s/60s, aborts at 120s, and maps
 * HTTP / network failures to actionable messages. Exits the process with
 * code 1 on any failure.
 *
 * Fix: the progress/abort timers were previously cleared only on the
 * success path after fetch resolved; a rejected fetch left them pending.
 * Cleanup now also runs in a `finally` block so every exit path (other
 * than process.exit, which terminates outright) releases the timers.
 *
 * @param {Object} jsonData - Validated architecture map to upload.
 * @returns {Promise<void>}
 */
async function uploadToDatabase(jsonData) {
  const token = getToken();
  if (!token) {
    console.log(chalk.red('❌ No upload token found.'));
    console.log(chalk.yellow('Run `arcvision link <TOKEN>` first to connect to a project.'));
    process.exit(1);
  }

  const controller = new AbortController();
  let progress30s;
  let progress60s;
  let timeoutId;

  try {
    console.log(chalk.blue(`Uploading to ${API_URL}/api/upload...`));

    // Progress indicators for slow uploads.
    progress30s = setTimeout(() => {
      console.log(chalk.yellow('Upload taking longer than expected, please wait while the process continues...'));
    }, 30000); // 30 seconds

    progress60s = setTimeout(() => {
      console.log(chalk.yellow('File size is large and may take additional time, process still working...'));
    }, 60000); // 60 seconds

    // Hard timeout: abort the request entirely.
    timeoutId = setTimeout(() => {
      controller.abort();
    }, 120000); // 120 second timeout

    const response = await fetch(`${API_URL}/api/upload`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${token}`
      },
      body: JSON.stringify({
        graph: jsonData
      }),
      signal: controller.signal
    });

    // Response received: stop all timers immediately so later awaits
    // (response.json/text) cannot trigger spurious progress messages.
    clearTimeout(timeoutId);
    clearTimeout(progress30s);
    clearTimeout(progress60s);

    if (response.status === 401) {
      console.error(chalk.red('❌ Invalid or revoked token.'));
      console.error(chalk.yellow('The token may be invalid, revoked, or the associated project may have been deleted.'));
      console.log(chalk.yellow('💡 Please create a new project on the dashboard and generate a new token.'));
      process.exit(1);
    }

    if (response.status === 404) {
      console.error(chalk.red('❌ Project not found.'));
      console.error(chalk.yellow('The project associated with this token may have been deleted.'));
      console.log(chalk.yellow('💡 Please create a new project on the dashboard and generate a new token.'));
      process.exit(1);
    }

    if (response.status === 429) {
      console.error(chalk.red('❌ Rate limit exceeded. Please wait before trying again.'));
      process.exit(1);
    }

    if (!response.ok) {
      console.error(chalk.red(`❌ Upload failed: ${response.status} ${response.statusText}`));
      const text = await response.text();
      try {
        const errorJson = JSON.parse(text);
        if (errorJson.error) console.error(chalk.red(`Server Error: ${errorJson.error}`));
      } catch (e) {
        console.error(chalk.red(`Server Error: ${text}`));
      }
      process.exit(1);
    }

    const result = await response.json();
    if (result.success) {
      console.log(chalk.green('✅ Graph uploaded successfully!'));
    } else {
      console.log(chalk.red('❌ Upload reported failure despite 200 OK.'));
      if (result.error) {
        console.error(chalk.red(`Error details: ${result.error}`));
      }
    }
  } catch (error) {
    if (error.name === 'AbortError') {
      console.error(chalk.red('❌ Upload timeout: Request took too long to complete.'));
      console.error(chalk.yellow('Check your internet connection and try again.'));
    } else if (error.code === 'ENOTFOUND' || error.code === 'ECONNREFUSED' || error.code === 'ECONNRESET') {
      console.error(chalk.red('❌ Network error: Unable to connect to the server.'));
      console.error(chalk.yellow('Check your internet connection and ensure the API endpoint is accessible.'));
      console.error(chalk.yellow(`API endpoint: ${API_URL}/api/upload`));
    } else {
      console.error(chalk.red('Upload network error:'), error.message);
      console.error(chalk.yellow('This might be a temporary issue. Please try again later.'));
    }
    process.exit(1);
  } finally {
    // Safety net: guarantees timer cleanup on every non-exit path
    // (clearTimeout on an already-cleared handle is a no-op).
    clearTimeout(timeoutId);
    clearTimeout(progress30s);
    clearTimeout(progress60s);
  }
}
|
|
207
|
-
|
|
208
|
-
const program = new Command();

program
  .name('arcvision')
  .description(`CLI to visualize codebase architecture

Quick Start:
1. Sign up at the ArcVision dashboard
2. Create a project and name it
3. Generate a CLI token
4. Run: arcvision link <token>
5. Run: arcvision scan --upload
6. Open dashboard to see results
`)
  .version(version);

program
  .command('link <token>')
  .description('Link this CLI to a project via upload token')
  .action(runLinkCommand);

program
  .command('scan')
  .description('Scan the current directory and generate architecture map')
  .argument('[directory]', 'Directory to scan', '.')
  .option('-u, --upload', 'Upload to database')
  .action(runScanCommand);

/**
 * Handler for `arcvision link <token>`: persist the upload token.
 * @param {string} token - Token generated on the dashboard.
 */
function runLinkCommand(token) {
  try {
    saveToken(token);
  } catch (error) {
    // saveToken already handles its own errors and exits
    process.exit(1);
  }
}

/**
 * Handler for `arcvision scan [directory]`: scan, report blast radius,
 * validate against the schema, then upload or save to disk.
 * @param {string} directory - Directory to scan (defaults to '.').
 * @param {Object} options - Commander options ({upload}).
 */
async function runScanCommand(directory, options) {
  const targetDir = path.resolve(directory);
  console.log(chalk.blue(`Scanning directory: ${targetDir}`));

  try {
    const map = await scanner.scan(targetDir);
    console.log(chalk.green('Scan complete!'));

    reportBlastRadius(map);

    // Validate the map before saving or uploading.
    const { validateContext } = require('./engine/context_validator');
    const validation = validateContext(map);
    if (!validation.valid) {
      console.error('FINAL VALIDATION FAILED:');
      validation.errors.forEach(e => console.error(' -', e));
      console.error('ABORTING. JSON NOT SAVED.');
      process.exit(1);
    }

    if (options.upload) {
      await uploadToDatabase(map);
    } else {
      const outputFileName = 'arcvision.context.json';
      fs.writeFileSync(outputFileName, JSON.stringify(map, null, 2));
      console.log(chalk.green(`✅ Structural context saved to ${outputFileName}`));
      console.log(JSON.stringify(map, null, 2)); // Print if not uploading
      console.log(chalk.dim('\nUse --upload to send to dashboard.'));
    }
  } catch (error) {
    handleScanError(error, targetDir);
  }
}

/**
 * Print the top structural context hubs found by blast-radius analysis,
 * or a notice when none were detected.
 * @param {Object} map - Scanned architecture map.
 */
function reportBlastRadius(map) {
  const blastRadiusAnalysis = analyzeBlastRadius(map);
  const topFiles = blastRadiusAnalysis && blastRadiusAnalysis.topFiles;

  if (!topFiles || topFiles.length === 0) {
    console.log('\nNo high-structure files detected based on import dependencies.');
    return;
  }

  console.log('\n⚠️ Top Structural Context Hubs Detected:\n');

  // Rank-specific warnings: 1st, 2nd, then everything else.
  const warnings = [
    'Changes here may silently propagate across the system.',
    'Acts as a coordination layer between components.',
    'Modifications can cause widespread inconsistencies.'
  ];

  topFiles.forEach((item, index) => {
    const warningMessage = warnings[Math.min(index, warnings.length - 1)];
    console.log(`${index + 1}. ${item.file}`);
    console.log(`   Blast Radius: ${item.blastRadius} files (${item.percentOfGraph}%)`);
    console.log(`   Warning: ${warningMessage}\n`);
  });
}

/**
 * Map scan failures to actionable messages and exit with code 1.
 * @param {Error} error - Error thrown during scanning/saving.
 * @param {string} targetDir - Resolved directory that was scanned.
 */
function handleScanError(error, targetDir) {
  if (error.code === 'ENOENT') {
    console.error(chalk.red('❌ Directory not found:'), targetDir);
    console.error(chalk.yellow('Please check the directory path and ensure it exists.'));
    process.exit(1);
  }
  if (error.code === 'EACCES') {
    console.error(chalk.red('❌ Permission denied:'), targetDir);
    console.error(chalk.yellow('Please ensure you have read permissions for the specified directory.'));
    process.exit(1);
  }

  console.error(chalk.red('Scan failed:'), error.message);
  console.error(chalk.yellow('This might be caused by file access issues or unsupported file types.'));

  // Provide more specific guidance based on common errors.
  if (error.message && error.message.toLowerCase().includes('parse')) {
    console.error(chalk.yellow('If this is a TypeScript declaration file, note that .d.ts files are now skipped.'));
  }
  if (error.message && error.message.includes('VALIDATION FAILED')) {
    console.error(chalk.red('❌ Schema validation failed - output not saved'));
  }

  process.exit(1);
}

program.parse();
|
|
@@ -1,48 +0,0 @@
|
|
|
1
|
-
const parser = require('@babel/parser');
|
|
2
|
-
const traverse = require('@babel/traverse').default;
|
|
3
|
-
const fs = require('fs');
|
|
4
|
-
|
|
5
|
-
module.exports = {
|
|
6
|
-
name: 'express-route-detector',
|
|
7
|
-
|
|
8
|
-
process: async (filePath, metadata) => {
|
|
9
|
-
try {
|
|
10
|
-
const content = fs.readFileSync(filePath, 'utf-8');
|
|
11
|
-
const ast = parser.parse(content, {
|
|
12
|
-
sourceType: 'module',
|
|
13
|
-
plugins: ['jsx', 'typescript']
|
|
14
|
-
});
|
|
15
|
-
|
|
16
|
-
const routes = [];
|
|
17
|
-
|
|
18
|
-
traverse(ast, {
|
|
19
|
-
CallExpression({ node }) {
|
|
20
|
-
// Detect Express routes: app.get(), app.post(), router.get(), etc.
|
|
21
|
-
if (node.callee.type === 'MemberExpression') {
|
|
22
|
-
const obj = node.callee.object.name;
|
|
23
|
-
const method = node.callee.property.name;
|
|
24
|
-
|
|
25
|
-
if ((obj === 'app' || obj === 'router') &&
|
|
26
|
-
['get', 'post', 'put', 'delete', 'patch'].includes(method)) {
|
|
27
|
-
const pathArg = node.arguments[0];
|
|
28
|
-
if (pathArg && pathArg.type === 'StringLiteral') {
|
|
29
|
-
routes.push({
|
|
30
|
-
method: method.toUpperCase(),
|
|
31
|
-
path: pathArg.value
|
|
32
|
-
});
|
|
33
|
-
}
|
|
34
|
-
}
|
|
35
|
-
}
|
|
36
|
-
}
|
|
37
|
-
});
|
|
38
|
-
|
|
39
|
-
if (routes.length > 0) {
|
|
40
|
-
return { expressRoutes: routes };
|
|
41
|
-
}
|
|
42
|
-
|
|
43
|
-
return null;
|
|
44
|
-
} catch (error) {
|
|
45
|
-
return null;
|
|
46
|
-
}
|
|
47
|
-
}
|
|
48
|
-
};
|