arcvision 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +91 -0
- package/arcvision-0.1.0.tgz +0 -0
- package/dist/index.js +56622 -0
- package/jest.config.json +6 -0
- package/package.json +37 -0
- package/src/core/parser.js +73 -0
- package/src/core/scanner.js +86 -0
- package/src/core/watcher.js +18 -0
- package/src/index.js +201 -0
- package/src/plugins/express-plugin.js +48 -0
- package/src/plugins/plugin-manager.js +57 -0
- package/src/plugins/react-plugin.js +54 -0
- package/tests/plugins.test.js +49 -0
- package/tests/scanner.test.js +52 -0
package/jest.config.json
ADDED
package/package.json
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "arcvision",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Architecture scanner for modern codebases",
|
|
5
|
+
"bin": {
|
|
6
|
+
"arcvision": "./dist/index.js"
|
|
7
|
+
},
|
|
8
|
+
"scripts": {
|
|
9
|
+
"build": "esbuild src/index.js --bundle --platform=node --outfile=dist/index.js",
|
|
10
|
+
"prepublishOnly": "npm run build",
|
|
11
|
+
"dev": "node src/index.js",
|
|
12
|
+
"test": "jest",
|
|
13
|
+
"test:watch": "jest --watch"
|
|
14
|
+
},
|
|
15
|
+
"dependencies": {
|
|
16
|
+
"commander": "^12.0.0",
|
|
17
|
+
"chalk": "^4.1.2",
|
|
18
|
+
"glob": "^10.3.10",
|
|
19
|
+
"@babel/parser": "^7.24.0",
|
|
20
|
+
"@babel/traverse": "^7.24.0",
|
|
21
|
+
"node-fetch": "^3.3.2"
|
|
22
|
+
},
|
|
23
|
+
"devDependencies": {
|
|
24
|
+
"esbuild": "^0.20.0",
|
|
25
|
+
"jest": "^29.7.0"
|
|
26
|
+
},
|
|
27
|
+
"keywords": [
|
|
28
|
+
"architecture",
|
|
29
|
+
"visualization",
|
|
30
|
+
"scanning",
|
|
31
|
+
"codebase",
|
|
32
|
+
"dependency",
|
|
33
|
+
"graph"
|
|
34
|
+
],
|
|
35
|
+
"author": "ArcVision",
|
|
36
|
+
"license": "MIT"
|
|
37
|
+
}
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
const parser = require('@babel/parser');
|
|
2
|
+
const traverse = require('@babel/traverse').default;
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
|
|
6
|
+
/**
 * Parse a single JS/TS source file and extract structural metadata.
 *
 * @param {string} filePath - Path of the file to read and parse.
 * @returns {{id: string,
 *           imports: Array<{source: string, specifiers: string[]}>,
 *           exports: string[],
 *           functions: Array<{name: string, params: string[]}>,
 *           apiCalls: Array<{type: string, url: string}>}} Extracted metadata.
 * @throws {Error} If the file cannot be read or contains a syntax error.
 */
function parseFile(filePath) {
  const content = fs.readFileSync(filePath, 'utf-8');
  const ast = parser.parse(content, {
    sourceType: 'module',
    plugins: ['jsx', 'typescript', 'classProperties', 'dynamicImport']
  });

  const metadata = {
    id: filePath,
    imports: [],
    exports: [],
    functions: [],
    apiCalls: []
  };

  traverse(ast, {
    ImportDeclaration({ node }) {
      metadata.imports.push({
        source: node.source.value,
        specifiers: node.specifiers.map(s => s.local.name)
      });
    },
    ExportNamedDeclaration({ node }) {
      if (node.declaration) {
        if (node.declaration.type === 'FunctionDeclaration') {
          // Guard: an anonymous `export function` has no id; previously
          // this would throw on `.id.name`.
          if (node.declaration.id) {
            metadata.exports.push(node.declaration.id.name);
          }
        } else if (node.declaration.type === 'VariableDeclaration') {
          node.declaration.declarations.forEach(d => {
            // Destructured exports (`export const { a } = obj`) bind a
            // pattern, not an Identifier; previously this pushed
            // `undefined` into the exports list.
            if (d.id.type === 'Identifier') {
              metadata.exports.push(d.id.name);
            }
          });
        }
      }
    },
    ExportDefaultDeclaration() {
      metadata.exports.push('default');
    },
    FunctionDeclaration({ node }) {
      if (node.id) {
        metadata.functions.push({
          name: node.id.name,
          // Destructured params are recorded as '{}', other patterns as '?'.
          params: node.params.map(p => p.name || (p.type === 'ObjectPattern' ? '{}' : '?'))
        });
      }
    },
    CallExpression({ node }) {
      if (node.callee.name === 'fetch') {
        const arg = node.arguments[0];
        // Guard: a bare `fetch()` call has no first argument; previously
        // this crashed on `arg.type`.
        metadata.apiCalls.push({
          type: 'fetch',
          url: arg && arg.type === 'StringLiteral' ? arg.value : 'dynamic'
        });
      }
      // Treat CommonJS `require('x')` calls as imports too.
      if (node.callee.name === 'require' && node.arguments.length > 0) {
        const source = node.arguments[0].value;
        if (source) {
          metadata.imports.push({
            source: source,
            specifiers: []
          });
        }
      }
    }
  });

  return metadata;
}
|
|
72
|
+
|
|
73
|
+
module.exports = { parseFile };
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
const { glob } = require('glob');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const parser = require('./parser');
|
|
4
|
+
const pluginManager = require('../plugins/plugin-manager');
|
|
5
|
+
|
|
6
|
+
/**
 * Scan a directory tree and build an architecture map of its JS/TS files.
 *
 * Nodes are parsed source files (with plugin-enhanced metadata); edges are
 * relative imports resolved to other scanned files.
 *
 * @param {string} directory - Root directory to scan.
 * @returns {Promise<{nodes: Array<object>, edges: Array<object>}>}
 * @throws {Error} If globbing the directory fails.
 */
async function scan(directory) {
  const options = {
    ignore: ['**/node_modules/**', '**/.git/**', '**/dist/**', '**/build/**'],
    cwd: directory,
    absolute: true
  };

  // Load plugins that live alongside the plugin manager.
  const pluginDir = path.join(__dirname, '../plugins');
  pluginManager.loadPluginsFromDirectory(pluginDir);

  // TypeScript declaration files are skipped in addition to the standard ignores.
  const files = await glob('**/*.{js,jsx,ts,tsx}', { ...options, ignore: [...options.ignore, '**/*.d.ts'] });

  const architectureMap = {
    nodes: [],
    edges: []
  };

  // Parse each file and run it through the plugin pipeline.
  for (const file of files) {
    try {
      let metadata = parser.parseFile(file);
      const relativePath = path.relative(directory, file);

      metadata = await pluginManager.processFile(file, metadata);

      architectureMap.nodes.push({
        id: relativePath,
        type: 'file',
        metadata: metadata
      });
    } catch (e) {
      // Non-fatal parse errors (like .d.ts files) are logged but don't stop the scan
      console.warn(`⚠️ Parse warning for ${file}: ${e.message}`);
    }
  }

  // Normalize path separators so edge resolution also works on Windows.
  const normalize = p => p.replace(/\\/g, '/');

  const knownFiles = new Set();
  architectureMap.nodes.forEach(n => knownFiles.add(normalize(n.id)));

  // Resolve relative imports against the set of scanned files, probing the
  // usual extensions, and record each hit as an 'import' edge.
  architectureMap.nodes.forEach(node => {
    node.metadata.imports.forEach(imp => {
      const target = imp.source;
      if (!target.startsWith('.')) {
        return; // bare specifiers (packages) do not become graph edges
      }
      const resolved = path.join(path.dirname(node.id), target);
      const extensions = ['', '.js', '.jsx', '.ts', '.tsx'];

      for (const ext of extensions) {
        const probe = normalize(resolved + ext);
        if (knownFiles.has(probe)) {
          architectureMap.edges.push({
            source: normalize(node.id),
            target: probe,
            type: 'import'
          });
          break;
        }
      }
    });
  });

  return architectureMap;
}
|
|
85
|
+
|
|
86
|
+
module.exports = { scan };
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
const chokidar = require('chokidar');
|
|
2
|
+
|
|
3
|
+
/**
 * Watch a directory for file changes and forward events to a callback.
 *
 * @param {string} directory - Directory to watch recursively.
 * @param {(event: string, filePath: string) => void} callback - Invoked with
 *   the event name ('add' | 'change' | 'unlink') and the affected path.
 * @returns {object} The chokidar watcher (call `.close()` to stop watching).
 */
function watch(directory, callback) {
  const watcherOptions = {
    ignored: [/(^|[\/\\])\../, '**/node_modules/**'], // dotfiles + node_modules
    persistent: true,
    ignoreInitial: true
  };
  const watcher = chokidar.watch(directory, watcherOptions);

  for (const event of ['add', 'change', 'unlink']) {
    watcher.on(event, filePath => callback(event, filePath));
  }

  return watcher;
}
|
|
17
|
+
|
|
18
|
+
module.exports = { watch };
|
package/src/index.js
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
const { Command } = require('commander');
|
|
4
|
+
const chalk = require('chalk');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
const fs = require('fs');
|
|
7
|
+
const os = require('os');
|
|
8
|
+
const scanner = require('./core/scanner');
|
|
9
|
+
|
|
10
|
+
const CONFIG_FILE = path.join(os.homedir(), '.arcvisionrc');
|
|
11
|
+
const API_URL = process.env.ARCVISION_API_URL || 'https://arcvisiondev.vercel.app';
|
|
12
|
+
|
|
13
|
+
/**
 * Persist the upload token to the user's config file (~/.arcvisionrc).
 *
 * Exits the process with code 1 on any write failure.
 *
 * @param {string} token - The project upload token to store.
 */
function saveToken(token) {
  try {
    // mode 0o600: the token is a credential — make the file readable and
    // writable by the owner only (previously created with default perms).
    fs.writeFileSync(CONFIG_FILE, JSON.stringify({ token }), { mode: 0o600 });
    console.log(chalk.green('✅ Token saved successfully!'));
  } catch (error) {
    if (error.code === 'EACCES') {
      console.error(chalk.red('❌ Permission denied: Cannot write to config file.'));
      console.error(chalk.yellow(`Please ensure you have write permissions for: ${CONFIG_FILE}`));
    } else {
      console.error(chalk.red('❌ Failed to save token:'), error.message);
    }
    process.exit(1);
  }
}
|
|
27
|
+
|
|
28
|
+
/**
 * Read the stored upload token from the config file.
 *
 * @returns {string|null|undefined} The saved token, or null/undefined when
 *   the file is missing, unreadable, or has no token field.
 */
function getToken() {
  try {
    if (!fs.existsSync(CONFIG_FILE)) {
      return null;
    }
    const raw = fs.readFileSync(CONFIG_FILE, 'utf8');
    return JSON.parse(raw).token;
  } catch (error) {
    console.error(chalk.red('❌ Failed to read token configuration:'), error.message);
    console.error(chalk.yellow('Token file may be corrupted. Run `arcvision link <TOKEN>` to reset.'));
    return null;
  }
}
|
|
41
|
+
|
|
42
|
+
// Function to upload JSON to database via Token
|
|
43
|
+
/**
 * Upload a scanned architecture map to the ArcVision backend.
 *
 * Requires a token previously stored via `arcvision link <TOKEN>`.
 * On any failure (missing token, HTTP error, timeout, network error) the
 * process exits with code 1; on success a confirmation is printed.
 *
 * NOTE(review): uses the global `fetch` — presumably this requires a Node
 * runtime that provides it (the `node-fetch` dependency is never required
 * here); confirm the supported Node version.
 *
 * @param {object} jsonData - The architecture map (nodes/edges) to upload.
 */
async function uploadToDatabase(jsonData) {
  const token = getToken();
  if (!token) {
    console.log(chalk.red('❌ No upload token found.'));
    console.log(chalk.yellow('Run `arcvision link <TOKEN>` first to connect to a project.'));
    process.exit(1);
  }

  try {
    console.log(chalk.blue(`Uploading to ${API_URL}/api/upload...`));

    // Add timeout to fetch request
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), 30000); // 30 second timeout

    const response = await fetch(`${API_URL}/api/upload`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${token}`
      },
      body: JSON.stringify({
        graph: jsonData
      }),
      signal: controller.signal
    });

    clearTimeout(timeoutId);

    // 401: token rejected by the server.
    if (response.status === 401) {
      console.error(chalk.red('❌ Invalid or revoked token.'));
      console.error(chalk.yellow('The token may be invalid, revoked, or the associated project may have been deleted.'));
      console.log(chalk.yellow('💡 Please create a new project on the dashboard and generate a new token.'));
      process.exit(1);
    }

    // 404: the token's project no longer exists.
    if (response.status === 404) {
      console.error(chalk.red('❌ Project not found.'));
      console.error(chalk.yellow('The project associated with this token may have been deleted.'));
      console.log(chalk.yellow('💡 Please create a new project on the dashboard and generate a new token.'));
      process.exit(1);
    }

    // 429: server-side rate limiting.
    if (response.status === 429) {
      console.error(chalk.red('❌ Rate limit exceeded. Please wait before trying again.'));
      process.exit(1);
    }

    // Any other non-2xx: show the status and, if the body is JSON with an
    // `error` field, surface that; otherwise dump the raw body.
    if (!response.ok) {
      console.error(chalk.red(`❌ Upload failed: ${response.status} ${response.statusText}`));
      const text = await response.text();
      try {
        const errorJson = JSON.parse(text);
        if (errorJson.error) console.error(chalk.red(`Server Error: ${errorJson.error}`));
      } catch (e) {
        console.error(chalk.red(`Server Error: ${text}`));
      }
      process.exit(1);
    }

    // 2xx: the body is expected to carry a `success` flag.
    const result = await response.json();
    if (result.success) {
      console.log(chalk.green('✅ Graph uploaded successfully!'));
    } else {
      console.log(chalk.red('❌ Upload reported failure despite 200 OK.'));
      if (result.error) {
        console.error(chalk.red(`Error details: ${result.error}`));
      }
    }

  } catch (error) {
    // AbortError comes from the 30s timeout above; ENOTFOUND/ECONNREFUSED/
    // ECONNRESET indicate connectivity problems; anything else is generic.
    if (error.name === 'AbortError') {
      console.error(chalk.red('❌ Upload timeout: Request took too long to complete.'));
      console.error(chalk.yellow('Check your internet connection and try again.'));
    } else if (error.code === 'ENOTFOUND' || error.code === 'ECONNREFUSED' || error.code === 'ECONNRESET') {
      console.error(chalk.red('❌ Network error: Unable to connect to the server.'));
      console.error(chalk.yellow('Check your internet connection and ensure the API endpoint is accessible.'));
      console.error(chalk.yellow(`API endpoint: ${API_URL}/api/upload`));
    } else {
      console.error(chalk.red('Upload network error:'), error.message);
      console.error(chalk.yellow('This might be a temporary issue. Please try again later.'));
    }
    process.exit(1);
  }
}
|
|
128
|
+
|
|
129
|
+
// CLI definition: `arcvision link <token>` stores an upload token;
// `arcvision scan [directory] [--upload]` builds and optionally uploads the map.
const program = new Command();

program
  .name('arcvision')
  .description(`CLI to visualize codebase architecture

Quick Start:
1. Sign up at the ArcVision dashboard
2. Create a project and name it
3. Generate a CLI token
4. Run: arcvision link <token>
5. Run: arcvision scan --upload
6. Open dashboard to see results
`)
  // Fixed: was hard-coded '1.0.0', contradicting package.json's "0.1.0".
  .version('0.1.0');

program
  .command('link <token>')
  .description('Link this CLI to a project via upload token')
  .action((token) => {
    // saveToken handles its own errors and exits on failure, so no
    // try/catch is needed here (the previous wrapper was dead code).
    saveToken(token);
  });

program
  .command('scan')
  .description('Scan the current directory and generate architecture map')
  .argument('[directory]', 'Directory to scan', '.')
  .option('-u, --upload', 'Upload to database')
  .action(async (directory, options) => {
    const targetDir = path.resolve(directory);
    console.log(chalk.blue(`Scanning directory: ${targetDir}`));

    try {
      const map = await scanner.scan(targetDir);
      console.log(chalk.green('Scan complete!'));

      // Upload to database if requested
      if (options.upload) {
        await uploadToDatabase(map);
      } else {
        console.log(JSON.stringify(map, null, 2)); // Print if not uploading
        console.log(chalk.dim('\nUse --upload to send to dashboard.'));
      }
    } catch (error) {
      if (error.code === 'ENOENT') {
        console.error(chalk.red('❌ Directory not found:'), targetDir);
        console.error(chalk.yellow('Please check the directory path and ensure it exists.'));
        process.exit(1);
      } else if (error.code === 'EACCES') {
        console.error(chalk.red('❌ Permission denied:'), targetDir);
        console.error(chalk.yellow('Please ensure you have read permissions for the specified directory.'));
        process.exit(1);
      } else {
        console.error(chalk.red('Scan failed:'), error.message);
        console.error(chalk.yellow('This might be caused by file access issues or unsupported file types.'));

        // Provide more specific guidance based on common errors
        if (error.message && error.message.toLowerCase().includes('parse')) {
          console.error(chalk.yellow('If this is a TypeScript declaration file, note that .d.ts files are now skipped.'));
        }
        process.exit(1);
      }
    }
  });

program.parse();
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
const parser = require('@babel/parser');
|
|
2
|
+
const traverse = require('@babel/traverse').default;
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
|
|
5
|
+
module.exports = {
|
|
6
|
+
name: 'express-route-detector',
|
|
7
|
+
|
|
8
|
+
process: async (filePath, metadata) => {
|
|
9
|
+
try {
|
|
10
|
+
const content = fs.readFileSync(filePath, 'utf-8');
|
|
11
|
+
const ast = parser.parse(content, {
|
|
12
|
+
sourceType: 'module',
|
|
13
|
+
plugins: ['jsx', 'typescript']
|
|
14
|
+
});
|
|
15
|
+
|
|
16
|
+
const routes = [];
|
|
17
|
+
|
|
18
|
+
traverse(ast, {
|
|
19
|
+
CallExpression({ node }) {
|
|
20
|
+
// Detect Express routes: app.get(), app.post(), router.get(), etc.
|
|
21
|
+
if (node.callee.type === 'MemberExpression') {
|
|
22
|
+
const obj = node.callee.object.name;
|
|
23
|
+
const method = node.callee.property.name;
|
|
24
|
+
|
|
25
|
+
if ((obj === 'app' || obj === 'router') &&
|
|
26
|
+
['get', 'post', 'put', 'delete', 'patch'].includes(method)) {
|
|
27
|
+
const pathArg = node.arguments[0];
|
|
28
|
+
if (pathArg && pathArg.type === 'StringLiteral') {
|
|
29
|
+
routes.push({
|
|
30
|
+
method: method.toUpperCase(),
|
|
31
|
+
path: pathArg.value
|
|
32
|
+
});
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
});
|
|
38
|
+
|
|
39
|
+
if (routes.length > 0) {
|
|
40
|
+
return { expressRoutes: routes };
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
return null;
|
|
44
|
+
} catch (error) {
|
|
45
|
+
return null;
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
};
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
|
|
4
|
+
/**
 * Registry and pipeline for metadata-enhancement plugins.
 *
 * A plugin is an object with a string `name` and an async
 * `process(filePath, metadata)` function that returns extra metadata
 * fields to merge (or null/undefined to contribute nothing).
 */
class PluginManager {
  constructor() {
    this.plugins = [];
  }

  /**
   * Add a plugin to the registry after validating its shape.
   * @param {{name: string, process: Function}} plugin
   * @throws {Error} If the plugin lacks a string name or a process function.
   */
  register(plugin) {
    if (typeof plugin.name !== 'string') {
      throw new Error('Plugin must have a name');
    }
    if (typeof plugin.process !== 'function') {
      throw new Error('Plugin must have a process function');
    }
    this.plugins.push(plugin);
    console.log(`Plugin registered: ${plugin.name}`);
  }

  /**
   * Run every registered plugin over a file, merging returned fields into
   * a copy of the metadata. A throwing plugin logs a warning and is skipped.
   * @param {string} filePath - File being processed.
   * @param {object} metadata - Base metadata (not mutated).
   * @returns {Promise<object>} Metadata enhanced by all successful plugins.
   */
  async processFile(filePath, metadata) {
    let merged = { ...metadata };

    for (const plugin of this.plugins) {
      try {
        const extra = await plugin.process(filePath, merged);
        if (extra) {
          merged = { ...merged, ...extra };
        }
      } catch (error) {
        console.warn(`Plugin ${plugin.name} failed: ${error.message}`);
      }
    }

    return merged;
  }

  /**
   * Register every .js plugin found in a directory (excluding the manager
   * itself). A missing directory is a no-op; a bad plugin logs a warning.
   * @param {string} directory - Directory containing plugin modules.
   */
  loadPluginsFromDirectory(directory) {
    if (!fs.existsSync(directory)) {
      return;
    }

    for (const file of fs.readdirSync(directory)) {
      if (!file.endsWith('.js') || file === 'plugin-manager.js') {
        continue;
      }
      try {
        const plugin = require(path.join(directory, file));
        this.register(plugin);
      } catch (error) {
        console.warn(`Failed to load plugin ${file}: ${error.message}`);
      }
    }
  }
}
|
|
56
|
+
|
|
57
|
+
module.exports = new PluginManager();
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
const parser = require('@babel/parser');
|
|
2
|
+
const traverse = require('@babel/traverse').default;
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
|
|
5
|
+
module.exports = {
|
|
6
|
+
name: 'react-component-detector',
|
|
7
|
+
|
|
8
|
+
process: async (filePath, metadata) => {
|
|
9
|
+
if (!filePath.match(/\.(jsx|tsx)$/)) {
|
|
10
|
+
return null;
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
try {
|
|
14
|
+
const content = fs.readFileSync(filePath, 'utf-8');
|
|
15
|
+
const ast = parser.parse(content, {
|
|
16
|
+
sourceType: 'module',
|
|
17
|
+
plugins: ['jsx', 'typescript']
|
|
18
|
+
});
|
|
19
|
+
|
|
20
|
+
const components = [];
|
|
21
|
+
const hooks = [];
|
|
22
|
+
|
|
23
|
+
traverse(ast, {
|
|
24
|
+
FunctionDeclaration({ node }) {
|
|
25
|
+
// Check if it's a React component (starts with uppercase)
|
|
26
|
+
if (node.id && /^[A-Z]/.test(node.id.name)) {
|
|
27
|
+
components.push(node.id.name);
|
|
28
|
+
}
|
|
29
|
+
},
|
|
30
|
+
VariableDeclarator({ node }) {
|
|
31
|
+
// Check for arrow function components
|
|
32
|
+
if (node.id && /^[A-Z]/.test(node.id.name)) {
|
|
33
|
+
if (node.init && (node.init.type === 'ArrowFunctionExpression' || node.init.type === 'FunctionExpression')) {
|
|
34
|
+
components.push(node.id.name);
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
},
|
|
38
|
+
CallExpression({ node }) {
|
|
39
|
+
// Detect React hooks
|
|
40
|
+
if (node.callee.name && node.callee.name.startsWith('use')) {
|
|
41
|
+
hooks.push(node.callee.name);
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
});
|
|
45
|
+
|
|
46
|
+
return {
|
|
47
|
+
reactComponents: [...new Set(components)],
|
|
48
|
+
reactHooks: [...new Set(hooks)]
|
|
49
|
+
};
|
|
50
|
+
} catch (error) {
|
|
51
|
+
return null;
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
};
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
const pluginManager = require('./src/plugins/plugin-manager');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
|
|
4
|
+
// Unit tests for the PluginManager singleton.
// NOTE(review): pluginManager is a module-level singleton, so plugins
// registered here accumulate across tests — confirm this is intended.
describe('Plugin System', () => {
  test('should register a plugin', () => {
    const samplePlugin = {
      name: 'test-plugin',
      process: async (filePath, metadata) => ({ testField: 'test-value' })
    };

    pluginManager.register(samplePlugin);

    expect(pluginManager.plugins.length).toBeGreaterThan(0);
  });

  test('should process file with plugin', async () => {
    const enhancerPlugin = {
      name: 'test-enhancer',
      process: async (filePath, metadata) => ({ enhanced: true })
    };

    pluginManager.register(enhancerPlugin);

    const baseMetadata = { id: 'test.js', imports: [] };
    const processed = await pluginManager.processFile('test.js', baseMetadata);

    expect(processed.enhanced).toBe(true);
  });

  test('should handle plugin errors gracefully', async () => {
    const throwingPlugin = {
      name: 'faulty-plugin',
      process: async () => {
        throw new Error('Plugin error');
      }
    };

    pluginManager.register(throwingPlugin);

    const baseMetadata = { id: 'test.js' };
    const processed = await pluginManager.processFile('test.js', baseMetadata);

    // Processing must survive a throwing plugin.
    expect(processed).toBeDefined();
  });
});
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
const scanner = require('./src/core/scanner');
|
|
2
|
+
const parser = require('./src/core/parser');
|
|
3
|
+
const path = require('path');
|
|
4
|
+
|
|
5
|
+
// Integration tests for the parser and scanner.
// NOTE(review): every test reads from ../example-project, which is not in
// the published package's file list — confirm the fixture exists in the repo
// where these tests run.
describe('ArcVision Scanner', () => {
  // Parser smoke test against a known fixture file.
  test('should parse a simple JavaScript file', () => {
    const testFile = path.join(__dirname, '../example-project/utils.js');
    const metadata = parser.parseFile(testFile);

    expect(metadata).toBeDefined();
    expect(metadata.exports).toContain('helper');
    expect(metadata.functions).toBeDefined();
  });

  // Full scan should return a nodes/edges map (30s timeout for large fixtures).
  test('should scan a directory and return architecture map', async () => {
    const testDir = path.join(__dirname, '../example-project');
    const map = await scanner.scan(testDir);

    expect(map).toBeDefined();
    expect(map.nodes).toBeDefined();
    expect(map.edges).toBeDefined();
    expect(map.nodes.length).toBeGreaterThan(0);
  }, 30000);

  // Relative imports between fixture files should produce 'import' edges.
  test('should detect imports and create edges', async () => {
    const testDir = path.join(__dirname, '../example-project');
    const map = await scanner.scan(testDir);

    expect(map.edges.length).toBeGreaterThan(0);
    const hasImportEdge = map.edges.some(e => e.type === 'import');
    expect(hasImportEdge).toBe(true);
  });

  // The fixture's api.js is expected to contain fetch() calls.
  test('should detect API calls', async () => {
    const testDir = path.join(__dirname, '../example-project');
    const map = await scanner.scan(testDir);

    const apiFile = map.nodes.find(n => n.id.includes('api.js'));
    expect(apiFile).toBeDefined();
    expect(apiFile.metadata.apiCalls).toBeDefined();
    expect(apiFile.metadata.apiCalls.length).toBeGreaterThan(0);
  });

  test('should handle circular dependencies', async () => {
    // This is a basic test - in real scenarios we'd create files with circular deps
    const testDir = path.join(__dirname, '../example-project');
    const map = await scanner.scan(testDir);

    // Should not crash
    expect(map).toBeDefined();
  });
});
|