@gianmarcomaz/vantyr 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bin/vantyr.js ADDED
@@ -0,0 +1,6 @@
1
#!/usr/bin/env node
// CLI entry point: delegate all work to run() and convert any unhandled
// rejection into a readable message plus a non-zero exit code.
import { run } from '../src/cli.js';

const reportFatal = (error) => {
  console.error(`\nUnexpected error: ${error.message}\n`);
  process.exitCode = 1;
};

run().catch(reportFatal);
package/package.json ADDED
@@ -0,0 +1,50 @@
1
+ {
2
+ "name": "@gianmarcomaz/vantyr",
3
+ "version": "1.0.0",
4
+ "description": "MCP security scanner — Trust Score for AI dev environments. 100% local, zero telemetry.",
5
+ "author": "Gianmarco Mazzella",
6
+ "license": "MIT",
7
+ "type": "module",
8
+ "repository": {
9
+ "type": "git",
10
+ "url": "git+https://github.com/gianmarcomaz/vantyr.git"
11
+ },
12
+ "homepage": "https://github.com/gianmarcomaz/vantyr#readme",
13
+ "bugs": {
14
+ "url": "https://github.com/gianmarcomaz/vantyr/issues"
15
+ },
16
+ "bin": {
17
+ "vantyr": "./bin/vantyr.js"
18
+ },
19
+ "main": "./bin/vantyr.js",
20
+ "files": [
21
+ "bin/",
22
+ "src/",
23
+ "README.md",
24
+ "LICENSE"
25
+ ],
26
+ "publishConfig": {
27
+ "access": "public"
28
+ },
29
+ "keywords": [
30
+ "mcp",
31
+ "model-context-protocol",
32
+ "security",
33
+ "scanner",
34
+ "static-analysis",
35
+ "ai",
36
+ "llm",
37
+ "trust-score",
38
+ "cli",
39
+ "sarif",
40
+ "owasp",
41
+ "prompt-injection"
42
+ ],
43
+ "engines": {
44
+ "node": ">=18.0.0"
45
+ },
46
+ "dependencies": {
47
+ "chalk": "^5.3.0",
48
+ "commander": "^12.0.0"
49
+ }
50
+ }
package/src/cli.js ADDED
@@ -0,0 +1,148 @@
1
+ import { Command } from 'commander';
2
+ import { fetchRepoFiles } from './fetcher/github.js';
3
+ import { discoverLocalFiles } from './config/localScanner.js';
4
+ import { runAllChecks } from './scanner/index.js';
5
+ import { calculateTrustScore } from './scoring/trustScore.js';
6
+ import { printTerminalResult } from './output/terminal.js';
7
+ import { formatJsonResult } from './output/json.js';
8
+ import { formatSarifResult } from './output/sarif.js';
9
+ import chalk from 'chalk';
10
+
11
/**
 * Build and execute the vantyr CLI.
 *
 * Wires up the single `scan` command (GitHub-repo mode or `--local` mode),
 * runs all analyzers over the collected files, computes the Trust Score, and
 * emits results as terminal output, JSON, or SARIF. Sets `process.exitCode`
 * (rather than calling `process.exit`) so stdout can drain before exiting.
 *
 * @returns {Promise<void>}
 */
export async function run() {
  const program = new Command();

  program
    .name('vantyr')
    .description('MCP security scanner — Trust Score for AI dev environments. 100% local, zero telemetry.')
    .version('1.0.0');

  program.command('scan')
    .argument('[url]', 'GitHub repository URL to scan')
    .option('-l, --local', 'Discover local MCP configs and rules files')
    .option('-t, --token <pat>', 'GitHub Personal Access Token for higher rate limits')
    .option('-j, --json', 'Output results as JSON (suppresses all other output, for CI/CD pipelines)')
    .option('-s, --sarif', 'Output results as SARIF 2.1.0 (for GitHub Code Scanning / upload-sarif action)')
    .option('-v, --verbose', 'List every file that was scanned before showing results')
    .action(async (url, options) => {
      // --json and --sarif both emit structured data to stdout, so all
      // progress/status console.log calls must be silenced for those modes.
      const silent = !!(options.json || options.sarif);
      const log = (msg) => { if (!silent) console.log(msg); };
      // In silent (structured) mode errors still go to stdout as JSON so CI
      // consumers always receive parseable output; otherwise print in red.
      const logErr = (msg) => {
        if (silent) {
          console.log(JSON.stringify({ error: msg }));
        } else {
          console.error(chalk.red(msg));
        }
      };

      try {
        let files = [];
        let sourceUrl = '';

        if (options.local) {
          // Local mode: scan MCP configs / rules files on this machine
          // instead of fetching a repository.
          log(chalk.dim('\nDiscovering local MCP config and rules files...'));
          files = discoverLocalFiles();
          sourceUrl = 'Local Configuration & Rules';

          if (files.length === 0) {
            // Nothing to scan: still emit a well-formed document in the
            // requested structured format before returning.
            if (options.sarif) {
              const empty = formatSarifResult(sourceUrl, { scoreData: null, noFiles: true });
              console.log(JSON.stringify(empty, null, 2));
            } else if (options.json) {
              console.log(JSON.stringify({
                source: sourceUrl,
                trustScore: null,
                label: 'NO_FILES',
                message: 'No local MCP configuration or rules files found. Looked in common locations like ~/.cursor/mcp.json, claude_desktop_config.json, etc.',
                findings: []
              }, null, 2));
            } else {
              log(chalk.yellow('\nNo local MCP configuration or rules files found.'));
              log(chalk.dim('Looked in common locations like ~/.cursor/mcp.json, claude_desktop_config.json, etc.'));
            }
            return;
          }
          log(chalk.green(`Found ${files.length} local configuration/rules files.`));
          if (options.verbose) {
            log('');
            log(chalk.dim('Files:'));
            for (const f of files) {
              log(chalk.dim(`  ${f.path}`));
            }
          }
          log('');
        } else {
          if (!url) {
            throw new Error('Missing GitHub URL argument. Usage: vantyr scan <github-url>');
          }

          // Accept URLs with .git suffix or trailing paths (e.g. copied from browser)
          const normalizedUrl = url.replace(/\.git$/, '').replace(/\/(tree|blob)\/.*$/, '').replace(/\/$/, '');
          const REPO_URL_REGEX = /^https:\/\/github\.com\/([\w.-]+)\/([\w.-]+)$/;
          const match = normalizedUrl.match(REPO_URL_REGEX);

          if (!match) {
            throw new Error('Invalid GitHub URL. Please use the format: https://github.com/owner/repo');
          }

          const [, owner, repo] = match;
          sourceUrl = normalizedUrl;

          log(chalk.dim(`\nFetching repository files from GitHub (${owner}/${repo})...`));
          const fetchResult = await fetchRepoFiles(owner, repo, options.token);
          files = fetchResult.files;

          if (files.length === 0) {
            throw new Error('No scannable source files found in the repository. The repository may be empty or contain only unsupported file types.');
          }
          log(chalk.green(`Fetched ${files.length} files successfully.`));
          // fetchRepoFiles caps very large repos; surface that so the score
          // is not mistaken for a full-codebase result.
          if (fetchResult.capped) {
            log(chalk.yellow(` ⚠ Large repository: ${fetchResult.totalFound} eligible files found but only the first 100 were scanned. Results may not reflect the full codebase. Consider reviewing skipped files manually.`));
          }
          if (options.verbose) {
            log('');
            log(chalk.dim(`Files scanned (${files.length}):`));
            for (const f of files) {
              log(chalk.dim(`  ${f.path}`));
            }
          }
          log('');
        }

        // Run analyzers
        const checks = runAllChecks(files);

        // Calculate Trust Score
        const scoreData = calculateTrustScore(checks);

        // Output — mutually exclusive structured formats, terminal is default
        if (options.sarif) {
          const result = formatSarifResult(sourceUrl, { scoreData });
          console.log(JSON.stringify(result, null, 2));
        } else if (options.json) {
          const result = formatJsonResult(sourceUrl, { scoreData });
          console.log(JSON.stringify(result, null, 2));
        } else {
          printTerminalResult(sourceUrl, { scoreData, checks });
        }

        // Exit Code (using process.exitCode to allow async drain)
        if (scoreData.trustScore < 50) {
          process.exitCode = 1;
        }

      } catch (err) {
        logErr(`\nError: ${err.message}\n`);
        process.exitCode = 1;
      }
    });

  // Parse arguments or show help
  if (process.argv.length < 3) {
    program.help();
    return;
  }

  await program.parseAsync(process.argv);
}
@@ -0,0 +1,50 @@
1
+ import fs from 'fs';
2
+ import path from 'path';
3
+ import os from 'os';
4
+
5
+ /**
6
+ * Discovers and reads local MCP configuration files and rule files.
7
+ * @returns {Array<{path: string, content: string}>}
8
+ */
9
/**
 * Discovers and reads local MCP configuration files and rule files.
 *
 * Checks a fixed set of well-known config locations (Cursor, Windsurf,
 * Claude Desktop, VS Code) plus common agent rules files in the current
 * working directory. Missing or unreadable files are skipped silently —
 * discovery is best-effort by design.
 *
 * @returns {Array<{path: string, content: string}>} Readable files found.
 */
export function discoverLocalFiles() {
  const homedir = os.homedir();
  const cwd = process.cwd();
  const platform = os.platform();
  const appData = process.env.APPDATA || path.join(homedir, 'AppData', 'Roaming');

  // Claude Desktop stores its config in a platform-specific directory.
  // Previously the non-Windows branch always used the macOS path, so Linux
  // installs were never discovered; handle all three platforms explicitly.
  let claudeConfigPath;
  if (platform === 'win32') {
    claudeConfigPath = path.join(appData, 'Claude', 'claude_desktop_config.json');
  } else if (platform === 'darwin') {
    claudeConfigPath = path.join(homedir, 'Library', 'Application Support', 'Claude', 'claude_desktop_config.json');
  } else {
    // Linux and others: XDG config dir — TODO confirm against Claude Desktop docs
    const xdgConfig = process.env.XDG_CONFIG_HOME || path.join(homedir, '.config');
    claudeConfigPath = path.join(xdgConfig, 'Claude', 'claude_desktop_config.json');
  }

  // 1. MCP Config locations
  const configLocations = [
    path.join(homedir, '.cursor', 'mcp.json'),
    path.join(homedir, '.codeium', 'windsurf', 'mcp_config.json'),
    claudeConfigPath,
    path.join(cwd, '.vscode', 'mcp.json'),
    path.join(cwd, '.cursor', 'mcp.json')
  ];

  // 2. Rules files in current directory
  const ruleFiles = [
    '.cursorrules',
    '.windsurfrules',
    '.clinerules',
    'CLAUDE.md',
    'copilot-instructions.md'
  ].map(f => path.join(cwd, f));

  const allPaths = [...configLocations, ...ruleFiles];
  const files = [];

  for (const filePath of allPaths) {
    try {
      if (fs.existsSync(filePath)) {
        const content = fs.readFileSync(filePath, 'utf-8');
        files.push({ path: filePath, content });
      }
    } catch (err) {
      // Silently skip unreadable files (permissions, races) — best-effort.
    }
  }

  return files;
}
@@ -0,0 +1,155 @@
1
// File extensions eligible for scanning: source code, config formats, and
// docs. Files with other extensions are skipped unless whitelisted by exact
// name (see REQUIRED_FILES in fetchRepoFiles).
const ALLOWED_EXTENSIONS = new Set([
  ".js", ".ts", ".jsx", ".tsx", ".py", ".go", ".rs",
  ".json", ".yaml", ".yml", ".toml", ".env", ".md",
]);

// Directory names pruned anywhere in a file's path: dependency, VCS, build
// and cache folders that add cost without useful security signal.
const SKIP_DIRS = new Set([
  "node_modules", ".git", "dist", "build", "vendor",
  "__pycache__", ".next", ".nuxt", "coverage", ".venv",
]);

// Per-file size ceiling; larger blobs are skipped to keep scans fast.
const MAX_FILE_SIZE = 100 * 1024; // 100KB
12
+
13
/**
 * Fetch the file tree and contents from a GitHub repository.
 *
 * Resolves the default branch, lists the full recursive tree, filters it to
 * scannable files (allowed extension or explicitly required name, under the
 * size cap, outside skipped directories), then downloads contents in small
 * parallel batches. At most 100 files are fetched; `capped` reports whether
 * the limit was hit.
 *
 * @param {string} owner - Repo owner
 * @param {string} repo - Repo name
 * @param {string} token - Optional GitHub personal access token
 * @returns {Promise<{files: Array<{path: string, content: string}>, capped: boolean, totalFound: number}>}
 * @throws {Error} If the tree cannot be fetched (propagated from githubGet).
 */
export async function fetchRepoFiles(owner, repo, token) {
  // 1. Get the default branch
  const repoData = await githubGet(`/repos/${owner}/${repo}`, token);
  const branch = repoData.default_branch || "main";

  // 2. Get the recursive file tree
  const treeData = await githubGet(
    `/repos/${owner}/${repo}/git/trees/${branch}?recursive=1`,
    token
  );

  if (!treeData.tree || !Array.isArray(treeData.tree)) {
    throw new Error("Could not fetch repository file tree. Check that the repository exists and is accessible.");
  }

  // Files fetched regardless of extension — high-signal for secret leaks.
  const REQUIRED_FILES = new Set([".gitignore", ".env", ".env.local"]);

  // 3. Filter files
  const filePaths = treeData.tree
    .filter((item) => {
      if (item.type !== "blob") return false;
      if (item.size && item.size > MAX_FILE_SIZE) return false;

      const filename = item.path.split("/").pop();
      const isRequired = REQUIRED_FILES.has(filename);

      // Check extension (if not explicitly required)
      if (!isRequired) {
        const ext = getExtension(item.path);
        if (!ALLOWED_EXTENSIONS.has(ext)) return false;
      }

      // Check for skipped directories
      const parts = item.path.split("/");
      for (const part of parts) {
        if (SKIP_DIRS.has(part)) return false;
      }

      return true;
    })
    .map((item) => item.path);

  // 4. Fetch file contents (max 100 files to stay within timeout)
  const FILE_LIMIT = 100;
  const capped = filePaths.length > FILE_LIMIT;
  const limitedPaths = filePaths.slice(0, FILE_LIMIT);
  const files = [];

  // Fetch in batches of 5 to respect rate limits
  for (let i = 0; i < limitedPaths.length; i += 5) {
    const batch = limitedPaths.slice(i, i + 5);
    const results = await Promise.all(
      batch.map(async (path) => {
        try {
          const data = await githubGet(
            `/repos/${owner}/${repo}/contents/${encodeURIComponent(path)}?ref=${branch}`,
            token
          );
          if (data.encoding === "base64" && data.content) {
            const content = Buffer.from(data.content, "base64").toString("utf-8");
            return { path, content };
          }
          // Non-base64 responses (e.g. submodules) are skipped.
          return null;
        } catch (err) {
          // A single unreadable file should not abort the whole scan.
          return null;
        }
      })
    );
    files.push(...results.filter(Boolean));
  }

  return { files, capped, totalFound: filePaths.length };
}
93
+
94
/**
 * Make a GET request to the GitHub API using native fetch.
 *
 * @param {string} path - API path beginning with "/" (e.g. "/repos/o/r").
 * @param {string} [token] - Optional GitHub PAT; sent as a Bearer token.
 * @returns {Promise<object>} Parsed JSON body of a successful response.
 * @throws {Error} With an actionable message for 404s, rate limiting,
 *   private-repo access, other non-2xx statuses, and connection failures.
 */
async function githubGet(path, token) {
  const headers = {
    "User-Agent": "MCP-Certify/1.0",
    "Accept": "application/vnd.github.v3+json",
  };

  if (token) {
    headers["Authorization"] = `Bearer ${token}`;
  }

  const url = `https://api.github.com${path}`;

  try {
    const response = await fetch(url, { headers });

    if (!response.ok) {
      if (response.status === 404) {
        // Determine if it was a repo 404 or a file 404
        const isRepoPath = path.startsWith('/repos/') && !path.includes('/contents/') && !path.includes('/git/trees/');
        if (isRepoPath) {
          const parts = path.split('/');
          throw new Error(`Repository not found: ${parts[2]}/${parts[3]}. Check that the URL is correct and the repository is public.`);
        }
        throw new Error(`GitHub API error 404: Not Found (${path})`);
      }

      if (response.status === 403 || response.status === 429) {
        const limit = response.headers.get("x-ratelimit-limit");
        const remaining = response.headers.get("x-ratelimit-remaining");

        if (remaining === "0") {
          throw new Error(`GitHub API rate limit exceeded (${limit} req/hr). Use the --token <github-pat> flag for authenticated access (5000 req/hr). Generate a token at: https://github.com/settings/tokens`);
        }

        const data = await response.json().catch(() => ({}));
        if (data.message?.toLowerCase().includes('private')) {
          throw new Error('Repository is private or requires authentication. Use --token <github-pat> to access private repositories.');
        }

        throw new Error(`GitHub API error ${response.status}: ${data.message || 'Forbidden'}`);
      }

      const body = await response.text().catch(() => '');
      throw new Error(`GitHub API error ${response.status}: ${body.slice(0, 300) || response.statusText}`);
    }

    return response.json();
  } catch (err) {
    // Node's built-in fetch (undici) rejects with `TypeError: fetch failed`
    // and puts the errno on err.cause.code, not err.code — check both so
    // DNS/connection failures still produce the friendly offline message.
    const code = err.code ?? err.cause?.code;
    if (code === 'ENOTFOUND' || code === 'ECONNREFUSED') {
      throw new Error('Could not connect to GitHub API. Check your internet connection.');
    }
    throw err;
  }
}
151
+
152
/**
 * Return the file extension (including the leading dot) of a path, or ""
 * when there is none.
 *
 * Only the final path segment is examined, so a dot in a directory name
 * (e.g. "release.v2/Makefile") is no longer mistaken for an extension —
 * the previous whole-path lastIndexOf(".") returned ".v2/Makefile" there.
 *
 * @param {string} filePath - Slash-separated repo path (GitHub tree paths).
 * @returns {string} Extension like ".js", or "" if the basename has no dot.
 */
function getExtension(filePath) {
  const base = filePath.slice(filePath.lastIndexOf("/") + 1);
  const dot = base.lastIndexOf(".");
  return dot >= 0 ? base.slice(dot) : "";
}
@@ -0,0 +1,64 @@
1
/**
 * JSON output formatter for vantyr.
 * Produces a stable, machine-readable structure suitable for CI/CD pipelines,
 * dashboards, or downstream tooling.
 */

// Human-readable names for the two-letter analyzer category codes.
const CATEGORY_NAMES = {
  NE: 'Network Exposure',
  CI: 'Command Injection',
  CL: 'Credential Leaks',
  TP: 'Tool Poisoning',
  SC: 'Spec Compliance',
  IV: 'Input Validation',
};

/**
 * Convert a scan's score data into the stable JSON result shape.
 *
 * Emits a pass/fail label (FAILED < 50 <= WARNING < 80 <= CERTIFIED),
 * per-category summaries keyed by category code, and a flattened `findings`
 * array with category metadata attached to each finding.
 *
 * @param {string} sourceUrl - Repo URL or local-mode source description.
 * @param {{ scoreData: object }} results
 * @returns {object}
 */
export function formatJsonResult(sourceUrl, { scoreData }) {
  const { trustScore, categories, totalFindings, stats, passCount, scoreCapped } = scoreData;

  let label;
  if (trustScore < 50) {
    label = 'FAILED';
  } else if (trustScore < 80) {
    label = 'WARNING';
  } else {
    label = 'CERTIFIED';
  }

  // Build the per-category summaries and the flattened findings list in a
  // single pass over the category map.
  const allFindings = [];
  const categorySummaries = {};
  for (const [code, cat] of Object.entries(categories)) {
    const displayName = CATEGORY_NAMES[code] || code;

    for (const f of cat.findings) {
      allFindings.push({
        category: code,
        categoryName: displayName,
        severity: f.severity,
        file: f.file || null,
        line: f.line || null,
        snippet: f.snippet || '',
        message: f.message || '',
        remediation: f.remediation || '',
      });
    }

    categorySummaries[code] = {
      name: displayName,
      score: cat.score,
      passed: cat.score >= 80,
      findingCount: cat.findings.length,
      findings: cat.findings,
    };
  }

  return {
    source: sourceUrl,
    trustScore,
    label,
    scoreCapped: scoreCapped || false,
    categories: categorySummaries,
    stats,
    passCount,
    totalFindings,
    findings: allFindings,
  };
}
+ }