shieldcortex 4.2.4 → 4.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/doctor.js +54 -13
- package/dist/index.d.ts +3 -0
- package/dist/index.js +10 -1
- package/dist/license/gate.d.ts +1 -1
- package/dist/license/gate.js +2 -0
- package/dist/xray/dir-scanner.d.ts +14 -0
- package/dist/xray/dir-scanner.js +77 -0
- package/dist/xray/file-scanner.d.ts +15 -0
- package/dist/xray/file-scanner.js +296 -0
- package/dist/xray/index.d.ts +20 -0
- package/dist/xray/index.js +166 -0
- package/dist/xray/npm-inspector.d.ts +15 -0
- package/dist/xray/npm-inspector.js +380 -0
- package/dist/xray/patterns.d.ts +20 -0
- package/dist/xray/patterns.js +271 -0
- package/dist/xray/report.d.ts +16 -0
- package/dist/xray/report.js +193 -0
- package/dist/xray/trust-score.d.ts +10 -0
- package/dist/xray/trust-score.js +37 -0
- package/dist/xray/types.d.ts +29 -0
- package/dist/xray/types.js +7 -0
- package/package.json +1 -1
- package/plugins/openclaw/dist/openclaw.plugin.json +1 -1
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* X-Ray — Package, File & Plugin Risk Inspector
|
|
3
|
+
*
|
|
4
|
+
* Main entry point for the `shieldcortex xray` CLI command.
|
|
5
|
+
* Inspects npm packages, local files, and directories for hidden risk.
|
|
6
|
+
*
|
|
7
|
+
* Free tier: local scans only (no npm registry), max 5 scans/day.
|
|
8
|
+
* Pro tier: npm registry analysis, deep scanning, unlimited scans.
|
|
9
|
+
*/
|
|
10
|
+
import fs from 'fs';
|
|
11
|
+
import path from 'path';
|
|
12
|
+
import os from 'os';
|
|
13
|
+
import { isFeatureEnabled, requireFeature } from '../license/gate.js';
|
|
14
|
+
import { scanFile } from './file-scanner.js';
|
|
15
|
+
import { scanDirectory } from './dir-scanner.js';
|
|
16
|
+
import { inspectNpmPackage } from './npm-inspector.js';
|
|
17
|
+
import { calculateTrustScore } from './trust-score.js';
|
|
18
|
+
import { formatXRayReport, formatXRayMarkdown } from './report.js';
|
|
19
|
+
export { calculateTrustScore } from './trust-score.js';
|
|
20
|
+
export { detectPatterns, detectFilenameDirectives } from './patterns.js';
|
|
21
|
+
export { scanFile } from './file-scanner.js';
|
|
22
|
+
export { scanDirectory } from './dir-scanner.js';
|
|
23
|
+
export { inspectNpmPackage } from './npm-inspector.js';
|
|
24
|
+
export { formatXRayReport, formatXRayMarkdown } from './report.js';
|
|
25
|
+
// ── Usage tracking ──────────────────────────────────────────
|
|
26
|
+
// Per-user usage-tracking state file, stored under the user's home directory.
const USAGE_FILE = path.join(os.homedir(), '.shieldcortex', 'xray-usage.json');
// Maximum scans per UTC day on the free tier (Pro is unlimited).
const FREE_DAILY_LIMIT = 5;
|
|
28
|
+
/**
 * Read the persisted daily-usage record ({ date, count }) from USAGE_FILE.
 *
 * Falls back to an empty record when the file is missing, unreadable, or
 * does not contain the expected shape (e.g. hand-edited or corrupted), so
 * callers can rely on `date` being a string and `count` being a number.
 */
function getUsage() {
    try {
        const raw = fs.readFileSync(USAGE_FILE, 'utf-8');
        const parsed = JSON.parse(raw);
        // Validate the shape: previously the parsed value was returned as-is,
        // so a corrupt file with e.g. a string `count` would make `count++`
        // and the `< FREE_DAILY_LIMIT` comparison misbehave.
        if (parsed && typeof parsed.date === 'string' && typeof parsed.count === 'number') {
            return parsed;
        }
        return { date: '', count: 0 };
    }
    catch {
        // Missing file or invalid JSON — treat as "no usage recorded yet".
        return { date: '', count: 0 };
    }
}
|
|
37
|
+
/**
 * Record one more scan for today, resetting the counter on date rollover.
 *
 * Creates the state directory on first use and persists the updated
 * { date, count } record to USAGE_FILE. Dates are UTC calendar days
 * (ISO date prefix of `toISOString()`).
 */
function incrementUsage() {
    const today = new Date().toISOString().slice(0, 10);
    const usage = getUsage();
    if (usage.date === today) {
        usage.count += 1;
    }
    else {
        // First scan of a new day: restart the counter.
        usage.date = today;
        usage.count = 1;
    }
    const stateDir = path.dirname(USAGE_FILE);
    if (!fs.existsSync(stateDir)) {
        fs.mkdirSync(stateDir, { recursive: true });
    }
    fs.writeFileSync(USAGE_FILE, JSON.stringify(usage));
}
|
|
54
|
+
/**
 * Decide whether another scan is allowed under the current licence tier.
 *
 * Pro ('xray_deep') licences are never limited. Free-tier users get
 * FREE_DAILY_LIMIT scans per UTC day; a usage record from a previous day
 * always permits a scan (the counter is reset by incrementUsage).
 */
function checkFreeLimit() {
    // Pro licence: no daily cap at all.
    if (isFeatureEnabled('xray_deep')) {
        return true;
    }
    const usage = getUsage();
    const today = new Date().toISOString().slice(0, 10);
    // A stale date means the stored count belongs to an earlier day.
    if (usage.date !== today) {
        return true;
    }
    return usage.count < FREE_DAILY_LIMIT;
}
|
|
63
|
+
// ── Target detection ────────────────────────────────────────
|
|
64
|
+
/**
 * Heuristically decide whether `target` names an npm package rather than a
 * local path.
 *
 * Relative ('.'), absolute ('/'), and home-prefixed ('~') targets are always
 * local, and any target that exists on disk wins over a registry lookup.
 * Otherwise the target must match npm's package-name grammar: lowercase,
 * with an optional @scope/ prefix.
 */
function isNpmPackageName(target) {
    const looksLikePath = target.startsWith('.') || target.startsWith('/') || target.startsWith('~');
    if (looksLikePath) {
        return false;
    }
    // An existing file or directory always takes priority over the registry.
    if (fs.existsSync(target)) {
        return false;
    }
    const NPM_NAME = /^(@[a-z0-9-~][a-z0-9-._~]*\/)?[a-z0-9-~][a-z0-9-._~]*$/;
    return NPM_NAME.test(target);
}
|
|
72
|
+
// ── CLI handler ─────────────────────────────────────────────
|
|
73
|
+
/**
 * Handle the `shieldcortex xray` CLI command.
 *
 * Resolves the target (npm package name, local file, or directory), enforces
 * licence gating (--deep and registry inspection are Pro-only) and the
 * free-tier daily limit, runs the appropriate scanner, records usage, and
 * prints the result in the requested format. Exits the process with code 1
 * on usage errors or gating failures.
 */
export async function handleXRayCommand(args) {
    const flags = new Set(args.filter(a => a.startsWith('--')));
    const positional = args.filter(a => !a.startsWith('--'));
    const deep = flags.has('--deep');
    const jsonOutput = flags.has('--json');
    const markdownOutput = flags.has('--markdown');
    // No target given: print usage help and bail out.
    if (positional.length === 0) {
        const usageLines = [
            'Usage: shieldcortex xray <target> [--deep] [--json] [--markdown]',
            '',
            ' target: npm package name, local file path, or directory path',
            ' --deep Deep scan with full analysis (Pro)',
            ' --json Output JSON result',
            ' --markdown Output markdown report',
            '',
            'Examples:',
            ' shieldcortex xray ./src/',
            ' shieldcortex xray package.json',
            ' shieldcortex xray lodash --deep',
        ];
        for (const line of usageLines) {
            console.error(line);
        }
        process.exit(1);
    }
    const target = positional[0];
    // --deep is gated behind a Pro licence.
    if (deep) {
        try {
            requireFeature('xray_deep');
        }
        catch (err) {
            console.error(err instanceof Error ? err.message : String(err));
            process.exit(1);
        }
    }
    // Free tier: enforce the daily scan quota.
    if (!checkFreeLimit()) {
        console.error('Daily scan limit reached. Upgrade to Pro for unlimited scans.');
        console.error(' https://shieldcortex.ai/pricing');
        process.exit(1);
    }
    let result;
    if (isNpmPackageName(target)) {
        // Registry inspection is Pro-only; free tier is local scans only.
        if (!isFeatureEnabled('xray_deep')) {
            console.error('npm registry inspection requires a Pro licence.');
            console.error('Free tier supports local file and directory scans only.');
            console.error(' Upgrade: https://shieldcortex.ai/pricing');
            process.exit(1);
        }
        result = await inspectNpmPackage(target, deep);
    }
    else {
        // Local file or directory target.
        const resolved = path.resolve(target);
        if (!fs.existsSync(resolved)) {
            console.error(`Target not found: ${resolved}`);
            process.exit(1);
        }
        const stat = fs.statSync(resolved);
        if (stat.isFile()) {
            const findings = await scanFile(resolved, deep);
            const { score, riskLevel } = calculateTrustScore(findings);
            result = {
                target: resolved,
                trustScore: score,
                riskLevel,
                findings,
                filesScanned: 1,
                scannedAt: new Date(),
                deepScan: deep,
            };
        }
        else if (stat.isDirectory()) {
            result = await scanDirectory(resolved, deep);
        }
        else {
            // Sockets, FIFOs, devices, etc. are not scannable.
            console.error(`Target is not a file or directory: ${resolved}`);
            process.exit(1);
        }
    }
    // Count this scan against the daily quota only after it succeeded.
    incrementUsage();
    // Emit in the requested format (human-readable report by default).
    if (jsonOutput) {
        console.log(JSON.stringify(result, null, 2));
    }
    else if (markdownOutput) {
        console.log(formatXRayMarkdown(result));
    }
    else {
        console.log(formatXRayReport(result));
    }
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
/**
 * X-Ray NPM Package Inspector
 *
 * Downloads package metadata from registry.npmjs.org, analyses it for risk
 * signals, and optionally scans the tarball contents (Pro/deep scan).
 * Uses only Node.js built-ins (https module) — no new dependencies.
 */
import type { XRayResult } from './types.js';
/**
 * Inspect an npm package for hidden risk.
 *
 * @param packageName - npm package name (e.g. "lodash", "@scope/pkg")
 * @param deep - If true, downloads and scans the tarball (Pro feature)
 * @returns Scan result with trust score, risk level, and all findings.
 */
export declare function inspectNpmPackage(packageName: string, deep?: boolean): Promise<XRayResult>;
|
|
@@ -0,0 +1,380 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* X-Ray NPM Package Inspector
|
|
3
|
+
*
|
|
4
|
+
* Downloads package metadata from registry.npmjs.org, analyses it for risk
|
|
5
|
+
* signals, and optionally scans the tarball contents (Pro/deep scan).
|
|
6
|
+
* Uses only Node.js built-ins (https module) — no new dependencies.
|
|
7
|
+
*/
|
|
8
|
+
import https from 'https';
|
|
9
|
+
import fs from 'fs';
|
|
10
|
+
import path from 'path';
|
|
11
|
+
import os from 'os';
|
|
12
|
+
import { createGunzip } from 'zlib';
|
|
13
|
+
import { detectPatterns } from './patterns.js';
|
|
14
|
+
import { calculateTrustScore } from './trust-score.js';
|
|
15
|
+
import { scanFile } from './file-scanner.js';
|
|
16
|
+
// ── Constants ───────────────────────────────────────────────
|
|
17
|
+
/**
 * Popular npm packages for typosquat comparison.
 * A scanned package whose base name is within edit distance 2 of any of
 * these (but not an exact match) is flagged as a possible typosquat.
 */
const POPULAR_PACKAGES = [
    'react', 'express', 'lodash', 'axios', 'chalk', 'commander', 'debug',
    'webpack', 'typescript', 'next', 'vue', 'angular', 'moment', 'request',
    'underscore', 'async', 'bluebird', 'uuid', 'dotenv', 'cors', 'ws',
    'body-parser', 'mongoose', 'yargs', 'inquirer', 'glob', 'rimraf',
    'eslint', 'prettier', 'jest', 'mocha', 'chai', 'sinon', 'nodemon',
    'babel', 'rollup', 'vite', 'esbuild', 'fastify', 'koa', 'hapi',
    'socket.io', 'passport', 'jsonwebtoken', 'bcrypt', 'mysql', 'pg',
    'redis', 'mongodb', 'sequelize', 'prisma', 'graphql', 'apollo',
];
|
|
28
|
+
// ── Helpers ─────────────────────────────────────────────────
|
|
29
|
+
/**
 * Simple HTTPS GET returning the response body as a string.
 *
 * Follows 3xx redirects up to `maxRedirects` deep. Previously redirects were
 * followed recursively without any bound, so a malicious or misconfigured
 * server could redirect forever.
 *
 * @param url - Absolute https:// URL to fetch.
 * @param maxRedirects - Remaining redirects to follow before rejecting.
 * @returns Promise resolving to the UTF-8 decoded response body.
 */
function httpsGet(url, maxRedirects = 5) {
    return new Promise((resolve, reject) => {
        https.get(url, { headers: { 'Accept': 'application/json' } }, (res) => {
            if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
                if (maxRedirects <= 0) {
                    reject(new Error(`Too many redirects for ${url}`));
                    res.resume();
                    return;
                }
                httpsGet(res.headers.location, maxRedirects - 1).then(resolve, reject);
                res.resume(); // Drain the redirect response so its socket is freed.
                return;
            }
            if (res.statusCode !== 200) {
                reject(new Error(`HTTP ${res.statusCode} for ${url}`));
                res.resume();
                return;
            }
            const chunks = [];
            res.on('data', (chunk) => chunks.push(chunk));
            res.on('end', () => resolve(Buffer.concat(chunks).toString('utf-8')));
            res.on('error', reject);
        }).on('error', reject);
    });
}
|
|
51
|
+
/**
 * Download a tarball to a temp file and return the path.
 *
 * Follows up to 5 redirects (previously unbounded) and removes the
 * partially-written temp file on any failure path, so aborted deep scans
 * do not leak files in the OS temp directory.
 *
 * @param url - Tarball URL (the registry's `dist.tarball` field).
 * @returns Promise resolving to the path of the downloaded .tgz file.
 */
function downloadTarball(url) {
    return new Promise((resolve, reject) => {
        const tmpFile = path.join(os.tmpdir(), `shieldcortex-xray-${Date.now()}.tgz`);
        const file = fs.createWriteStream(tmpFile);
        // Best-effort cleanup of the partial file before rejecting.
        const fail = (err) => {
            file.close(() => {
                fs.rm(tmpFile, { force: true }, () => reject(err));
            });
        };
        const doGet = (targetUrl, redirectsLeft) => {
            https.get(targetUrl, (res) => {
                if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
                    res.resume(); // Drain the redirect body.
                    if (redirectsLeft <= 0) {
                        fail(new Error('Too many redirects downloading tarball'));
                        return;
                    }
                    doGet(res.headers.location, redirectsLeft - 1);
                    return;
                }
                if (res.statusCode !== 200) {
                    fail(new Error(`HTTP ${res.statusCode} downloading tarball`));
                    res.resume();
                    return;
                }
                res.pipe(file);
                file.on('finish', () => {
                    file.close();
                    resolve(tmpFile);
                });
            }).on('error', fail);
        };
        doGet(url, 5);
    });
}
|
|
79
|
+
/**
 * Gunzip a .tgz fully into memory and extract the contained tar archive
 * into a fresh temp directory using only Node built-ins (zlib + manual tar
 * parsing via extractTarBuffer).
 *
 * @param tgzPath - Path to the downloaded .tgz file.
 * @returns Promise resolving to the path of the extraction directory.
 */
async function extractTarball(tgzPath) {
    const extractDir = path.join(os.tmpdir(), `shieldcortex-xray-extract-${Date.now()}`);
    fs.mkdirSync(extractDir, { recursive: true });
    return new Promise((resolve, reject) => {
        const parts = [];
        const gunzip = createGunzip();
        gunzip.on('data', (part) => parts.push(part));
        gunzip.on('error', reject);
        gunzip.on('end', () => {
            try {
                extractTarBuffer(Buffer.concat(parts), extractDir);
                resolve(extractDir);
            }
            catch (err) {
                reject(err);
            }
        });
        const source = fs.createReadStream(tgzPath);
        source.on('error', reject);
        source.pipe(gunzip);
    });
}
|
|
106
|
+
/**
|
|
107
|
+
* Minimal tar extraction from a buffer. Handles ustar format.
|
|
108
|
+
*/
|
|
109
|
+
function extractTarBuffer(buf, outDir) {
|
|
110
|
+
let offset = 0;
|
|
111
|
+
while (offset < buf.length - 512) {
|
|
112
|
+
// Read header (512 bytes)
|
|
113
|
+
const header = buf.subarray(offset, offset + 512);
|
|
114
|
+
// Check for empty block (end of archive)
|
|
115
|
+
if (header.every(b => b === 0))
|
|
116
|
+
break;
|
|
117
|
+
// Extract filename (0-100 bytes, null-terminated)
|
|
118
|
+
const nameEnd = header.indexOf(0, 0);
|
|
119
|
+
const name = header.subarray(0, Math.min(nameEnd, 100)).toString('utf-8');
|
|
120
|
+
// Extract file size (octal, bytes 124-136)
|
|
121
|
+
const sizeStr = header.subarray(124, 136).toString('utf-8').trim();
|
|
122
|
+
const size = parseInt(sizeStr, 8) || 0;
|
|
123
|
+
// Extract type flag (byte 156)
|
|
124
|
+
const typeFlag = header[156];
|
|
125
|
+
// Prefix field for ustar (bytes 345-500)
|
|
126
|
+
const prefixEnd = header.indexOf(0, 345);
|
|
127
|
+
const prefix = header.subarray(345, Math.min(prefixEnd, 500)).toString('utf-8');
|
|
128
|
+
const fullName = prefix ? `${prefix}/${name}` : name;
|
|
129
|
+
const filePath = path.join(outDir, fullName);
|
|
130
|
+
offset += 512; // Move past header
|
|
131
|
+
if (typeFlag === 48 || typeFlag === 0) { // Regular file ('0' or null)
|
|
132
|
+
if (size > 0) {
|
|
133
|
+
const dir = path.dirname(filePath);
|
|
134
|
+
fs.mkdirSync(dir, { recursive: true });
|
|
135
|
+
const fileData = buf.subarray(offset, offset + size);
|
|
136
|
+
fs.writeFileSync(filePath, fileData);
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
else if (typeFlag === 53) { // Directory ('5')
|
|
140
|
+
fs.mkdirSync(filePath, { recursive: true });
|
|
141
|
+
}
|
|
142
|
+
// Advance past data blocks (rounded up to 512 boundary)
|
|
143
|
+
offset += Math.ceil(size / 512) * 512;
|
|
144
|
+
}
|
|
145
|
+
}
|
|
146
|
+
/**
 * Levenshtein edit distance between two strings (insertions, deletions,
 * and substitutions each cost 1). Used for typosquat proximity checks.
 */
function editDistance(a, b) {
    // Rolling two-row dynamic programme instead of a full (m+1)x(n+1) matrix:
    // only the previous row is needed to compute the current one.
    let previous = Array.from({ length: b.length + 1 }, (_, j) => j);
    for (let i = 1; i <= a.length; i++) {
        const current = [i];
        for (let j = 1; j <= b.length; j++) {
            const substitution = previous[j - 1] + (a[i - 1] === b[j - 1] ? 0 : 1);
            const deletion = previous[j] + 1;
            const insertion = current[j - 1] + 1;
            current.push(Math.min(substitution, deletion, insertion));
        }
        previous = current;
    }
    return previous[b.length];
}
|
|
166
|
+
// ── Public API ──────────────────────────────────────────────
|
|
167
|
+
/**
 * Inspect an npm package for hidden risk.
 *
 * Fetches registry metadata (maintainers, publish history, latest-version
 * manifest) and raises findings for risk signals: single maintainer, very
 * new package with few versions, install lifecycle hooks, AI-directive
 * patterns in description/keywords, large direct-dependency counts, and
 * typosquat proximity to popular package names. When `deep` is set, the
 * latest tarball is downloaded, extracted, scanned file-by-file, and the
 * temp artifacts are cleaned up afterwards.
 *
 * @param packageName - npm package name (e.g. "lodash", "@scope/pkg")
 * @param deep - If true, downloads and scans the tarball (Pro feature)
 * @returns Result object with trust score, risk level, and all findings.
 */
export async function inspectNpmPackage(packageName, deep = false) {
    // NOTE(review): startTime is currently unused — scan duration is never
    // reported in the result. Confirm whether it should be removed or wired in.
    const startTime = Date.now();
    const findings = [];
    let filesScanned = 0;
    // Fetch package metadata. Scoped names keep the leading "@" but the rest
    // (including the "/") is percent-encoded.
    const encodedName = packageName.startsWith('@')
        ? `@${encodeURIComponent(packageName.slice(1))}`
        : encodeURIComponent(packageName);
    let meta;
    try {
        const raw = await httpsGet(`https://registry.npmjs.org/${encodedName}`);
        meta = JSON.parse(raw);
    }
    catch (err) {
        // Network/parse failure: degrade to an informational finding and
        // return early instead of throwing out of the CLI.
        const errMsg = err instanceof Error ? err.message : String(err);
        findings.push({
            severity: 'info',
            category: 'dependency-risk',
            title: 'Failed to fetch package metadata',
            description: `Could not fetch metadata from npm registry: ${errMsg}`,
        });
        const { score, riskLevel } = calculateTrustScore(findings);
        return {
            target: packageName,
            trustScore: score,
            riskLevel,
            findings,
            filesScanned: 0,
            scannedAt: new Date(),
            deepScan: deep,
        };
    }
    // Check maintainer count — a single maintainer raises bus-factor risk.
    const maintainers = meta.maintainers;
    if (maintainers && maintainers.length === 1) {
        findings.push({
            severity: 'low',
            category: 'dependency-risk',
            title: 'Single maintainer',
            description: 'Package has only one maintainer, increasing bus factor risk.',
        });
    }
    // Check publish frequency / time. `time` maps version -> publish
    // timestamp, plus 'created' and 'modified' keys which are filtered out.
    const time = meta.time;
    if (time) {
        const versions = Object.keys(time).filter(k => k !== 'created' && k !== 'modified');
        const created = new Date(time.created || '');
        const lastPublish = versions.length > 0 ? new Date(time[versions[versions.length - 1]]) : null;
        // Recently created with very few versions — potential typosquat
        if (lastPublish && versions.length <= 2) {
            const ageMs = Date.now() - created.getTime();
            const ageDays = ageMs / (1000 * 60 * 60 * 24);
            if (ageDays < 30) {
                findings.push({
                    severity: 'medium',
                    category: 'dependency-risk',
                    title: 'Very new package with few versions',
                    description: `Package was created ${Math.round(ageDays)} days ago with only ${versions.length} version(s).`,
                });
            }
        }
    }
    // Check latest version metadata (the manifest behind dist-tags.latest).
    const distTags = meta['dist-tags'];
    const latestVersion = distTags?.latest;
    const versions = meta.versions;
    const latestMeta = latestVersion && versions ? versions[latestVersion] : null;
    if (latestMeta) {
        // Check scripts: install-time hooks run arbitrary code on `npm install`.
        const scripts = latestMeta.scripts;
        if (scripts) {
            for (const hook of ['preinstall', 'install', 'postinstall']) {
                if (scripts[hook]) {
                    const val = scripts[hook];
                    findings.push({
                        severity: 'medium',
                        category: 'persistence-hook',
                        title: `Has ${hook} script`,
                        description: `Package defines a "${hook}" lifecycle script.`,
                        evidence: val.slice(0, 120),
                    });
                    // Escalate when the hook shells out, downloads, or evals.
                    if (/curl|wget|node\s+-e|bash\s+-c|powershell|https?:\/\/|eval|exec/i.test(val)) {
                        findings.push({
                            severity: 'high',
                            category: 'persistence-hook',
                            title: `Suspicious ${hook} script`,
                            description: `The "${hook}" script executes potentially dangerous commands.`,
                            evidence: val.slice(0, 120),
                        });
                    }
                }
            }
        }
        // Check description/keywords for AI directives. JSON.stringify gives
        // detectPatterns the raw text in the same form it appears in metadata.
        const description = latestMeta.description;
        if (description) {
            const descFindings = detectPatterns(JSON.stringify({ description }));
            findings.push(...descFindings);
        }
        const keywords = latestMeta.keywords;
        if (keywords) {
            const kwFindings = detectPatterns(JSON.stringify({ keywords }));
            findings.push(...kwFindings);
        }
        // Check dependency count — large trees widen the attack surface.
        const deps = latestMeta.dependencies;
        if (deps && Object.keys(deps).length > 50) {
            findings.push({
                severity: 'low',
                category: 'dependency-risk',
                title: 'High dependency count',
                description: `Package has ${Object.keys(deps).length} direct dependencies, increasing attack surface.`,
            });
        }
    }
    // Typosquatting check against the scope-stripped base name.
    const baseName = packageName.replace(/^@[^/]+\//, '');
    for (const popular of POPULAR_PACKAGES) {
        if (baseName === popular)
            break; // It IS the popular package — comparing against others would misfire.
        const dist = editDistance(baseName, popular);
        if (dist > 0 && dist <= 2) {
            findings.push({
                severity: 'high',
                category: 'dependency-risk',
                title: 'Possible typosquat',
                description: `Package name "${packageName}" is ${dist} edit(s) from popular package "${popular}".`,
                evidence: `edit distance: ${dist}`,
            });
            break; // One near-miss is enough to flag.
        }
    }
    // Deep scan: download and scan tarball. Nested try/finally guarantees the
    // tarball and extraction directory are removed even if scanning throws.
    if (deep && latestMeta) {
        const dist = latestMeta.dist;
        if (dist?.tarball) {
            try {
                const tgzPath = await downloadTarball(dist.tarball);
                try {
                    const extractDir = await extractTarball(tgzPath);
                    try {
                        // Walk and scan extracted files
                        const extractedFiles = walkExtracted(extractDir);
                        filesScanned = extractedFiles.length;
                        for (const file of extractedFiles) {
                            const fileFindings = await scanFile(file, true);
                            for (const f of fileFindings) {
                                // Make file paths relative to package
                                f.file = f.file ? path.relative(extractDir, f.file) : f.file;
                                findings.push(f);
                            }
                        }
                    }
                    finally {
                        // Clean up extracted dir
                        fs.rmSync(extractDir, { recursive: true, force: true });
                    }
                }
                finally {
                    // Clean up tarball
                    fs.unlinkSync(tgzPath);
                }
            }
            catch {
                // Download/extract failure degrades to an informational finding.
                findings.push({
                    severity: 'info',
                    category: 'dependency-risk',
                    title: 'Tarball scan failed',
                    description: 'Could not download or extract the package tarball for deep scanning.',
                });
            }
        }
    }
    const { score, riskLevel } = calculateTrustScore(findings);
    return {
        target: packageName,
        trustScore: score,
        riskLevel,
        findings,
        filesScanned,
        scannedAt: new Date(),
        deepScan: deep,
    };
}
|
|
357
|
+
/**
 * Recursively collect all regular-file paths under `dir`, skipping any
 * `node_modules` directories. Unreadable directories are silently ignored.
 *
 * @param dir - Directory to walk.
 * @param files - Accumulator array (also the return value).
 */
function walkExtracted(dir, files = []) {
    let entries;
    try {
        entries = fs.readdirSync(dir, { withFileTypes: true });
    }
    catch {
        // Unreadable directory: nothing to add.
        return files;
    }
    for (const entry of entries) {
        const fullPath = path.join(dir, entry.name);
        if (entry.isFile()) {
            files.push(fullPath);
            continue;
        }
        if (entry.isDirectory() && entry.name !== 'node_modules') {
            walkExtracted(fullPath, files);
        }
    }
    return files;
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
 * X-Ray Pattern Detection
 *
 * Comprehensive pattern groups for detecting hidden risk in packages, files,
 * and metadata. Follows the same conventions as skill-scanner/patterns.ts:
 * - safeRegexTest wrapper for every test
 * - MAX_SCAN_LENGTH truncation to prevent ReDOS
 * - PatternGroup style with weighted confidence
 * - One match per group is enough (break after first)
 */
import type { XRayFinding } from './types.js';
/**
 * Run all X-Ray pattern groups against content and return matched findings.
 * Optionally tag findings with a file path and compute line numbers.
 *
 * @param content - Text to scan (truncated to MAX_SCAN_LENGTH internally).
 * @param filePath - Optional path used to tag findings with their source file.
 * @returns One finding per pattern group that matched.
 */
export declare function detectPatterns(content: string, filePath?: string): XRayFinding[];
/**
 * Check if a filename itself contains AI directive patterns.
 *
 * @param filename - File name to inspect.
 * @returns Findings describing any directive-like patterns in the name.
 */
export declare function detectFilenameDirectives(filename: string): XRayFinding[];
|