verimu 0.0.3 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.mjs +1616 -0
- package/dist/cli.mjs.map +1 -0
- package/dist/index.cjs +104 -2
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +83 -3
- package/dist/index.d.ts +83 -3
- package/dist/index.mjs +101 -1
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/cli.mjs
ADDED
|
@@ -0,0 +1,1616 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/cli.ts
|
|
4
|
+
import { resolve } from "path";
|
|
5
|
+
|
|
6
|
+
// src/scan.ts
|
|
7
|
+
import { writeFile } from "fs/promises";
|
|
8
|
+
import { basename } from "path";
|
|
9
|
+
|
|
10
|
+
// src/scanners/npm/npm-scanner.ts
|
|
11
|
+
import { readFile } from "fs/promises";
|
|
12
|
+
import { existsSync } from "fs";
|
|
13
|
+
import path from "path";
|
|
14
|
+
|
|
15
|
+
// src/core/errors.ts
|
|
16
|
+
var VerimuError = class extends Error {
  /**
   * Base class for all verimu errors.
   * @param {string} message - human-readable description
   * @param {string} code - stable machine-readable error code
   */
  constructor(message, code) {
    super(message);
    this.name = "VerimuError";
    this.code = code;
  }
};
var NoLockfileError = class extends VerimuError {
  /**
   * Raised when a project directory contains none of the lockfiles the
   * registered scanners understand.
   * @param {string} projectPath - directory that was probed
   */
  constructor(projectPath) {
    const detail = `No supported lockfile found in ${projectPath}. Supported: package-lock.json (npm), packages.lock.json (NuGet), Cargo.lock (Rust), requirements.txt / Pipfile.lock (Python), pom.xml (Maven), go.sum (Go), Gemfile.lock (Ruby)`;
    super(detail, "NO_LOCKFILE");
    this.name = "NoLockfileError";
  }
};
var LockfileParseError = class extends VerimuError {
  /**
   * Raised when a lockfile exists but its contents cannot be parsed.
   * @param {string} lockfilePath - path of the offending lockfile
   * @param {string} reason - short explanation of the failure
   */
  constructor(lockfilePath, reason) {
    super(`Failed to parse ${lockfilePath}: ${reason}`, "LOCKFILE_PARSE_ERROR");
    this.name = "LockfileParseError";
  }
};
|
|
38
|
+
|
|
39
|
+
// src/scanners/npm/npm-scanner.ts
|
|
40
|
+
var NpmScanner = class {
  ecosystem = "npm";
  lockfileNames = ["package-lock.json"];
  /** Returns the package-lock.json path when present, else null. */
  async detect(projectPath) {
    const candidate = path.join(projectPath, "package-lock.json");
    if (existsSync(candidate)) {
      return candidate;
    }
    return null;
  }
  /**
   * Reads the lockfile (plus package.json when present, which supplies
   * the direct/transitive split) and builds a scan result.
   * @throws LockfileParseError when the lockfile is not valid JSON
   */
  async scan(projectPath, lockfilePath) {
    const [lockfileRaw, packageJsonRaw] = await Promise.all([
      readFile(lockfilePath, "utf-8"),
      readFile(path.join(projectPath, "package.json"), "utf-8").catch(() => null)
    ]);
    let parsed;
    try {
      parsed = JSON.parse(lockfileRaw);
    } catch {
      throw new LockfileParseError(lockfilePath, "Invalid JSON");
    }
    const directNames = new Set();
    if (packageJsonRaw) {
      try {
        const manifest = JSON.parse(packageJsonRaw);
        const declared = [
          ...Object.keys(manifest.dependencies ?? {}),
          ...Object.keys(manifest.devDependencies ?? {})
        ];
        for (const depName of declared) {
          directNames.add(depName);
        }
      } catch {
        // A malformed package.json only costs us the direct flag; the
        // lockfile itself is still scanned.
      }
    }
    return {
      projectPath,
      ecosystem: "npm",
      dependencies: this.parseLockfile(parsed, directNames),
      lockfilePath,
      scannedAt: new Date().toISOString()
    };
  }
  /**
   * Extracts dependencies from a parsed package-lock.json.
   * Lockfile v2/v3 expose a flat `packages` map; v1 nests entries under
   * `dependencies`, which is walked recursively.
   */
  parseLockfile(lockfile, directNames) {
    const collected = [];
    if (lockfile.packages) {
      for (const [entryPath, entry] of Object.entries(lockfile.packages)) {
        if (entryPath === "") continue;
        const name = this.extractPackageName(entryPath);
        if (!name || !entry.version) continue;
        if (entry.link) continue;
        collected.push({
          name,
          version: entry.version,
          direct: directNames.has(name),
          ecosystem: "npm",
          purl: this.buildPurl(name, entry.version)
        });
      }
    } else if (lockfile.dependencies) {
      this.parseDependenciesV1(lockfile.dependencies, directNames, collected);
    }
    return collected;
  }
  /**
   * Builds a purl (Package URL) for an npm package.
   *
   * Per the purl spec (https://github.com/package-url/purl-spec/blob/main/types-doc/npm-definition.md):
   * "The npm scope @ sign prefix is always percent encoded."
   *
   * So @types/node@20.11.5 → pkg:npm/%40types/node@20.11.5
   * And express@4.18.2 → pkg:npm/express@4.18.2
   */
  buildPurl(name, version) {
    const encodedName = name.startsWith("@") ? `%40${name.slice(1)}` : name;
    return `pkg:npm/${encodedName}@${version}`;
  }
  /** Extracts the package name from a node_modules path */
  extractPackageName(pkgPath) {
    const segments = pkgPath.split("node_modules/");
    const tail = segments.at(-1);
    return tail || null;
  }
  /** Recursively parses lockfile v1 `dependencies` tree */
  parseDependenciesV1(depsObj, directNames, result) {
    for (const [name, info] of Object.entries(depsObj)) {
      if (info.version) {
        result.push({
          name,
          version: info.version,
          direct: directNames.has(name),
          ecosystem: "npm",
          purl: this.buildPurl(name, info.version)
        });
      }
      if (info.dependencies) {
        // v1 nests transitive deps arbitrarily deep; recurse into them.
        this.parseDependenciesV1(info.dependencies, directNames, result);
      }
    }
  }
};
|
|
145
|
+
|
|
146
|
+
// src/scanners/nuget/nuget-scanner.ts
|
|
147
|
+
import { readFile as readFile2 } from "fs/promises";
|
|
148
|
+
import { existsSync as existsSync2 } from "fs";
|
|
149
|
+
import path2 from "path";
|
|
150
|
+
var NugetScanner = class {
  ecosystem = "nuget";
  lockfileNames = ["packages.lock.json"];
  /** Returns the packages.lock.json path when present, else null. */
  async detect(projectPath) {
    const lockfilePath = path2.join(projectPath, "packages.lock.json");
    return existsSync2(lockfilePath) ? lockfilePath : null;
  }
  /**
   * Reads packages.lock.json and builds a scan result.
   * @throws LockfileParseError when the file is not valid JSON or lacks
   *   the top-level "dependencies" field
   */
  async scan(projectPath, lockfilePath) {
    const lockfileRaw = await readFile2(lockfilePath, "utf-8");
    let lockfile;
    try {
      lockfile = JSON.parse(lockfileRaw);
    } catch {
      throw new LockfileParseError(lockfilePath, "Invalid JSON");
    }
    if (!lockfile.dependencies) {
      throw new LockfileParseError(lockfilePath, 'Missing "dependencies" field');
    }
    const dependencies = this.parseLockfile(lockfile);
    return {
      projectPath,
      ecosystem: "nuget",
      dependencies,
      lockfilePath,
      scannedAt: new Date().toISOString()
    };
  }
  /**
   * Parses packages.lock.json and extracts dependencies across all
   * target frameworks, deduplicating by package name.
   *
   * When the same package appears under multiple frameworks, the FIRST
   * version encountered (in framework file order) wins — NOT the highest
   * version — and the package is flagged `direct` if ANY framework lists
   * it with type "Direct".
   */
  parseLockfile(lockfile) {
    const depMap = new Map();
    for (const packages of Object.values(lockfile.dependencies)) {
      for (const [name, info] of Object.entries(packages)) {
        // Entries without a resolved version (e.g. project references)
        // are not concrete packages; skip them.
        if (!info.resolved) continue;
        const isDirect = info.type === "Direct";
        const existing = depMap.get(name);
        if (!existing) {
          depMap.set(name, {
            name,
            version: info.resolved,
            direct: isDirect,
            ecosystem: "nuget",
            purl: this.buildPurl(name, info.resolved)
          });
        } else if (isDirect && !existing.direct) {
          // Keep the first-seen version but upgrade the direct flag.
          existing.direct = true;
        }
      }
    }
    return Array.from(depMap.values());
  }
  /**
   * Builds a purl for a NuGet package.
   * NuGet purls are straightforward: pkg:nuget/Name@Version
   */
  buildPurl(name, version) {
    return `pkg:nuget/${name}@${version}`;
  }
};
|
|
212
|
+
|
|
213
|
+
// src/scanners/cargo/cargo-scanner.ts
|
|
214
|
+
import { readFile as readFile3 } from "fs/promises";
|
|
215
|
+
import { existsSync as existsSync3 } from "fs";
|
|
216
|
+
import path3 from "path";
|
|
217
|
+
var CargoScanner = class {
  ecosystem = "cargo";
  lockfileNames = ["Cargo.lock"];
  /** Returns the Cargo.lock path when present, else null. */
  async detect(projectPath) {
    const candidate = path3.join(projectPath, "Cargo.lock");
    if (existsSync3(candidate)) {
      return candidate;
    }
    return null;
  }
  /**
   * Reads Cargo.lock (and Cargo.toml when present, which supplies the
   * direct flag) and builds a scan result.
   */
  async scan(projectPath, lockfilePath) {
    const [lockfileRaw, cargoTomlRaw] = await Promise.all([
      readFile3(lockfilePath, "utf-8"),
      readFile3(path3.join(projectPath, "Cargo.toml"), "utf-8").catch(() => null)
    ]);
    const packages = this.parseLockfile(lockfileRaw, lockfilePath);
    const directNames = cargoTomlRaw ? this.parseCargoToml(cargoTomlRaw) : new Set();
    // The first listed package with no `source` is presumably the project
    // crate itself rather than a dependency, so it is filtered out below
    // — NOTE(review): Cargo.lock sorts packages alphabetically; confirm
    // this heuristic on multi-member workspaces.
    const rootName = packages.length > 0 ? packages[0].name : null;
    const dependencies = [];
    for (const pkg of packages) {
      if (pkg.name === rootName && pkg.source === void 0) continue;
      dependencies.push({
        name: pkg.name,
        version: pkg.version,
        direct: directNames.has(pkg.name),
        ecosystem: "cargo",
        purl: this.buildPurl(pkg.name, pkg.version)
      });
    }
    return {
      projectPath,
      ecosystem: "cargo",
      dependencies,
      lockfilePath,
      scannedAt: new Date().toISOString()
    };
  }
  /**
   * Splits Cargo.lock on [[package]] blocks and pulls name/version/source
   * out of each. Deliberately lightweight: handles Cargo.lock's regular
   * structure without a full TOML parser.
   * @throws LockfileParseError when [[package]] markers exist but no
   *   block could be parsed
   */
  parseLockfile(content, lockfilePath) {
    const found = [];
    for (const block of content.split(/^\[\[package\]\]$/m)) {
      if (block.trim() === "") continue;
      const name = this.extractField(block, "name");
      const version = this.extractField(block, "version");
      const source = this.extractField(block, "source");
      if (!name || !version) continue;
      found.push({ name, version, source: source || void 0 });
    }
    if (found.length === 0 && content.includes("[[package]]")) {
      throw new LockfileParseError(lockfilePath, "Failed to parse any packages from Cargo.lock");
    }
    return found;
  }
  /**
   * Extracts a string field value from a TOML block.
   * Handles the `name = "value"` format; returns null when absent.
   */
  extractField(block, fieldName) {
    const hit = block.match(new RegExp(`^${fieldName}\\s*=\\s*"([^"]*)"`, "m"));
    return hit ? hit[1] : null;
  }
  /**
   * Collects direct dependency names from Cargo.toml's [dependencies],
   * [dev-dependencies] and [build-dependencies] sections.
   */
  parseCargoToml(content) {
    const sectionHeaders = new Set(["[dependencies]", "[dev-dependencies]", "[build-dependencies]"]);
    const directNames = new Set();
    let collecting = false;
    for (const rawLine of content.split("\n")) {
      const line = rawLine.trim();
      if (line.startsWith("[")) {
        collecting = sectionHeaders.has(line);
        continue;
      }
      if (!collecting || !line || line.startsWith("#")) continue;
      const hit = line.match(/^([a-zA-Z0-9_-]+)\s*=/);
      if (hit) {
        directNames.add(hit[1]);
      }
    }
    return directNames;
  }
  /**
   * Builds a purl for a Cargo (crates.io) package.
   */
  buildPurl(name, version) {
    return `pkg:cargo/${name}@${version}`;
  }
};
|
|
311
|
+
|
|
312
|
+
// src/scanners/pip/pip-scanner.ts
|
|
313
|
+
import { readFile as readFile4 } from "fs/promises";
|
|
314
|
+
import { existsSync as existsSync4 } from "fs";
|
|
315
|
+
import path4 from "path";
|
|
316
|
+
var PipScanner = class {
  ecosystem = "pip";
  lockfileNames = ["requirements.txt", "Pipfile.lock"];
  /** Returns the first supported Python lockfile found, else null. */
  async detect(projectPath) {
    for (const lockfile of this.lockfileNames) {
      const fullPath = path4.join(projectPath, lockfile);
      if (existsSync4(fullPath)) return fullPath;
    }
    return null;
  }
  /** Dispatches to the right parser based on the lockfile's basename. */
  async scan(projectPath, lockfilePath) {
    const raw = await readFile4(lockfilePath, "utf-8");
    const filename = path4.basename(lockfilePath);
    const dependencies = filename === "Pipfile.lock" ? this.parsePipfileLock(raw, lockfilePath) : this.parseRequirementsTxt(raw, lockfilePath);
    return {
      projectPath,
      ecosystem: "pip",
      dependencies,
      lockfilePath,
      scannedAt: new Date().toISOString()
    };
  }
  /**
   * Parses `requirements.txt` format.
   *
   * Supports:
   * - `package==1.2.3` (pinned)
   * - `package>=1.2.0` (minimum — uses the specified version)
   * - `package~=1.2.0` (compatible release)
   * - Comments (`#`) and blank lines are skipped
   * - `-r other-file.txt` (include directive) — skipped for now
   * - `--index-url` and other pip flags — skipped
   */
  parseRequirementsTxt(content, lockfilePath) {
    const deps = [];
    for (const rawLine of content.split("\n")) {
      const line = rawLine.trim();
      // A single "-" prefix check also covers "--" flags like --index-url.
      if (!line || line.startsWith("#") || line.startsWith("-")) {
        continue;
      }
      const match = line.match(/^([a-zA-Z0-9_][a-zA-Z0-9._-]*)\s*(?:[~=!<>]=?)\s*(.+)$/);
      if (!match) continue;
      const [, name, versionSpec] = match;
      const version = this.extractVersion(versionSpec);
      if (name && version) {
        deps.push({
          name: this.normalizePipName(name),
          version,
          direct: true,
          // requirements.txt doesn't distinguish
          ecosystem: "pip",
          purl: this.buildPurl(name, version)
        });
      }
    }
    return deps;
  }
  /**
   * Parses `Pipfile.lock` (JSON format from Pipenv).
   *
   * Both the "default" (runtime) and "develop" (dev) sections are
   * collected. Every entry is flagged direct — NOTE(review): Pipfile.lock
   * also pins transitive dependencies, so this may over-mark; confirm.
   *
   * @throws LockfileParseError when the content is not valid JSON
   */
  parsePipfileLock(content, lockfilePath) {
    let lockfile;
    try {
      lockfile = JSON.parse(content);
    } catch {
      throw new LockfileParseError(lockfilePath, "Invalid JSON in Pipfile.lock");
    }
    const deps = [];
    this.collectPipfileSection(lockfile.default, deps);
    this.collectPipfileSection(lockfile.develop, deps);
    return deps;
  }
  /**
   * Appends every pinned entry of one Pipfile.lock section onto `deps`.
   * Entries without a `version` field are skipped.
   */
  collectPipfileSection(section, deps) {
    if (!section) return;
    for (const [name, info] of Object.entries(section)) {
      const version = info.version?.replace(/^==/, "") ?? "";
      if (!version) continue;
      deps.push({
        name: this.normalizePipName(name),
        version,
        direct: true,
        ecosystem: "pip",
        purl: this.buildPurl(name, version)
      });
    }
  }
  /**
   * Extracts the version number from a pip version specifier.
   * "1.2.3" → "1.2.3"
   * "1.2.3,<2.0" → "1.2.3"
   */
  extractVersion(spec) {
    return spec.split(",")[0].trim();
  }
  /**
   * Normalizes a pip package name per PEP 503.
   * Converts to lowercase and replaces any run of [-_.] with a single hyphen.
   */
  normalizePipName(name) {
    return name.toLowerCase().replace(/[-_.]+/g, "-");
  }
  /**
   * Builds a purl for a PyPI package.
   * Per purl spec, the type is "pypi" (not "pip").
   */
  buildPurl(name, version) {
    return `pkg:pypi/${this.normalizePipName(name)}@${version}`;
  }
};
|
|
457
|
+
|
|
458
|
+
// src/scanners/maven/maven-scanner.ts
|
|
459
|
+
import { readFile as readFile5 } from "fs/promises";
|
|
460
|
+
import { existsSync as existsSync5 } from "fs";
|
|
461
|
+
import { execSync } from "child_process";
|
|
462
|
+
import path5 from "path";
|
|
463
|
+
var MavenScanner = class {
  ecosystem = "maven";
  lockfileNames = ["pom.xml"];
  /** Allow injection for testing */
  execSyncFn;
  constructor(execSyncImpl) {
    this.execSyncFn = execSyncImpl ?? execSync;
  }
  /** Returns the pom.xml path when present, else null. */
  async detect(projectPath) {
    const pomPath = path5.join(projectPath, "pom.xml");
    return existsSync5(pomPath) ? pomPath : null;
  }
  /**
   * Resolves Maven dependencies, preferring a pre-generated
   * dependency-tree.txt, then falling back to invoking `mvn`.
   * @throws LockfileParseError when neither source is available
   */
  async scan(projectPath, _lockfilePath) {
    const depTreePath = path5.join(projectPath, "dependency-tree.txt");
    if (existsSync5(depTreePath)) {
      const content = await readFile5(depTreePath, "utf-8");
      const dependencies = this.parseDependencyList(content, depTreePath);
      return this.buildResult(projectPath, depTreePath, dependencies);
    }
    if (this.isMavenAvailable()) {
      const output = this.runMavenDependencyList(projectPath);
      const dependencies = this.parseDependencyList(output, "mvn dependency:list");
      return this.buildResult(projectPath, path5.join(projectPath, "pom.xml"), dependencies);
    }
    throw new LockfileParseError(
      path5.join(projectPath, "pom.xml"),
      "Maven project detected (pom.xml found) but could not resolve dependencies. Either install Maven (`mvn` must be on $PATH) or pre-generate a dependency list:\n mvn dependency:list -DoutputFile=dependency-tree.txt -DappendOutput=true"
    );
  }
  /**
   * Parses Maven `dependency:list` output.
   *
   * Each dependency line has one of the formats:
   *   groupId:artifactId:type:version:scope
   *   groupId:artifactId:type:classifier:version:scope
   *
   * Lines are typically indented with leading whitespace; lines that do
   * not match the coordinate shape (log prefixes, headers) are ignored.
   */
  parseDependencyList(content, source) {
    const deps = [];
    // The regex only gates which lines look like dependency coordinates;
    // the fields themselves come from a colon split below so the optional
    // classifier can be handled positionally.
    const depPattern = /^\s*([a-zA-Z0-9._-]+):([a-zA-Z0-9._-]+):([a-z]+):(?:([a-zA-Z0-9._-]+):)?([a-zA-Z0-9._-]+):([a-z]+)/;
    for (const rawLine of content.split("\n")) {
      const line = rawLine.trim();
      if (!line || !depPattern.test(line)) continue;
      const parts = line.split(":");
      if (parts.length < 5) continue;
      const gId = parts[0].trim();
      const aId = parts[1];
      // With 6 parts a classifier is present, shifting version and scope.
      const ver = parts.length === 6 ? parts[4] : parts[3];
      const scp = parts.length === 6 ? parts[5] : parts[4];
      if (!gId || !aId || !ver) continue;
      deps.push({
        name: `${gId}:${aId}`,
        version: ver,
        // NOTE(review): uses scope as a proxy for "direct" —
        // dependency:list output does not expose dependency depth.
        direct: scp === "compile" || scp === "runtime" || scp === "provided",
        ecosystem: "maven",
        purl: this.buildPurl(gId, aId, ver)
      });
    }
    return deps;
  }
  /** Checks if `mvn` is available on PATH */
  isMavenAvailable() {
    try {
      this.execSyncFn("mvn --version", { stdio: "pipe", timeout: 1e4 });
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Runs `mvn dependency:list` and returns the output.
   * @throws LockfileParseError when the Maven invocation fails
   */
  runMavenDependencyList(projectPath) {
    try {
      const output = this.execSyncFn(
        "mvn dependency:list -DoutputType=text -DincludeScope=compile",
        {
          cwd: projectPath,
          stdio: "pipe",
          timeout: 12e4,
          // 2 minute timeout
          encoding: "utf-8"
        }
      );
      return output.toString();
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      throw new LockfileParseError(
        path5.join(projectPath, "pom.xml"),
        `Failed to run 'mvn dependency:list': ${message}`
      );
    }
  }
  /**
   * Builds a purl for a Maven package.
   * Format: pkg:maven/groupId/artifactId@version
   */
  buildPurl(groupId, artifactId, version) {
    return `pkg:maven/${groupId}/${artifactId}@${version}`;
  }
  /** Assembles the common scan-result envelope. */
  buildResult(projectPath, lockfilePath, dependencies) {
    return {
      projectPath,
      ecosystem: "maven",
      dependencies,
      lockfilePath,
      scannedAt: new Date().toISOString()
    };
  }
};
|
|
584
|
+
|
|
585
|
+
// src/scanners/go/go-scanner.ts
|
|
586
|
+
import { readFile as readFile6 } from "fs/promises";
|
|
587
|
+
import { existsSync as existsSync6 } from "fs";
|
|
588
|
+
import path6 from "path";
|
|
589
|
+
var GoScanner = class {
  ecosystem = "go";
  lockfileNames = ["go.sum"];
  /** Returns the go.sum path when present, else null. */
  async detect(projectPath) {
    const goSumPath = path6.join(projectPath, "go.sum");
    return existsSync6(goSumPath) ? goSumPath : null;
  }
  /**
   * Reads go.sum (and go.mod when present, which supplies the
   * direct/indirect split) and builds a scan result.
   */
  async scan(projectPath, lockfilePath) {
    const [goSumRaw, goModRaw] = await Promise.all([
      readFile6(lockfilePath, "utf-8"),
      readFile6(path6.join(projectPath, "go.mod"), "utf-8").catch(() => null)
    ]);
    const { directNames, indirectNames } = goModRaw ? this.parseGoMod(goModRaw) : { directNames: new Set(), indirectNames: new Set() };
    const dependencies = this.parseGoSum(goSumRaw, lockfilePath, directNames, indirectNames);
    return {
      projectPath,
      ecosystem: "go",
      dependencies,
      lockfilePath,
      scannedAt: new Date().toISOString()
    };
  }
  /**
   * Parses go.sum and extracts unique module dependencies.
   *
   * Each module may appear twice in go.sum (once for the source archive,
   * once for go.mod). We deduplicate by module path + version, keeping
   * only the `h1:` entry (not the `/go.mod` entry).
   */
  parseGoSum(content, lockfilePath, directNames, indirectNames) {
    const depMap = new Map();
    // When go.mod gave us no information at all, every module is assumed
    // direct; otherwise only modules go.mod lists without "// indirect".
    const haveGoModInfo = directNames.size > 0 || indirectNames.size > 0;
    for (const rawLine of content.split("\n")) {
      const line = rawLine.trim();
      if (!line) continue;
      const parts = line.split(/\s+/);
      if (parts.length < 3) continue;
      const modulePath = parts[0];
      let version = parts[1];
      if (version.endsWith("/go.mod")) continue;
      version = version.replace(/\+incompatible$/, "");
      const key = `${modulePath}@${version}`;
      if (depMap.has(key)) continue;
      depMap.set(key, {
        name: modulePath,
        version,
        direct: haveGoModInfo ? directNames.has(modulePath) : true,
        ecosystem: "go",
        purl: this.buildPurl(modulePath, version)
      });
    }
    return Array.from(depMap.values());
  }
  /**
   * Parses go.mod to extract direct and indirect dependency names.
   *
   * Handles both single-line and block `require` directives:
   * ```
   * require github.com/pkg/errors v0.9.1
   *
   * require (
   *   github.com/gin-gonic/gin v1.9.1
   *   golang.org/x/text v0.14.0 // indirect
   * )
   * ```
   */
  parseGoMod(content) {
    const directNames = new Set();
    const indirectNames = new Set();
    // A trailing "// indirect" marker routes the module to the indirect set.
    const classify = (modulePath, rest) => {
      if (rest.includes("// indirect")) {
        indirectNames.add(modulePath);
      } else {
        directNames.add(modulePath);
      }
    };
    let inRequireBlock = false;
    for (const rawLine of content.split("\n")) {
      const line = rawLine.trim();
      if (line.startsWith("require ") && !line.includes("(")) {
        const match = line.match(/^require\s+(\S+)\s+\S+(.*)$/);
        if (match) classify(match[1], match[2]);
        continue;
      }
      if (line.startsWith("require (")) {
        inRequireBlock = true;
        continue;
      }
      if (inRequireBlock && line === ")") {
        inRequireBlock = false;
        continue;
      }
      if (inRequireBlock && line && !line.startsWith("//")) {
        const match = line.match(/^(\S+)\s+\S+(.*)$/);
        if (match) classify(match[1], match[2]);
      }
    }
    return { directNames, indirectNames };
  }
  /**
   * Builds a purl for a Go module.
   *
   * Per purl spec, the type is "golang" and the module path
   * uses `/` separators (no encoding needed for path segments).
   *
   * Example: `pkg:golang/github.com/gin-gonic/gin@v1.9.1`
   */
  buildPurl(modulePath, version) {
    return `pkg:golang/${modulePath}@${version}`;
  }
};
|
|
709
|
+
|
|
710
|
+
// src/scanners/ruby/ruby-scanner.ts
|
|
711
|
+
import { readFile as readFile7 } from "fs/promises";
|
|
712
|
+
import { existsSync as existsSync7 } from "fs";
|
|
713
|
+
import path7 from "path";
|
|
714
|
+
var RubyScanner = class {
  ecosystem = "ruby";
  lockfileNames = ["Gemfile.lock"];
  /** Returns the Gemfile.lock path when present, else null. */
  async detect(projectPath) {
    const candidate = path7.join(projectPath, "Gemfile.lock");
    if (existsSync7(candidate)) {
      return candidate;
    }
    return null;
  }
  /** Reads Gemfile.lock and builds a scan result. */
  async scan(projectPath, lockfilePath) {
    const content = await readFile7(lockfilePath, "utf-8");
    const specs = this.parseSpecs(content, lockfilePath);
    const directNames = this.parseDependencies(content);
    const dependencies = [];
    for (const { name, version } of specs) {
      dependencies.push({
        name,
        version,
        direct: directNames.has(name),
        ecosystem: "ruby",
        purl: `pkg:gem/${name}@${version}`
      });
    }
    return {
      projectPath,
      ecosystem: "ruby",
      dependencies,
      lockfilePath,
      scannedAt: new Date().toISOString()
    };
  }
  /**
   * Walks the GEM > specs section and collects every resolved gem.
   *
   * Top-level entries in the specs section are indented exactly 4 spaces
   * and look like `    gem-name (1.2.3)`. Deeper-indented lines are
   * version constraints on sub-dependencies, not separate entries —
   * those sub-deps appear as their own top-level entries elsewhere.
   *
   * @throws LockfileParseError when no gems can be found
   */
  parseSpecs(content, lockfilePath) {
    const SPEC_LINE = /^ {4}(\S+)\s+\(([^)]+)\)$/;
    const gems = [];
    let inGemSection = false;
    let inSpecs = false;
    for (const line of content.split("\n")) {
      const isSectionHeader = line.length > 0 && line[0] !== " ";
      if (isSectionHeader) {
        inGemSection = line.startsWith("GEM");
        inSpecs = false;
        continue;
      }
      if (inGemSection && line.trimStart().startsWith("specs:")) {
        inSpecs = true;
        continue;
      }
      if (!inSpecs) continue;
      const hit = line.match(SPEC_LINE);
      if (hit) {
        gems.push({ name: hit[1], version: hit[2] });
      }
    }
    if (gems.length === 0) {
      throw new LockfileParseError(
        lockfilePath,
        "No gems found in GEM specs section"
      );
    }
    return gems;
  }
  /**
   * Reads the DEPENDENCIES section and returns the set of gem names the
   * Gemfile declares directly.
   *
   * Entries look like `  gem-name (>= 1.0)` or `  gem-name`; the version
   * constraint is optional and only the name is needed (an optional `!`
   * suffix is stripped).
   */
  parseDependencies(content) {
    const DEP_LINE = /^ {2}(\S+?)!?\s*(?:\(|$)/;
    const directNames = new Set();
    let inDependencies = false;
    for (const line of content.split("\n")) {
      if (line.length > 0 && line[0] !== " ") {
        if (line.startsWith("DEPENDENCIES")) {
          inDependencies = true;
          continue;
        }
        // The section ended at the next top-level header.
        if (inDependencies) break;
        continue;
      }
      if (!inDependencies) continue;
      const hit = line.match(DEP_LINE);
      if (hit) {
        directNames.add(hit[1]);
      }
    }
    return directNames;
  }
};
|
|
813
|
+
|
|
814
|
+
// src/scanners/registry.ts
/**
 * Holds one scanner per supported ecosystem and dispatches scans to
 * whichever scanner recognizes the project's lockfile.
 */
var ScannerRegistry = class {
  scanners;
  constructor() {
    // Detection order matters: the first scanner whose lockfile exists wins.
    this.scanners = [
      new NpmScanner(),
      new NugetScanner(),
      new CargoScanner(),
      new PipScanner(),
      new MavenScanner(),
      new GoScanner(),
      new RubyScanner()
    ];
  }
  /**
   * Auto-detects the project's ecosystem and scans dependencies.
   * Tries each registered scanner in order until one matches.
   * @throws NoLockfileError when no scanner recognizes the project.
   */
  async detectAndScan(projectPath) {
    for (const candidate of this.scanners) {
      const lockfilePath = await candidate.detect(projectPath);
      if (!lockfilePath) continue;
      return candidate.scan(projectPath, lockfilePath);
    }
    throw new NoLockfileError(projectPath);
  }
  /** Returns a specific scanner by ecosystem name (undefined if absent). */
  getScanner(ecosystem) {
    for (const scanner of this.scanners) {
      if (scanner.ecosystem === ecosystem) return scanner;
    }
    return void 0;
  }
  /** Lists all registered ecosystems. */
  listEcosystems() {
    return this.scanners.map((scanner) => scanner.ecosystem);
  }
};
|
|
850
|
+
|
|
851
|
+
// src/sbom/cyclonedx.ts
import { randomUUID } from "crypto";

/**
 * Generates CycloneDX 1.7 JSON SBOMs from scan results, populating the
 * NTIA minimum elements (suppliers, names/versions, relationships,
 * timestamp, tooling metadata).
 */
var CycloneDxGenerator = class {
  format = "cyclonedx-json";
  /**
   * Produces the SBOM envelope: serialized BOM plus bookkeeping fields.
   * @param scanResult  scanner output (projectPath + dependencies)
   * @param toolVersion version reported for the verimu tool component
   */
  generate(scanResult, toolVersion = "0.1.0") {
    const bomDocument = this.buildBom(scanResult, toolVersion);
    return {
      format: "cyclonedx-json",
      specVersion: "1.7",
      content: JSON.stringify(bomDocument, null, 2),
      componentCount: scanResult.dependencies.length,
      generatedAt: new Date().toISOString()
    };
  }
  /** Assembles the complete CycloneDX BOM object. */
  buildBom(scanResult, toolVersion) {
    const projectName = this.extractProjectName(scanResult.projectPath);
    const toolComponent = {
      type: "application",
      name: "verimu",
      version: toolVersion,
      description: "Verimu CRA Compliance Scanner",
      supplier: { name: "Verimu" },
      externalReferences: [
        {
          type: "website",
          url: "https://verimu.com"
        }
      ]
    };
    return {
      $schema: "http://cyclonedx.org/schema/bom-1.7.schema.json",
      bomFormat: "CycloneDX",
      specVersion: "1.7",
      serialNumber: `urn:uuid:${randomUUID()}`,
      version: 1,
      metadata: {
        timestamp: new Date().toISOString(),
        tools: { components: [toolComponent] },
        // NTIA: metadata.supplier — the org supplying the root software
        supplier: { name: projectName },
        component: {
          type: "application",
          name: projectName,
          "bom-ref": "root-component",
          supplier: { name: projectName }
        }
      },
      components: scanResult.dependencies.map((dep) => this.toComponent(dep)),
      dependencies: this.buildDependencyGraph(scanResult)
    };
  }
  /** Converts a Verimu Dependency to a CycloneDX component. */
  toComponent(dep) {
    const scope = dep.direct ? "required" : "optional";
    return {
      type: "library",
      name: dep.name,
      version: dep.version,
      purl: dep.purl,
      "bom-ref": dep.purl,
      scope,
      // NTIA: component.supplier — derived from npm scope or package name
      supplier: { name: this.deriveSupplierName(dep.name) }
    };
  }
  /**
   * Derives a supplier name from a package name.
   *
   * For scoped packages like "@vue/reactivity" → "@vue"
   * For unscoped packages like "express" → "express"
   *
   * This is the same heuristic used by Syft, Trivy, and other SBOM tools
   * when registry metadata (author/publisher) isn't available from the lockfile.
   */
  deriveSupplierName(packageName) {
    const isScoped = packageName.startsWith("@");
    return isScoped ? packageName.split("/")[0] : packageName;
  }
  /**
   * Builds the dependency graph section of the SBOM.
   *
   * The root component depends on ALL dependencies (direct + transitive),
   * giving the single root node NTIA validators expect. A flat lockfile
   * doesn't let us attribute transitive deps to specific direct deps, and
   * a complete-but-flat relationship is still valid per the CycloneDX spec.
   */
  buildDependencyGraph(scanResult) {
    const rootNode = {
      ref: "root-component",
      dependsOn: scanResult.dependencies.map((dep) => dep.purl)
    };
    return [rootNode];
  }
  /** Extracts the project name from the last path segment. */
  extractProjectName(projectPath) {
    const segments = projectPath.replace(/\\/g, "/").split("/");
    const last = segments[segments.length - 1];
    return last || "unknown-project";
  }
};
|
|
965
|
+
|
|
966
|
+
// src/cve/osv.ts
var OSV_API_BASE = "https://api.osv.dev/v1";
var BATCH_SIZE = 1e3;

/**
 * CVE source backed by OSV.dev's batch query API.
 * A custom fetch implementation may be injected (useful for tests).
 */
var OsvSource = class {
  sourceId = "osv";
  name = "OSV.dev (Google Open Source Vulnerabilities)";
  fetchFn;
  constructor(fetchImpl) {
    this.fetchFn = fetchImpl ?? globalThis.fetch;
  }
  /**
   * Looks up known vulnerabilities for the given dependencies, chunking
   * the list so each request stays within OSV's batch limit.
   */
  async checkDependencies(dependencies) {
    if (dependencies.length === 0) return [];
    const found = [];
    for (let offset = 0; offset < dependencies.length; offset += BATCH_SIZE) {
      const chunk = dependencies.slice(offset, offset + BATCH_SIZE);
      found.push(...await this.queryBatch(chunk));
    }
    return found;
  }
  /** Uses OSV's /querybatch endpoint for efficient bulk lookups. */
  async queryBatch(dependencies) {
    const queries = dependencies.map((dep) => ({
      version: dep.version,
      package: {
        name: dep.name,
        ecosystem: this.mapEcosystem(dep.ecosystem)
      }
    }));
    const response = await this.fetchFn(`${OSV_API_BASE}/querybatch`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ queries })
    });
    if (!response.ok) {
      throw new Error(`OSV API error: ${response.status} ${response.statusText}`);
    }
    const data = await response.json();
    const vulnerabilities = [];
    // Results come back positionally aligned with the queries array.
    data.results.forEach((result, idx) => {
      const dep = dependencies[idx];
      for (const vuln of result.vulns ?? []) {
        vulnerabilities.push(this.mapVulnerability(vuln, dep));
      }
    });
    return vulnerabilities;
  }
  /** Maps an OSV vulnerability record to our Vulnerability type. */
  mapVulnerability(osvVuln, dep) {
    const cveId = this.extractCveId(osvVuln);
    const { level, score } = this.extractSeverity(osvVuln);
    const aliasSet = new Set([osvVuln.id, ...osvVuln.aliases ?? []]);
    return {
      id: cveId || osvVuln.id,
      aliases: Array.from(aliasSet),
      summary: osvVuln.summary ?? osvVuln.details?.slice(0, 200) ?? "No description available",
      severity: level,
      cvssScore: score,
      packageName: dep.name,
      ecosystem: dep.ecosystem,
      affectedVersionRange: this.extractAffectedRange(osvVuln, dep.name),
      fixedVersion: this.extractFixedVersion(osvVuln, dep.name),
      exploitedInWild: false,
      // OSV doesn't track this — CISA KEV does
      source: "osv",
      referenceUrl: `https://osv.dev/vulnerability/${osvVuln.id}`,
      publishedAt: osvVuln.published
    };
  }
  /** Extracts CVE ID from aliases (prefers CVE-xxxx over GHSA-xxxx). */
  extractCveId(vuln) {
    if (vuln.id.startsWith("CVE-")) return vuln.id;
    const alias = (vuln.aliases ?? []).find((a) => a.startsWith("CVE-"));
    return alias ?? null;
  }
  /** Derives a severity level (plus numeric score when available). */
  extractSeverity(vuln) {
    for (const sev of vuln.severity ?? []) {
      if (sev.type !== "CVSS_V3") continue;
      const score = this.parseCvssScore(sev.score);
      if (score !== null) {
        return { level: this.scoreToSeverity(score), score };
      }
    }
    // Fall back to the database-provided label (e.g. GHSA records).
    const dbLevel = vuln.database_specific?.severity?.toUpperCase();
    if (dbLevel && ["CRITICAL", "HIGH", "MEDIUM", "LOW"].includes(dbLevel)) {
      return { level: dbLevel };
    }
    return { level: "UNKNOWN" };
  }
  /**
   * Interprets a numeric CVSS score string.
   * NOTE(review): OSV frequently reports `score` as a CVSS vector string
   * ("CVSS:3.1/AV:N/...") which parseFloat rejects, so those records fall
   * through to the database_specific fallback — confirm whether vector
   * parsing should be added.
   */
  parseCvssScore(vectorOrScore) {
    const value = parseFloat(vectorOrScore);
    if (!isNaN(value) && value >= 0 && value <= 10) return value;
    return null;
  }
  /** Converts a CVSS score (0-10) to a severity level. */
  scoreToSeverity(score) {
    if (score >= 9) return "CRITICAL";
    if (score >= 7) return "HIGH";
    if (score >= 4) return "MEDIUM";
    return score > 0 ? "LOW" : "UNKNOWN";
  }
  /** Extracts the affected version range for a specific package. */
  extractAffectedRange(vuln, packageName) {
    for (const affected of vuln.affected ?? []) {
      if (affected.package?.name !== packageName) continue;
      for (const range of affected.ranges ?? []) {
        const events = range.events;
        if (!events) continue;
        const introduced = events.find((e) => e.introduced)?.introduced;
        const fixed = events.find((e) => e.fixed)?.fixed;
        if (introduced && fixed) return `>=${introduced}, <${fixed}`;
        if (introduced) return `>=${introduced}`;
      }
    }
    return void 0;
  }
  /** Extracts the first fixed version recorded for a specific package. */
  extractFixedVersion(vuln, packageName) {
    for (const affected of vuln.affected ?? []) {
      if (affected.package?.name !== packageName) continue;
      for (const range of affected.ranges ?? []) {
        const fixed = range.events?.find((e) => e.fixed)?.fixed;
        if (fixed) return fixed;
      }
    }
    return void 0;
  }
  /** Maps our ecosystem names to OSV ecosystem names. */
  mapEcosystem(ecosystem) {
    const map = {
      npm: "npm",
      nuget: "NuGet",
      cargo: "crates.io",
      maven: "Maven",
      pip: "PyPI",
      go: "Go",
      ruby: "RubyGems"
    };
    return map[ecosystem] ?? ecosystem;
  }
};
|
|
1126
|
+
|
|
1127
|
+
// src/cve/aggregator.ts
/**
 * Fans a dependency list out to all registered CVE sources, collects the
 * results, and merges duplicate findings into single entries.
 */
var CveAggregator = class {
  sources;
  constructor(sources) {
    this.sources = sources ?? [
      new OsvSource()
      // Future: new NvdSource(), new EuvdSource(), new CisaKevSource()
    ];
  }
  /**
   * Checks dependencies against all registered CVE sources.
   * Sources run in parallel; failures are reported per-source rather
   * than aborting the whole check.
   */
  async check(dependencies) {
    const startedAt = Date.now();
    const sourcesQueried = [];
    const sourceErrors = [];
    const collected = [];
    const settled = await Promise.allSettled(
      this.sources.map(async (source) => ({
        sourceId: source.sourceId,
        vulns: await source.checkDependencies(dependencies)
      }))
    );
    // allSettled preserves order, so index i maps back to this.sources[i].
    for (let idx = 0; idx < settled.length; idx++) {
      const outcome = settled[idx];
      if (outcome.status === "fulfilled") {
        sourcesQueried.push(outcome.value.sourceId);
        collected.push(...outcome.value.vulns);
        continue;
      }
      sourceErrors.push({
        source: this.sources[idx].sourceId,
        error: outcome.reason instanceof Error ? outcome.reason.message : String(outcome.reason)
      });
    }
    return {
      vulnerabilities: this.deduplicateVulnerabilities(collected),
      sourcesQueried,
      sourceErrors,
      checkDurationMs: Date.now() - startedAt
    };
  }
  /**
   * Deduplicates vulnerabilities keyed by (id, packageName).
   * When the same CVE appears from multiple sources, the entry with more
   * complete data (CVSS score, fix version, etc.) wins.
   */
  deduplicateVulnerabilities(vulns) {
    const byKey = /* @__PURE__ */ new Map();
    for (const vuln of vulns) {
      const key = `${vuln.id}::${vuln.packageName}`;
      const existing = byKey.get(key);
      byKey.set(key, existing ? this.pickBetterEntry(existing, vuln) : vuln);
    }
    return Array.from(byKey.values());
  }
  /** Merges two entries for the same CVE, favoring the more complete one. */
  pickBetterEntry(a, b) {
    // Completeness = count of populated informative fields.
    const completeness = (v) => {
      let score = 0;
      if (v.cvssScore !== void 0) score++;
      if (v.fixedVersion) score++;
      if (v.affectedVersionRange) score++;
      if (v.severity !== "UNKNOWN") score++;
      return score;
    };
    // Drop null/undefined fields so the spread below can't clobber real data.
    const strip = (obj) => Object.fromEntries(
      Object.entries(obj).filter(([, value]) => value !== void 0 && value !== null)
    );
    const merged = completeness(b) > completeness(a)
      ? { ...strip(a), ...strip(b) }
      : { ...strip(b), ...strip(a) };
    merged.aliases = Array.from(/* @__PURE__ */ new Set([...a.aliases, ...b.aliases]));
    // Exploitation status is sticky: true from either source wins.
    merged.exploitedInWild = a.exploitedInWild || b.exploitedInWild;
    return merged;
  }
};
|
|
1210
|
+
|
|
1211
|
+
// src/reporters/console.ts
// Renders a CRA compliance scan report as a plain-text terminal summary.
var ConsoleReporter = class {
  name = "console";
  /**
   * Builds the complete report as one string; the caller prints it.
   * Sections: banner, project facts, SBOM status, vulnerability list
   * (most severe first), CVE source status, and a severity summary.
   */
  report(result) {
    const lines = [];
    lines.push("");
    // Banner box (box-drawing characters, escaped for ASCII-safe source).
    lines.push("\u250C\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2510");
    lines.push("\u2502 VERIMU CRA COMPLIANCE SCAN \u2502");
    lines.push("\u2514\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2518");
    lines.push("");
    lines.push(` Project: ${result.project.path}`);
    lines.push(` Ecosystem: ${result.project.ecosystem}`);
    lines.push(` Dependencies: ${result.project.dependencyCount}`);
    lines.push(` Scanned at: ${result.generatedAt}`);
    lines.push("");
    lines.push(` \u2713 SBOM generated (${result.sbom.format}, ${result.sbom.specVersion})`);
    lines.push(` Components: ${result.sbom.componentCount}`);
    lines.push("");
    const vulns = result.cveCheck.vulnerabilities;
    if (vulns.length === 0) {
      lines.push(" \u2713 No known vulnerabilities found");
    } else {
      lines.push(` \u26A0 ${vulns.length} vulnerabilit${vulns.length === 1 ? "y" : "ies"} found:`);
      lines.push("");
      // Sort a copy (sort mutates) so CRITICAL entries appear first.
      const sorted = [...vulns].sort((a, b) => severityOrder(a.severity) - severityOrder(b.severity));
      for (const vuln of sorted) {
        const badge = severityBadge(vuln.severity);
        // Only show a fix hint when a fixed version is known.
        const fix = vuln.fixedVersion ? ` \u2192 fix: ${vuln.fixedVersion}` : "";
        lines.push(` ${badge} ${vuln.id}`);
        lines.push(` ${vuln.packageName}@${vuln.affectedVersionRange ?? "?"}${fix}`);
        // Truncate long advisories to keep one-screen output.
        lines.push(` ${vuln.summary.slice(0, 100)}`);
        if (vuln.exploitedInWild) {
          lines.push(` \u{1F534} ACTIVELY EXPLOITED \u2014 24h CRA reporting required`);
        }
        lines.push("");
      }
    }
    const sources = result.cveCheck.sourcesQueried.join(", ");
    lines.push(` Sources queried: ${sources} (${result.cveCheck.checkDurationMs}ms)`);
    if (result.cveCheck.sourceErrors.length > 0) {
      // Per-source failures are warnings, not fatal to the report.
      for (const err of result.cveCheck.sourceErrors) {
        lines.push(` \u26A0 ${err.source}: ${err.error}`);
      }
    }
    lines.push("");
    lines.push(" \u2500\u2500\u2500 Summary \u2500\u2500\u2500");
    lines.push(` Total: ${result.summary.totalVulnerabilities} | Critical: ${result.summary.critical} | High: ${result.summary.high} | Medium: ${result.summary.medium} | Low: ${result.summary.low}`);
    if (result.summary.exploitedInWild > 0) {
      lines.push(` \u{1F534} ${result.summary.exploitedInWild} actively exploited \u2014 immediate action required`);
    }
    lines.push("");
    return lines.join("\n");
  }
};
|
|
1265
|
+
/**
 * Maps a severity level to a sort rank (lower = more severe).
 * Unrecognized inputs rank last (5).
 *
 * Uses a switch instead of a plain-object lookup: `order[s] ?? 5` on an
 * object literal resolves inherited Object.prototype keys (e.g.
 * s === "toString" would return a function, not 5).
 */
function severityOrder(s) {
  switch (s) {
    case "CRITICAL":
      return 0;
    case "HIGH":
      return 1;
    case "MEDIUM":
      return 2;
    case "LOW":
      return 3;
    case "UNKNOWN":
      return 4;
    default:
      return 5;
  }
}
|
|
1275
|
+
/**
 * Returns the fixed-width console badge for a severity level.
 * Unrecognized inputs get the "[???] " badge.
 *
 * Uses a switch instead of a plain-object lookup: `badges[s] ?? ...` on an
 * object literal resolves inherited Object.prototype keys (e.g.
 * s === "toString" would return a function, not the fallback badge).
 */
function severityBadge(s) {
  switch (s) {
    case "CRITICAL":
      return "[CRIT]";
    case "HIGH":
      return "[HIGH]";
    case "MEDIUM":
      return "[MED] ";
    case "LOW":
      return "[LOW] ";
    default:
      // Covers "UNKNOWN" and any unexpected value.
      return "[???] ";
  }
}
|
|
1285
|
+
|
|
1286
|
+
// src/api/client.ts
var DEFAULT_API_BASE = "https://api.verimu.com";

/**
 * Thin HTTP client for the Verimu platform API.
 * Authenticates every request with an X-API-Key header.
 */
var VerimuApiClient = class {
  baseUrl;
  apiKey;
  constructor(apiKey, baseUrl) {
    this.apiKey = apiKey;
    // Normalize: strip any trailing slashes so path joins are predictable.
    const base = baseUrl ?? DEFAULT_API_BASE;
    this.baseUrl = base.replace(/\/+$/, "");
  }
  /**
   * Upsert a project — finds by name or creates it.
   * Used so `npx verimu` auto-registers projects without manual dashboard setup.
   */
  async upsertProject(opts) {
    const payload = {
      name: opts.name,
      ecosystem: this.mapEcosystem(opts.ecosystem),
      repository_url: opts.repositoryUrl ?? null,
      platform: opts.platform ?? null
    };
    const res = await fetch(`${this.baseUrl}/api/projects/upsert`, {
      method: "POST",
      headers: this.headers(),
      body: JSON.stringify(payload)
    });
    if (!res.ok) {
      const detail = await res.text();
      throw new Error(`Verimu API: upsert project failed (${res.status}): ${detail}`);
    }
    return res.json();
  }
  /**
   * Upload a CycloneDX SBOM to a project and trigger CVE scanning.
   */
  async uploadSbom(projectId, sbomContent) {
    // Re-parse so the request body is the raw BOM object, not a JSON string.
    const bom = JSON.parse(sbomContent);
    const res = await fetch(`${this.baseUrl}/api/projects/${projectId}/scan`, {
      method: "POST",
      headers: this.headers(),
      body: JSON.stringify(bom)
    });
    if (!res.ok) {
      const detail = await res.text();
      throw new Error(`Verimu API: upload SBOM failed (${res.status}): ${detail}`);
    }
    return res.json();
  }
  /** Common request headers (JSON body + API-key auth). */
  headers() {
    return {
      "Content-Type": "application/json",
      "X-API-Key": this.apiKey
    };
  }
  /**
   * Maps internal ecosystem names to what the backend expects.
   * Mostly 1:1, but keeps the mapping explicit.
   */
  mapEcosystem(eco) {
    const map = {
      npm: "npm",
      pip: "pip",
      maven: "maven",
      nuget: "nuget",
      go: "gomod",
      cargo: "cargo",
      ruby: "bundler"
    };
    return map[eco] ?? eco;
  }
};
|
|
1355
|
+
|
|
1356
|
+
// src/scan.ts
/**
 * Runs a full compliance scan for a project:
 *   1. Detect the ecosystem and parse the lockfile.
 *   2. Generate a CycloneDX SBOM and write it to disk.
 *   3. (Optionally) query CVE sources for known vulnerabilities.
 *   4. (Optionally) upload results to the Verimu platform.
 *
 * @param config { projectPath, sbomOutput?, skipCveCheck?, apiKey?, apiBaseUrl? }
 * @returns the assembled report (project info, sbom, cveCheck, summary).
 * @throws NoLockfileError / LockfileParseError from the scanner layer.
 */
async function scan(config) {
  const {
    projectPath,
    sbomOutput = "./sbom.cdx.json",
    skipCveCheck = false
  } = config;
  const registry = new ScannerRegistry();
  const scanResult = await registry.detectAndScan(projectPath);
  const sbomGenerator = new CycloneDxGenerator();
  const sbom = sbomGenerator.generate(scanResult);
  await writeFile(sbomOutput, sbom.content, "utf-8");
  let cveCheck;
  if (skipCveCheck) {
    // Empty-but-well-formed result keeps downstream reporting uniform.
    cveCheck = {
      vulnerabilities: [],
      sourcesQueried: [],
      sourceErrors: [],
      checkDurationMs: 0
    };
  } else {
    const aggregator = new CveAggregator();
    cveCheck = await aggregator.check(scanResult.dependencies);
  }
  // Severity counts for the report footer / CI gating.
  const countBySeverity = (level) => cveCheck.vulnerabilities.filter((v) => v.severity === level).length;
  const summary = {
    totalDependencies: scanResult.dependencies.length,
    totalVulnerabilities: cveCheck.vulnerabilities.length,
    critical: countBySeverity("CRITICAL"),
    high: countBySeverity("HIGH"),
    medium: countBySeverity("MEDIUM"),
    low: countBySeverity("LOW"),
    exploitedInWild: cveCheck.vulnerabilities.filter((v) => v.exploitedInWild).length
  };
  const report = {
    project: {
      path: projectPath,
      ecosystem: scanResult.ecosystem,
      dependencyCount: scanResult.dependencies.length
    },
    sbom,
    cveCheck,
    summary,
    generatedAt: new Date().toISOString()
  };
  if (config.apiKey) {
    try {
      report.upload = await uploadToVerimu(report, config);
    } catch (err) {
      // Best-effort: a failed platform sync must not fail the local scan,
      // but don't swallow the reason — surface it on the report so callers
      // can log it. (Previously this was an empty `catch {}`.)
      report.uploadError = err instanceof Error ? err.message : String(err);
    }
  }
  return report;
}
|
|
1409
|
+
/**
 * Pushes a finished scan report to the Verimu platform: upserts the
 * project (named after the scanned directory), then uploads the SBOM to
 * trigger server-side CVE tracking.
 *
 * @throws Error when config.apiKey is missing or an API call fails.
 */
async function uploadToVerimu(report, config) {
  const { apiKey, apiBaseUrl, projectPath } = config;
  if (!apiKey) {
    throw new Error("API key required for upload");
  }
  const client = new VerimuApiClient(apiKey, apiBaseUrl);
  // Project name defaults to the directory name being scanned.
  const upsertRes = await client.upsertProject({
    name: basename(projectPath),
    ecosystem: report.project.ecosystem
  });
  const projectId = upsertRes.project.id;
  const scanRes = await client.uploadSbom(projectId, report.sbom.content);
  return {
    projectId,
    projectCreated: upsertRes.created,
    totalDependencies: scanRes.summary.total_dependencies,
    vulnerableDependencies: scanRes.summary.vulnerable_dependencies,
    dashboardUrl: `https://app.verimu.com/dashboard/projects/${projectId}`
  };
}
|
|
1429
|
+
/**
 * Decides whether a CI run should fail: true when any vulnerability is at
 * or above the given severity threshold. Vulnerabilities with severities
 * outside the known set never trigger a failure (their rank is undefined,
 * and `undefined <= n` is false — matching the original behavior).
 */
function shouldFailCi(report, threshold) {
  const rank = {
    CRITICAL: 0,
    HIGH: 1,
    MEDIUM: 2,
    LOW: 3,
    UNKNOWN: 4
  };
  const limit = rank[threshold] ?? 4;
  for (const vuln of report.cveCheck.vulnerabilities) {
    if (rank[vuln.severity] <= limit) return true;
  }
  return false;
}
|
|
1442
|
+
|
|
1443
|
+
// src/cli.ts
// Version reported by `verimu version` and in the banner.
// BUGFIX: keep in sync with package.json — this release is published as
// 0.0.4 but the constant still said "0.0.3".
var VERSION = "0.0.4";
// ASCII-art banner printed at the top of every scan run.
var BRAND = `
 \u2566 \u2566\u250C\u2500\u2510\u252C\u2500\u2510\u252C\u250C\u252C\u2510\u252C \u252C
 \u255A\u2557\u2554\u255D\u251C\u2524 \u251C\u252C\u2518\u2502\u2502\u2502\u2502\u2502 \u2502
 \u255A\u255D \u2514\u2500\u2518\u2534\u2514\u2500\u2534\u2534 \u2534\u2514\u2500\u2518
 CRA Compliance Scanner v${VERSION}
`;
|
|
1451
|
+
// Console helpers that keep CLI output consistently indented and
// prefixed with a status glyph where appropriate.
const indent = (text) => ` ${text}`;
function log(msg) {
  console.log(indent(msg));
}
function logSuccess(msg) {
  console.log(indent(`\u2713 ${msg}`));
}
function logWarn(msg) {
  console.log(indent(`\u26A0 ${msg}`));
}
function logError(msg) {
  // Errors go to stderr so they survive stdout redirection.
  console.error(indent(`\u2717 ${msg}`));
}
|
|
1463
|
+
/**
 * Parses CLI arguments (process.argv form) into a normalized options
 * object. Unknown arguments are ignored; value-taking flags consume the
 * following token and fall back to their defaults when it is missing.
 */
function parseArgs(argv) {
  const args = argv.slice(2);
  const result = {
    command: "scan",
    projectPath: ".",
    sbomOutput: "./sbom.cdx.json",
    failOnSeverity: null,
    skipCveCheck: false,
    skipUpload: false
  };
  for (let i = 0; i < args.length; i++) {
    switch (args[i]) {
      case "scan":
        result.command = "scan";
        break;
      case "generate-sbom":
      case "sbom":
        // SBOM-only mode implies skipping the CVE lookup.
        result.command = "generate-sbom";
        result.skipCveCheck = true;
        break;
      case "help":
      case "--help":
      case "-h":
        result.command = "help";
        break;
      case "version":
      case "--version":
      case "-v":
        result.command = "version";
        break;
      case "--path":
      case "-p":
        result.projectPath = args[++i] ?? ".";
        break;
      case "--output":
      case "-o":
        result.sbomOutput = args[++i] ?? "./sbom.cdx.json";
        break;
      case "--fail-on": {
        // Invalid levels are silently ignored (threshold stays null).
        const level = (args[++i] ?? "").toUpperCase();
        if (["CRITICAL", "HIGH", "MEDIUM", "LOW"].includes(level)) {
          result.failOnSeverity = level;
        }
        break;
      }
      case "--skip-cve":
        result.skipCveCheck = true;
        break;
      case "--skip-upload":
      case "--offline":
        result.skipUpload = true;
        break;
    }
  }
  return result;
}
|
|
1503
|
+
/**
 * CLI entry point: parse arguments, run the scan, print the console
 * report, optionally sync results to the Verimu platform, and set the
 * process exit code for CI gating.
 *
 * Exit codes: 0 success, 1 severity threshold exceeded, 2 scan error.
 */
async function main() {
  const args = parseArgs(process.argv);
  if (args.command === "version") {
    console.log(`verimu ${VERSION}`);
    return;
  }
  if (args.command === "help") {
    printHelp();
    return;
  }
  console.log(BRAND);
  const apiKey = process.env.VERIMU_API_KEY;
  const apiBaseUrl = process.env.VERIMU_API_URL;
  log(`Scanning ${resolve(args.projectPath)}...`);
  if (apiKey && !args.skipUpload) {
    log("API key detected \u2014 results will sync to Verimu platform");
  } else if (!apiKey) {
    log("No VERIMU_API_KEY set \u2014 running in offline mode");
    log("Get your API key at https://app.verimu.com/dashboard/api-keys");
  }
  console.log("");
  const config = {
    projectPath: resolve(args.projectPath),
    sbomOutput: args.sbomOutput,
    skipCveCheck: args.skipCveCheck,
    // The upload is driven explicitly below (for better logging), so
    // scan() never receives the API key. BUGFIX: this was previously the
    // dead ternary `apiKey && !args.skipUpload ? void 0 : void 0`, which
    // evaluated to undefined on BOTH branches — now the intent is explicit.
    apiKey: void 0,
    apiBaseUrl
  };
  let report;
  try {
    report = await scan(config);
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    logError(msg);
    process.exit(2);
  }
  const reporter = new ConsoleReporter();
  console.log(reporter.report(report));
  if (apiKey && !args.skipUpload) {
    console.log("");
    log("Syncing to Verimu platform...");
    try {
      const uploadConfig = {
        ...config,
        apiKey,
        apiBaseUrl
      };
      const result = await uploadToVerimu(report, uploadConfig);
      if (result.projectCreated) {
        logSuccess(`Project created: ${report.project.path}`);
      }
      logSuccess(`${result.totalDependencies} dependencies tracked`);
      if (result.vulnerableDependencies > 0) {
        logWarn(`${result.vulnerableDependencies} vulnerable dependencies flagged`);
      }
      logSuccess(`Dashboard: ${result.dashboardUrl}`);
    } catch (err) {
      // Platform sync is best-effort: report the failure but keep going,
      // since the SBOM already exists locally.
      const msg = err instanceof Error ? err.message : String(err);
      logWarn(`Platform sync failed: ${msg}`);
      log("Your SBOM was still generated locally. You can upload it manually.");
    }
  }
  console.log("");
  log("Thanks for using Verimu \u2014 keeping your software CRA-compliant \u{1F6E1}\uFE0F");
  console.log("");
  if (args.failOnSeverity && shouldFailCi(report, args.failOnSeverity)) {
    logError(`Vulnerabilities found at or above ${args.failOnSeverity} severity`);
    process.exit(1);
  }
}
|
|
1574
|
+
// Print the full CLI usage/help text for `verimu help` (and unknown commands).
// Output goes through a single console.log call so it can be captured as one unit.
function printHelp() {
  const helpText = `
Verimu \u2014 CRA Compliance Scanner

Usage:
  verimu                           Scan current directory
  verimu scan [options]            Full scan (SBOM + CVE check)
  verimu generate-sbom [options]   Generate SBOM only (no CVE check)
  verimu help                      Show this help
  verimu version                   Show version

Options:
  --path, -p <dir>       Project directory to scan (default: .)
  --output, -o <file>    SBOM output path (default: ./sbom.cdx.json)
  --fail-on <severity>   Exit 1 if vulns at or above: CRITICAL, HIGH, MEDIUM, LOW
  --skip-cve             Skip CVE vulnerability checking
  --skip-upload          Don't sync to Verimu platform (even if API key is set)

Environment:
  VERIMU_API_KEY         API key for Verimu platform (from app.verimu.com)
  VERIMU_API_URL         Custom API URL (default: https://api.verimu.com)

Examples:
  npx verimu                            # Quick scan
  VERIMU_API_KEY=vmu_xxx npx verimu     # Scan + sync to platform
  npx verimu scan --fail-on HIGH        # Fail CI on HIGH+ vulns
  npx verimu scan --path ./backend --output ./reports/sbom.json

Supported ecosystems:
  npm (package-lock.json)      pip (requirements.txt)
  Maven (pom.xml)              NuGet (packages.lock.json)
  Cargo (Cargo.lock)           Go (go.sum)
  Ruby (Gemfile.lock)

Learn more: https://verimu.com
Dashboard:  https://app.verimu.com
`;
  console.log(helpText);
}
|
|
1612
|
+
// Top-level CLI entry point: any error escaping main() is fatal.
// Exit code 2 distinguishes internal failures from the vulnerability-gate exit(1).
const handleFatal = (err) => {
  console.error("Fatal:", err);
  process.exit(2);
};
main().catch(handleFatal);
|
|
1616
|
+
//# sourceMappingURL=cli.mjs.map
|