@kushankurdas/npm-sentinel 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CONTRIBUTING.md +19 -0
- package/LICENSE +21 -0
- package/README.md +243 -0
- package/SECURITY.md +17 -0
- package/bin/cli.js +303 -0
- package/docker/Dockerfile.sandbox +15 -0
- package/docker/entrypoint-sandbox.sh +34 -0
- package/docs/README.md +27 -0
- package/docs/commands/baseline.md +104 -0
- package/docs/commands/check.md +92 -0
- package/docs/commands/gate.md +70 -0
- package/docs/commands/help.md +28 -0
- package/docs/commands/sandbox.md +101 -0
- package/docs/reference/config.md +39 -0
- package/docs/reference/flags.md +45 -0
- package/docs/testing.md +19 -0
- package/lib/baseline.js +83 -0
- package/lib/config.js +49 -0
- package/lib/diff-signals.js +99 -0
- package/lib/dns-allowlist-default.json +27 -0
- package/lib/merge-findings.js +66 -0
- package/lib/offline-iocs.json +17 -0
- package/lib/osv-client.js +97 -0
- package/lib/packument.js +59 -0
- package/lib/parse-npm-lockfile.js +144 -0
- package/lib/sandbox/dns-parse.js +54 -0
- package/lib/sandbox/docker-runner.js +211 -0
- package/lib/scan.js +156 -0
- package/package.json +45 -0
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import { getRootDependencyResolution } from "./parse-npm-lockfile.js";
|
|
2
|
+
import { fetchVersionScripts, summarizeLifecycleScripts } from "./packument.js";
|
|
3
|
+
|
|
4
|
+
/**
 * @typedef {{ type: string, severity: 'error'|'warn', message: string, package?: string, detail?: object }} Signal
 */

/**
 * Diff the current lockfile state of each watched package against a saved
 * baseline snapshot and report drift as signals.
 *
 * Per watched package this detects:
 *  - the package no longer resolving at the root of node_modules (error)
 *  - a resolved-version change (warn)
 *  - direct dependencies added (error) or removed (warn)
 *  - lifecycle scripts absent from the baseline (error), using registry
 *    metadata fetched via `fetchImpl`
 *
 * Packages with no baseline entry are skipped — there is nothing to diff.
 *
 * @param {object} baselineSnapshot - from buildBaselineSnapshot / loadBaseline
 * @param {ReturnType<import('./parse-npm-lockfile.js').parseNpmLockfile>} parsed
 * @param {string[]} watchNames
 * @param {typeof fetch} fetchImpl
 * @returns {Promise<Signal[]>}
 */
export async function diffAgainstBaseline(
  baselineSnapshot,
  parsed,
  watchNames,
  fetchImpl = fetch
) {
  /** @type {Signal[]} */
  const signals = [];
  const baselinePackages = baselineSnapshot.packages || {};

  for (const name of watchNames) {
    const prev = baselinePackages[name];
    const cur = getRootDependencyResolution(parsed, name);

    // No baseline entry for this package: nothing to compare against.
    if (!prev) continue;

    if (!cur) {
      signals.push({
        type: "missing_resolution",
        severity: "error",
        message: `Watched package "${name}" no longer resolves at node_modules (removed from tree?)`,
        package: name,
      });
      continue;
    }

    if (prev.resolvedVersion && cur.version !== prev.resolvedVersion) {
      signals.push({
        type: "version_change",
        severity: "warn",
        message: `Watched package "${name}" version changed: ${prev.resolvedVersion} → ${cur.version}`,
        package: name,
        detail: { from: prev.resolvedVersion, to: cur.version },
      });
    }

    const baselineDeps = new Set(prev.directDependencyNames || []);
    const currentDeps = new Set(cur.directDependencyNames || []);

    // Additions are errors: a brand-new direct dependency is a classic
    // supply-chain injection vector.
    for (const added of [...currentDeps].filter((d) => !baselineDeps.has(d))) {
      signals.push({
        type: "new_dependency",
        severity: "error",
        message: `New direct dependency "${added}" on watched package "${name}"`,
        package: name,
        detail: { dependency: added },
      });
    }

    for (const removed of [...baselineDeps].filter((d) => !currentDeps.has(d))) {
      signals.push({
        type: "dependency_removed",
        severity: "warn",
        message: `Direct dependency "${removed}" removed from watched package "${name}" (review for takeover)`,
        package: name,
        detail: { dependency: removed },
      });
    }

    // A null result means the registry fetch failed; treat it as "no scripts
    // visible" rather than aborting the whole diff.
    const scripts = await fetchVersionScripts(name, cur.version, fetchImpl);
    const lifecycle =
      scripts === null ? { keys: [], hashes: {} } : summarizeLifecycleScripts(scripts);
    const baselineScriptKeys = new Set(prev.lifecycleScriptKeys || []);

    for (const scriptKey of lifecycle.keys) {
      if (baselineScriptKeys.has(scriptKey)) continue;
      signals.push({
        type: "new_lifecycle_script",
        severity: "error",
        message: `New lifecycle script "${scriptKey}" on watched package "${name}"@${cur.version}`,
        package: name,
        detail: { script: scriptKey },
      });
    }
  }

  return signals;
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
{
|
|
2
|
+
"suffixes": [
|
|
3
|
+
"npmjs.org",
|
|
4
|
+
"registry.npmjs.org",
|
|
5
|
+
"nodejs.org",
|
|
6
|
+
"github.com",
|
|
7
|
+
"codeload.github.com",
|
|
8
|
+
"objects.githubusercontent.com",
|
|
9
|
+
"raw.githubusercontent.com",
|
|
10
|
+
"api.github.com",
|
|
11
|
+
"unpkg.com",
|
|
12
|
+
"jsdelivr.net",
|
|
13
|
+
"fastly.com",
|
|
14
|
+
"cloudflare.com",
|
|
15
|
+
"googleapis.com",
|
|
16
|
+
"gstatic.com",
|
|
17
|
+
"microsoft.com",
|
|
18
|
+
"azureedge.net",
|
|
19
|
+
"amazonaws.com",
|
|
20
|
+
"docker.io",
|
|
21
|
+
"docker.com"
|
|
22
|
+
],
|
|
23
|
+
"exactHosts": [
|
|
24
|
+
"localhost",
|
|
25
|
+
"127.0.0.1"
|
|
26
|
+
]
|
|
27
|
+
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { readFileSync } from "node:fs";
|
|
2
|
+
import { dirname, join } from "node:path";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import { maxSeverityFromVulns, normalizeSeverity, compareSeverity } from "./osv-client.js";
|
|
5
|
+
|
|
6
|
+
// Resolve this module's directory so the bundled IOC database loads relative
// to the installed package, not the process working directory.
const __dirname = dirname(fileURLToPath(import.meta.url));

// Known-bad (name, version) IOC database shipped alongside this file;
// parsed once, synchronously, at module load.
const offlineIocs = JSON.parse(
  readFileSync(join(__dirname, "offline-iocs.json"), "utf8")
);
|
|
10
|
+
|
|
11
|
+
/**
 * Flatten OSV batch results into one finding per vulnerable package.
 * Rows without any vulnerabilities are dropped.
 * @param {Array<{package: {name: string, version: string}, vulns: object[]}>} osvResults
 * @returns {Array<{name: string, version: string, source: string, severity: string, ids: string[], summary?: string}>}
 */
export function flattenOsvFindings(osvResults) {
  const findings = [];
  for (const row of osvResults) {
    const { name, version } = row.package;
    if (!row.vulns?.length) continue;
    const [first] = row.vulns;
    findings.push({
      name,
      version,
      source: "osv",
      severity: maxSeverityFromVulns(row.vulns),
      // Prefer each vuln's id, then alias, then a placeholder.
      ids: row.vulns.map((v) => v.id || v.alias || "unknown").filter(Boolean),
      // First vuln's summary, or a truncated details string as fallback.
      summary: first?.summary || first?.details?.slice?.(0, 200),
    });
  }
  return findings;
}
|
|
34
|
+
|
|
35
|
+
/**
 * Match installed packages against the bundled offline IOC database.
 * Any hit is reported as critical: entries are exact versions of known
 * malicious releases, not range-based advisories.
 * @param {Array<{name: string, version: string}>} packages
 * @returns {Array<{name: string, version: string, source: string, severity: string, ids: string[], summary?: string}>}
 */
export function matchOfflineIocs(packages) {
  const iocs = offlineIocs.packages || [];
  const byName = new Map(iocs.map((p) => [p.name, p]));
  const out = [];
  for (const { name, version } of packages) {
    const entry = byName.get(name);
    if (!entry) continue;
    // Guard against malformed IOC entries lacking a versions array —
    // previously this would throw on entry.versions.includes(...).
    if (!Array.isArray(entry.versions) || !entry.versions.includes(version)) {
      continue;
    }
    out.push({
      name,
      version,
      source: "offline-ioc",
      severity: "critical",
      ids: [entry.id || `ioc-${name}`],
      summary: entry.summary,
    });
  }
  return out;
}
|
|
58
|
+
|
|
59
|
+
/**
 * Keep only findings whose severity is at least `minSeverity`.
 * Relies on compareSeverity's ordering, where more-severe labels sort lower.
 * @param {string} minSeverity
 * @param {Array<{severity: string}>} findings
 */
export function filterByMinSeverity(minSeverity, findings) {
  const threshold = normalizeSeverity(minSeverity);
  const kept = [];
  for (const finding of findings) {
    if (compareSeverity(finding.severity, threshold) <= 0) kept.push(finding);
  }
  return kept;
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
{
|
|
2
|
+
"description": "Exact version IOCs for known malicious npm releases (supplement when OSV/offline). Not a package watch list.",
|
|
3
|
+
"packages": [
|
|
4
|
+
{
|
|
5
|
+
"name": "axios",
|
|
6
|
+
"versions": ["1.14.1", "0.30.4"],
|
|
7
|
+
"id": "ioc-axios-2026-03",
|
|
8
|
+
"summary": "Supply-chain compromised releases (March 2026); see Elastic Security Labs / Microsoft guidance"
|
|
9
|
+
},
|
|
10
|
+
{
|
|
11
|
+
"name": "plain-crypto-js",
|
|
12
|
+
"versions": ["4.2.1"],
|
|
13
|
+
"id": "ioc-plain-crypto-js-2026-03",
|
|
14
|
+
"summary": "Malicious postinstall dropper associated with compromised axios releases"
|
|
15
|
+
}
|
|
16
|
+
]
|
|
17
|
+
}
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
/**
 * OSV batch query API — https://google.github.io/osv.dev/
 */

const OSV_BATCH = "https://api.osv.dev/v1/querybatch";

// Most-severe first; array index defines the comparison order below.
const SEVERITY_ORDER = ["critical", "high", "moderate", "low", "unknown"];

/**
 * Order two severity labels. Negative when `a` is MORE severe than `b`
 * (a lower index in SEVERITY_ORDER), zero when equal, positive otherwise.
 * @param {string} a
 * @param {string} b
 * @returns {number}
 */
export function compareSeverity(a, b) {
  const ia = SEVERITY_ORDER.indexOf(normalizeSeverity(a));
  const ib = SEVERITY_ORDER.indexOf(normalizeSeverity(b));
  return ia - ib;
}

/**
 * Normalize a severity label to one of SEVERITY_ORDER. Case-insensitive;
 * maps the common "medium" alias (used by NVD and some OSV data sources)
 * to "moderate", and falls back to "unknown" for unrecognized values.
 * @param {string} s
 * @returns {string}
 */
export function normalizeSeverity(s) {
  const x = String(s || "").toLowerCase();
  if (x === "medium") return "moderate"; // NVD/CVSS wording alias
  if (SEVERITY_ORDER.includes(x)) return x;
  return "unknown";
}
|
|
24
|
+
|
|
25
|
+
/**
 * Query the OSV batch endpoint for vulnerabilities affecting each
 * (name, version) pair. Requests are chunked and sent sequentially;
 * result order matches the input order.
 * @param {Array<{name: string, version: string}>} packages
 * @param {typeof fetch} fetchImpl
 * @param {number} chunkSize
 * @returns {Promise<Array<{package: {name: string, version: string}, vulns: object[]}>>}
 * @throws {Error} when the OSV endpoint responds with a non-2xx status
 */
export async function queryOsvBatch(packages, fetchImpl = fetch, chunkSize = 500) {
  /** @type {Array<{package: {name: string, version: string}, vulns: object[]}>} */
  const results = [];

  for (let start = 0; start < packages.length; start += chunkSize) {
    const batch = packages.slice(start, start + chunkSize);
    const payload = {
      queries: batch.map(({ name, version }) => ({
        package: { ecosystem: "npm", name },
        version,
      })),
    };

    const response = await fetchImpl(OSV_BATCH, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify(payload),
    });

    if (!response.ok) {
      const text = await response.text();
      throw new Error(`OSV batch failed: ${response.status} ${text.slice(0, 200)}`);
    }

    const json = await response.json();
    const batchResults = json.results || [];
    // OSV returns results positionally; pair each back with its query.
    batch.forEach((pkg, idx) => {
      results.push({ package: pkg, vulns: batchResults[idx]?.vulns || [] });
    });
  }

  return results;
}
|
|
66
|
+
|
|
67
|
+
/**
 * Reduce a list of OSV vulnerability objects to the single most severe label.
 * @param {object[]} vulns - OSV vuln objects
 * @returns {string} one of "critical" | "high" | "moderate" | "low" | "unknown"
 */
export function maxSeverityFromVulns(vulns) {
  let worst = "unknown";
  for (const vuln of vulns) {
    const candidate = extractSeverity(vuln);
    // Negative comparison means `candidate` outranks the current worst.
    if (compareSeverity(candidate, worst) < 0) worst = candidate;
  }
  return worst;
}

/**
 * Derive a severity label for a single OSV vulnerability.
 * Prefers a numeric CVSS v3 score, then the database-specific label,
 * defaulting to "moderate" when neither yields anything usable.
 * NOTE(review): OSV `severity[].score` is often a CVSS vector string
 * (e.g. "CVSS:3.1/AV:N/..."), for which parseFloat yields NaN so every
 * threshold check fails and the entry falls through to the fallbacks —
 * confirm whether vector parsing is wanted here.
 */
function extractSeverity(vuln) {
  const entries = vuln.severity;
  if (Array.isArray(entries)) {
    for (const entry of entries) {
      if (entry?.type !== "CVSS_V3" || !entry.score) continue;
      const score = parseFloat(String(entry.score));
      if (score >= 9) return "critical";
      if (score >= 7) return "high";
      if (score >= 4) return "moderate";
      if (score > 0) return "low";
    }
  }
  const label = vuln.database_specific?.severity;
  if (label) return normalizeSeverity(label);
  return "moderate";
}
|
package/lib/packument.js
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
/**
 * Fetch npm registry packument / version metadata for lifecycle scripts.
 */

const REGISTRY = "https://registry.npmjs.org";

/**
 * Fetch the `scripts` object for an exact published version from the npm
 * registry.
 * @param {string} name - package name (scoped ok)
 * @param {string} version - exact version
 * @param {typeof fetch} fetchImpl
 * @returns {Promise<Record<string, string> | null>} scripts object, {} when
 *   the version declares none, or null when the registry request fails
 */
export async function fetchVersionScripts(name, version, fetchImpl = fetch) {
  // encodeURIComponent also escapes "@" and "/"; restore them so scoped
  // package names keep their path shape in the URL.
  const encodedName = encodeURIComponent(name)
    .replace(/%40/g, "@")
    .replace(/%2F/g, "/");
  const url = `${REGISTRY}/${encodedName}/${encodeURIComponent(version)}`;

  const res = await fetchImpl(url, { headers: { accept: "application/json" } });
  if (!res.ok) return null;

  const json = await res.json();
  const { scripts } = json;
  if (!scripts || typeof scripts !== "object") return {};
  return /** @type {Record<string, string>} */ (scripts);
}
|
|
24
|
+
|
|
25
|
+
// npm lifecycle hooks that execute arbitrary shell commands during
// install/prepare/publish.
const LIFECYCLE = new Set([
  "preinstall",
  "install",
  "postinstall",
  "preprepare",
  "prepare",
  "postprepare",
  "prepublish",
  "prepublishOnly",
]);

/**
 * Pick out the lifecycle entries from a scripts object and hash each body
 * so later runs can detect script changes without storing command text.
 * @param {Record<string, string>} scripts
 * @returns {{ keys: string[], hashes: Record<string, string> }}
 */
export function summarizeLifecycleScripts(scripts) {
  const keys = Object.keys(scripts).filter((key) => LIFECYCLE.has(key));
  /** @type {Record<string, string>} */
  const hashes = {};
  for (const key of keys) {
    hashes[key] = simpleHash(scripts[key] || "");
  }
  return { keys, hashes };
}

/**
 * Deterministic 32-bit rolling hash (Java-style 31x over UTF-16 units).
 * Not cryptographic — only used to detect script-body drift between runs;
 * the exact algorithm must stay stable because hashes persist in baselines.
 * @param {string} s
 * @returns {string} signed 32-bit integer rendered as a decimal string
 */
function simpleHash(s) {
  let h = 0;
  for (let i = 0; i < s.length; i += 1) {
    h = (Math.imul(31, h) + s.charCodeAt(i)) | 0;
  }
  return String(h);
}
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
/**
 * Parse npm package-lock.json v1 and v2/v3; enumerate packages and root dependency resolutions.
 */

/**
 * Derive a package name from a lockfile path key.
 * Handles nested installs: for "node_modules/a/node_modules/b" the name is
 * taken after the LAST "node_modules/" segment ("b"), not the whole tail
 * (the previous implementation returned "a/node_modules/b" for nested keys).
 * @param {string} pathKey - e.g. "node_modules/foo" or "node_modules/@scope/pkg"
 * @returns {string} package name (scoped names keep their "@scope/" prefix)
 */
export function pathKeyToPackageName(pathKey) {
  if (!pathKey) return "";
  // Normalize Windows-style separators before locating the marker.
  const normalized = pathKey.replace(/\\/g, "/");
  const marker = "node_modules/";
  const idx = normalized.lastIndexOf(marker);
  // Keys not under node_modules are already bare names.
  if (idx === -1) return normalized;
  return normalized.slice(idx + marker.length);
}
|
|
15
|
+
|
|
16
|
+
/**
 * Parse an npm package-lock.json (v1, v2, or v3) into a flat package list.
 * Dispatches on the presence of the v2/v3 `packages` map; v1 lockfiles (or
 * anything without `packages`) go through the legacy `dependencies` walker.
 * @param {object} lock
 * @returns {{ lockfileVersion: number, packages: Array<{path: string, name: string, version: string, dependencies: Record<string, string>, directDependencyNames: string[]}> }}
 */
export function parseNpmLockfile(lock) {
  const declaredVersion = lock.lockfileVersion ?? 1;
  const isLegacy = declaredVersion === 1 || !lock.packages;
  return isLegacy ? parseLockfileV1(lock) : parseLockfileV2(lock, declaredVersion);
}
|
|
27
|
+
|
|
28
|
+
/**
 * Legacy (lockfileVersion 1) walker: recursively flattens the nested
 * `dependencies` tree into path-keyed package entries (pre-order).
 * v1 entries only reliably list dependency NAMES, so each range is
 * recorded as "*".
 */
function parseLockfileV1(lock) {
  /** @type {Array<{path: string, name: string, version: string, dependencies: Record<string, string>}>} */
  const packages = [];
  const visited = new Set();

  const visit = (deps, parentPath) => {
    if (!deps) return;
    for (const [name, entry] of Object.entries(deps)) {
      if (!entry || typeof entry !== "object") continue;
      if (!entry.version) continue;
      const pathKey =
        parentPath === ""
          ? `node_modules/${name}`
          : `${parentPath}/node_modules/${name}`;
      if (!visited.has(pathKey)) {
        visited.add(pathKey);
        const directDependencyNames = Object.keys(entry.dependencies || {});
        // Ranges aren't trustworthy in v1; store a wildcard per dependency.
        const dependencies = Object.fromEntries(
          directDependencyNames.map((dep) => [dep, "*"])
        );
        packages.push({
          path: pathKey,
          name,
          version: entry.version,
          dependencies,
          directDependencyNames,
        });
      }
      // Recurse even for already-seen paths, matching the original walk.
      if (entry.dependencies) visit(entry.dependencies, pathKey);
    }
  };

  visit(lock.dependencies || {}, "");

  return {
    lockfileVersion: 1,
    packages,
    rootDependencies: lock.dependencies || {},
  };
}
|
|
71
|
+
|
|
72
|
+
/**
 * Modern (lockfileVersion >= 2) parser: the lockfile already carries a flat
 * `packages` map keyed by node_modules path, so entries just get normalized.
 */
function parseLockfileV2(lock, lockfileVersion) {
  const packageMap = lock.packages || {};
  /** @type {Array<{path: string, name: string, version: string, dependencies: Record<string, string>}>} */
  const packages = [];

  for (const [pathKey, pkg] of Object.entries(packageMap)) {
    // "" is the root project itself, not an installed dependency.
    if (pathKey === "") continue;
    if (!pkg || typeof pkg !== "object" || !pkg.version) continue;
    const dependencies = pkg.dependencies || {};
    packages.push({
      path: pathKey.replace(/\\/g, "/"),
      name: pkg.name || pathKeyToPackageName(pathKey),
      version: pkg.version,
      dependencies,
      directDependencyNames: Object.keys(dependencies),
    });
  }

  const rootEntry = packageMap[""] || {};
  return {
    lockfileVersion,
    packages,
    rootDependencies: rootEntry.dependencies || {},
  };
}
|
|
101
|
+
|
|
102
|
+
/**
 * Unique (name, version) pairs for OSV batching.
 * First occurrence wins ordering; duplicates collapse onto one entry.
 * @param {ReturnType<parseNpmLockfile>} parsed
 * @returns {Array<{name: string, version: string}>}
 */
export function getAllNameVersionPairs(parsed) {
  /** @type {Map<string, {name: string, version: string}>} */
  const unique = new Map();
  for (const { name, version } of parsed.packages) {
    const id = `${name}@${version}`;
    if (unique.has(id)) continue;
    unique.set(id, { name, version });
  }
  return [...unique.values()];
}
|
|
118
|
+
|
|
119
|
+
/**
 * Path under node_modules for a root dependency name
 * (e.g. axios -> node_modules/axios, @x/y -> node_modules/@x/y).
 * Backslashes are normalized to forward slashes first.
 * @param {string} depName
 */
export function rootNodeModulesPath(depName) {
  const normalized = depName.replace(/\\/g, "/");
  return `node_modules/${normalized}`;
}
|
|
126
|
+
|
|
127
|
+
/**
 * Resolved version + direct dependency names for a root-level dependency
 * from package.json.
 * @param {ReturnType<parseNpmLockfile>} parsed
 * @param {string} depName - key as in package.json dependencies
 * @returns {{ version: string, directDependencyNames: string[] } | null}
 */
export function getRootDependencyResolution(parsed, depName) {
  const wanted = rootNodeModulesPath(depName);
  const match = parsed.packages.find((pkg) => pkg.path === wanted);
  if (!match) return null;
  return {
    version: match.version,
    // Fall back to the dependency-range keys when names weren't precomputed.
    directDependencyNames:
      match.directDependencyNames || Object.keys(match.dependencies || {}),
  };
}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
/**
 * Extract hostnames from tcpdump -n udp port 53 style lines.
 */

// DNS query lines look like: "... A? host.example.com. (34)".
// Each pattern captures the queried name (sans the trailing root dot).
const RE_A = /A\?\s+([a-zA-Z0-9*.-]+)\.\s/;
const RE_AAAA = /AAAA\?\s+([a-zA-Z0-9*.-]+)\.\s/;
const RE_CNAME = /CNAME\?\s+([a-zA-Z0-9*.-]+)\.\s/;

const QUERY_PATTERNS = [RE_A, RE_AAAA, RE_CNAME];

/**
 * @param {string} logText - raw tcpdump output
 * @returns {Set<string>} lowercased hostnames seen in DNS query lines
 */
export function extractHostsFromTcpdump(logText) {
  const hosts = new Set();
  for (const line of logText.split("\n")) {
    for (const pattern of QUERY_PATTERNS) {
      const match = pattern.exec(line);
      if (!match) continue;
      hosts.add(match[1].toLowerCase().replace(/\.$/, ""));
    }
  }
  return hosts;
}
|
|
25
|
+
|
|
26
|
+
/**
 * Decide whether a DNS-queried host is on the allowlist.
 * Empty hosts are treated as allowed (nothing actionable to judge).
 * @param {string} host
 * @param {{ suffixes: string[], exactHosts: string[] }} allow
 */
export function isHostAllowed(host, allow) {
  const candidate = host.toLowerCase().replace(/\.$/, "");
  if (candidate === "") return true;

  const exactMatch = (allow.exactHosts || []).some(
    (exact) => candidate === exact.toLowerCase()
  );
  if (exactMatch) return true;

  // A suffix matches the bare domain or any subdomain of it, but not
  // lookalikes such as "evil-npmjs.org" for "npmjs.org".
  return (allow.suffixes || []).some((suffix) => {
    const s = suffix.toLowerCase();
    return candidate === s || candidate.endsWith(`.${s}`);
  });
}
|
|
42
|
+
|
|
43
|
+
/**
 * Reduce an observed host set to the sorted, deduplicated subset that is
 * NOT covered by the allowlist.
 * @param {Set<string>} hosts
 * @param {object} allow
 * @returns {string[]}
 */
export function filterDisallowedHosts(hosts, allow) {
  const disallowed = new Set();
  for (const host of hosts) {
    if (!isHostAllowed(host, allow)) disallowed.add(host);
  }
  return [...disallowed].sort();
}
|