@rainy-updates/cli 0.5.1 → 0.5.2-rc.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +93 -1
- package/README.md +88 -25
- package/dist/bin/cli.js +50 -1
- package/dist/commands/audit/fetcher.d.ts +2 -6
- package/dist/commands/audit/fetcher.js +2 -79
- package/dist/commands/audit/mapper.d.ts +8 -1
- package/dist/commands/audit/mapper.js +106 -10
- package/dist/commands/audit/parser.js +36 -2
- package/dist/commands/audit/runner.js +179 -15
- package/dist/commands/audit/sources/github.d.ts +2 -0
- package/dist/commands/audit/sources/github.js +125 -0
- package/dist/commands/audit/sources/index.d.ts +6 -0
- package/dist/commands/audit/sources/index.js +92 -0
- package/dist/commands/audit/sources/osv.d.ts +2 -0
- package/dist/commands/audit/sources/osv.js +131 -0
- package/dist/commands/audit/sources/types.d.ts +21 -0
- package/dist/commands/audit/sources/types.js +1 -0
- package/dist/commands/audit/targets.d.ts +20 -0
- package/dist/commands/audit/targets.js +314 -0
- package/dist/commands/changelog/fetcher.d.ts +9 -0
- package/dist/commands/changelog/fetcher.js +130 -0
- package/dist/commands/licenses/parser.d.ts +2 -0
- package/dist/commands/licenses/parser.js +116 -0
- package/dist/commands/licenses/runner.d.ts +9 -0
- package/dist/commands/licenses/runner.js +163 -0
- package/dist/commands/licenses/sbom.d.ts +10 -0
- package/dist/commands/licenses/sbom.js +70 -0
- package/dist/commands/resolve/graph/builder.d.ts +20 -0
- package/dist/commands/resolve/graph/builder.js +183 -0
- package/dist/commands/resolve/graph/conflict.d.ts +20 -0
- package/dist/commands/resolve/graph/conflict.js +52 -0
- package/dist/commands/resolve/graph/resolver.d.ts +17 -0
- package/dist/commands/resolve/graph/resolver.js +71 -0
- package/dist/commands/resolve/parser.d.ts +2 -0
- package/dist/commands/resolve/parser.js +89 -0
- package/dist/commands/resolve/runner.d.ts +13 -0
- package/dist/commands/resolve/runner.js +136 -0
- package/dist/commands/snapshot/parser.d.ts +2 -0
- package/dist/commands/snapshot/parser.js +80 -0
- package/dist/commands/snapshot/runner.d.ts +11 -0
- package/dist/commands/snapshot/runner.js +115 -0
- package/dist/commands/snapshot/store.d.ts +35 -0
- package/dist/commands/snapshot/store.js +158 -0
- package/dist/commands/unused/matcher.d.ts +22 -0
- package/dist/commands/unused/matcher.js +95 -0
- package/dist/commands/unused/parser.d.ts +2 -0
- package/dist/commands/unused/parser.js +95 -0
- package/dist/commands/unused/runner.d.ts +11 -0
- package/dist/commands/unused/runner.js +113 -0
- package/dist/commands/unused/scanner.d.ts +18 -0
- package/dist/commands/unused/scanner.js +129 -0
- package/dist/core/impact.d.ts +36 -0
- package/dist/core/impact.js +82 -0
- package/dist/core/options.d.ts +13 -1
- package/dist/core/options.js +35 -13
- package/dist/types/index.d.ts +187 -1
- package/dist/ui/tui.d.ts +6 -0
- package/dist/ui/tui.js +50 -0
- package/dist/utils/semver.d.ts +18 -0
- package/dist/utils/semver.js +88 -3
- package/package.json +8 -1
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
import process from "node:process";
|
|
2
|
+
import { discoverPackageDirs } from "../../workspace/discover.js";
|
|
3
|
+
import { readManifest, collectDependencies, } from "../../parsers/package-json.js";
|
|
4
|
+
import { asyncPool } from "../../utils/async-pool.js";
|
|
5
|
+
import { stableStringify } from "../../utils/stable-json.js";
|
|
6
|
+
import { writeFileAtomic } from "../../utils/io.js";
|
|
7
|
+
import { generateSbom } from "./sbom.js";
|
|
8
|
+
/**
 * Entry point for `rup licenses`. Lazy-loaded by cli.ts.
 *
 * Fetches the SPDX license field from each dependency's packument,
 * checks it against --allow/--deny lists, and optionally generates
 * an SPDX 2.3 SBOM JSON document.
 *
 * @param {object} options - parsed CLI options (cwd, workspace, concurrency,
 *   registryTimeoutMs, allow, deny, sbomFile, jsonFile)
 * @returns {Promise<object>} aggregate result: scanned packages, violations,
 *   per-package-dir errors, and warnings
 */
export async function runLicenses(options) {
    const result = {
        packages: [],
        violations: [],
        totalViolations: 0,
        errors: [],
        warnings: [],
    };
    const packageDirs = await discoverPackageDirs(options.cwd, options.workspace);
    const allDeps = new Map(); // name → bare version derived from the declared range
    for (const packageDir of packageDirs) {
        let manifest;
        try {
            manifest = await readManifest(packageDir);
        }
        catch (err) {
            // Unreadable manifest: record and keep scanning other workspaces
            result.errors.push(`${packageDir}: ${String(err)}`);
            continue;
        }
        const deps = collectDependencies(manifest, [
            "dependencies",
            "devDependencies",
            "optionalDependencies",
        ]);
        for (const dep of deps) {
            if (!allDeps.has(dep.name)) {
                // Fix: strip the WHOLE range prefix, not just one character —
                // the old /^[~^>=<]/ turned ">=1.2.3" into "=1.2.3", which then
                // failed the registry version lookup below.
                const bare = dep.range.replace(/^[~^>=<]+/, "").split(" ")[0] ?? dep.range;
                allDeps.set(dep.name, bare);
            }
        }
    }
    // Fetch license info from npm registry in parallel
    const names = Array.from(allDeps.keys());
    const fetchTasks = names.map((name) => async () => {
        const version = allDeps.get(name) ?? "latest";
        return fetchLicenseInfo(name, version, options.registryTimeoutMs);
    });
    const licenseInfos = await asyncPool(options.concurrency, fetchTasks);
    for (const info of licenseInfos) {
        // Pool results may be null (fetch failure) or Error sentinels — skip both
        if (!info || info instanceof Error)
            continue;
        result.packages.push(info);
    }
    // Evaluate allow/deny lists
    for (const pkg of result.packages) {
        if (isViolation(pkg, options)) {
            result.violations.push(pkg);
        }
    }
    result.totalViolations = result.violations.length;
    // Render
    process.stdout.write(renderLicenseTable(result) + "\n");
    // SBOM output
    if (options.sbomFile) {
        const sbom = generateSbom(result.packages, options.cwd);
        await writeFileAtomic(options.sbomFile, stableStringify(sbom, 2) + "\n");
        process.stderr.write(`[licenses] SBOM written to ${options.sbomFile}\n`);
    }
    // JSON output
    if (options.jsonFile) {
        await writeFileAtomic(options.jsonFile, stableStringify(result, 2) + "\n");
        process.stderr.write(`[licenses] JSON report written to ${options.jsonFile}\n`);
    }
    return result;
}
|
|
80
|
+
/**
 * Fetches license metadata for one package version from the npm registry.
 *
 * @param {string} name - package name
 * @param {string} version - concrete version (or dist-tag such as "latest")
 * @param {number} timeoutMs - request timeout in milliseconds
 * @returns {Promise<object|null>} license record, or null on any failure
 *   (non-2xx response, timeout, network or parse error)
 */
async function fetchLicenseInfo(name, version, timeoutMs) {
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeoutMs);
    try {
        const url = `https://registry.npmjs.org/${encodeURIComponent(name)}/${encodeURIComponent(version)}`;
        const res = await fetch(url, {
            signal: controller.signal,
            headers: { accept: "application/json" },
        });
        if (!res.ok)
            return null;
        const data = (await res.json());
        const rawLicense = data.license ?? "UNKNOWN";
        // `repository` may be either a string or a { type, url } object
        const repo = typeof data.repository === "object"
            ? data.repository?.url
            : data.repository;
        return {
            name,
            version,
            license: rawLicense,
            spdxExpression: normalizeSpdx(rawLicense),
            homepage: data.homepage,
            repository: repo,
        };
    }
    catch {
        // Abort/network/parse failure — by design, report "no license info"
        return null;
    }
    finally {
        // Fix: previously cleared only on the success path, so a rejected
        // fetch left the abort timer armed and kept the event loop alive
        // for up to timeoutMs.
        clearTimeout(timer);
    }
}
|
|
110
|
+
/**
 * Normalizes common license strings to SPDX identifiers.
 *
 * Returns the canonical id for well-known licenses, passes through strings
 * that already look like SPDX ids (contain a hyphen), and returns null for
 * anything unrecognized.
 */
function normalizeSpdx(raw) {
    const known = {
        MIT: "MIT",
        ISC: "ISC",
        "Apache-2.0": "Apache-2.0",
        "BSD-2-Clause": "BSD-2-Clause",
        "BSD-3-Clause": "BSD-3-Clause",
        "GPL-3.0": "GPL-3.0",
        "GPL-2.0": "GPL-2.0",
        "LGPL-2.1": "LGPL-2.1",
        "LGPL-3.0": "LGPL-3.0",
        "MPL-2.0": "MPL-2.0",
        "CC0-1.0": "CC0-1.0",
        Unlicense: "Unlicense",
        "AGPL-3.0": "AGPL-3.0",
    };
    const key = raw.trim();
    // Fix: guard with Object.hasOwn — a bare known[key] lookup resolved
    // inherited Object.prototype keys (e.g. a license string of "toString"
    // returned a function instead of falling through to the default).
    if (Object.hasOwn(known, key))
        return known[key];
    return raw.includes("-") ? raw : null;
}
|
|
129
|
+
/**
 * Decides whether a scanned package violates the configured license policy.
 *
 * Deny list wins outright; otherwise a non-empty allow list acts as a
 * whitelist — any license not on it is a violation.
 */
function isViolation(pkg, options) {
    const effectiveLicense = pkg.spdxExpression ?? pkg.license;
    if (options.deny?.includes(effectiveLicense)) {
        return true;
    }
    const allowList = options.allow ?? [];
    return allowList.length > 0 && !allowList.includes(effectiveLicense);
}
|
|
139
|
+
/**
 * Renders the license scan result as a human-readable terminal table.
 *
 * Violations are listed first (red ✖ rows), followed by the full package
 * table; violating rows in the table are wrapped in red ANSI codes.
 *
 * @param {object} result - aggregate result from runLicenses
 * @returns {string} multi-line table (no trailing newline)
 */
function renderLicenseTable(result) {
    const lines = [];
    // Precompute violating names once instead of scanning the violations
    // array per rendered row (was accidental O(packages × violations)).
    const violatingNames = new Set(result.violations.map((v) => v.name));
    if (result.violations.length > 0) {
        lines.push(`\n✖ License violations (${result.violations.length}):\n`);
        for (const pkg of result.violations) {
            lines.push(` \x1b[31m✖\x1b[0m ${pkg.name.padEnd(35)} ${pkg.spdxExpression ?? pkg.license}`);
        }
        lines.push("");
    }
    lines.push(`📄 ${result.packages.length} packages scanned:\n`);
    lines.push(" " + "Package".padEnd(35) + "Version".padEnd(12) + "License");
    lines.push(" " + "─".repeat(60));
    for (const pkg of result.packages) {
        const isViolating = violatingNames.has(pkg.name);
        const prefix = isViolating ? "\x1b[31m" : "";
        const suffix = isViolating ? "\x1b[0m" : "";
        lines.push(" " +
            prefix +
            pkg.name.padEnd(35) +
            pkg.version.padEnd(12) +
            (pkg.spdxExpression ?? pkg.license) +
            suffix);
    }
    return lines.join("\n");
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { PackageLicense, SbomDocument } from "../../types/index.js";
|
|
2
|
+
/**
|
|
3
|
+
* Generates an SPDX 2.3 compliant SBOM JSON document from a list of
|
|
4
|
+
* scanned package licenses.
|
|
5
|
+
*
|
|
6
|
+
* SPDX 2.3 spec: https://spdx.github.io/spdx-spec/v2.3/
|
|
7
|
+
* Required by: CISA SBOM mandate, EU Cyber Resilience Act, many enterprise
|
|
8
|
+
* security standards.
|
|
9
|
+
*/
|
|
10
|
+
export declare function generateSbom(packages: PackageLicense[], projectName: string): SbomDocument;
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import { randomUUID } from "node:crypto";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
/**
 * Generates an SPDX 2.3 compliant SBOM JSON document from a list of
 * scanned package licenses.
 *
 * SPDX 2.3 spec: https://spdx.github.io/spdx-spec/v2.3/
 * Required by: CISA SBOM mandate, EU Cyber Resilience Act, many enterprise
 * security standards.
 *
 * @param {PackageLicense[]} packages - scanned dependency license records
 * @param {string} projectName - project path or name; only its basename is used
 * @returns {SbomDocument} SPDX 2.3 document object (caller serializes it)
 */
export function generateSbom(packages, projectName) {
    const docId = `SPDXRef-DOCUMENT`;
    const rootId = `SPDXRef-Package-root`;
    // SPDX requires a globally unique document namespace; a random UUID
    // suffix guarantees uniqueness across runs.
    // (Fix: removed an unused `timestamp` local that was computed but never
    // emitted anywhere in the document.)
    const namespace = `https://spdx.org/spdxdocs/${encodeURIComponent(path.basename(projectName))}-${randomUUID()}`;
    const spdxPackages = [
        // Root package entry describing the project itself
        {
            SPDXID: rootId,
            name: path.basename(projectName),
            versionInfo: "NOASSERTION",
            downloadLocation: "NOASSERTION",
            licenseConcluded: "NOASSERTION",
            licenseDeclared: "NOASSERTION",
            copyrightText: "NOASSERTION",
        },
        // One entry per dependency
        ...packages.map((pkg) => ({
            SPDXID: toSpdxId(pkg.name, pkg.version),
            name: pkg.name,
            versionInfo: pkg.version,
            downloadLocation: pkg.repository
                ? normalizeRepoUrl(pkg.repository)
                : `https://www.npmjs.com/package/${encodeURIComponent(pkg.name)}`,
            licenseConcluded: pkg.spdxExpression ?? "NOASSERTION",
            licenseDeclared: pkg.spdxExpression ?? "NOASSERTION",
            copyrightText: "NOASSERTION",
        })),
    ];
    const relationships = [
        {
            spdxElementId: docId,
            relationshipType: "DESCRIBES",
            relatedSpdxElement: rootId,
        },
        ...packages.map((pkg) => ({
            spdxElementId: rootId,
            relationshipType: "DEPENDS_ON",
            relatedSpdxElement: toSpdxId(pkg.name, pkg.version),
        })),
    ];
    return {
        spdxVersion: "SPDX-2.3",
        dataLicense: "CC0-1.0",
        name: `SBOM for ${path.basename(projectName)}`,
        documentNamespace: namespace,
        packages: spdxPackages,
        relationships,
    };
}
/** Converts a package name + version to a valid SPDX ID (letters, digits, '-', '.'). */
function toSpdxId(name, version) {
    const safe = `${name}-${version}`.replace(/[^a-zA-Z0-9-.]/g, "-");
    return `SPDXRef-Package-${safe}`;
}
/** Normalize various repository URL formats to a clean string. */
function normalizeRepoUrl(raw) {
    // git+https://... or git://... → strip VCS decoration
    return raw.replace(/^git\+/, "").replace(/\.git$/, "");
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import type { PeerGraph, ResolveOptions } from "../../../types/index.js";
|
|
2
|
+
/**
|
|
3
|
+
* Builds an in-memory PeerGraph from the direct dependencies declared in
|
|
4
|
+
* package.json, enriched with peerDependency ranges fetched from the registry.
|
|
5
|
+
*
|
|
6
|
+
* Performance strategy:
|
|
7
|
+
* - Collect all unique package names first (single pass)
|
|
8
|
+
* - Check cache for packument data (zero network cost on cache hit)
|
|
9
|
+
* - Fetch missing ones via asyncPool (parallel, up to options.concurrency)
|
|
10
|
+
* - Build PeerGraph from merged results
|
|
11
|
+
*
|
|
12
|
+
* The graph only contains packages that declare peerDependencies — packages
|
|
13
|
+
* without peers are implicitly conflict-free and excluded to keep the graph lean.
|
|
14
|
+
*/
|
|
15
|
+
export declare function buildPeerGraph(options: ResolveOptions,
|
|
16
|
+
/**
|
|
17
|
+
* Optional override of the resolved versions map (used by --after-update mode
|
|
18
|
+
* to inject proposed upgrade versions before writing them to disk).
|
|
19
|
+
*/
|
|
20
|
+
resolvedVersionOverrides?: Map<string, string>): Promise<PeerGraph>;
|
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
import { asyncPool } from "../../../utils/async-pool.js";
|
|
2
|
+
import { VersionCache } from "../../../cache/cache.js";
|
|
3
|
+
import { NpmRegistryClient } from "../../../registry/npm.js";
|
|
4
|
+
import { readManifest, collectDependencies, } from "../../../parsers/package-json.js";
|
|
5
|
+
import { discoverPackageDirs } from "../../../workspace/discover.js";
|
|
6
|
+
/**
 * Builds an in-memory PeerGraph from the direct dependencies declared in
 * package.json, enriched with peerDependency ranges fetched from the registry.
 *
 * Performance strategy:
 * - Collect all unique package names first (single pass)
 * - Check cache for packument data (zero network cost on cache hit)
 * - Fetch missing ones via asyncPool (parallel, up to options.concurrency)
 * - Build PeerGraph from merged results
 *
 * The graph only contains packages that declare peerDependencies — packages
 * without peers are implicitly conflict-free and excluded to keep the graph lean.
 */
export async function buildPeerGraph(options,
/**
 * Optional override of the resolved versions map (used by --after-update mode
 * to inject proposed upgrade versions before writing them to disk).
 */
resolvedVersionOverrides) {
    const packageDirs = await discoverPackageDirs(options.cwd, options.workspace);
    const cache = await VersionCache.create();
    const registry = new NpmRegistryClient(options.cwd, {
        timeoutMs: options.registryTimeoutMs,
        retries: 2,
    });
    // ─ Step 1: collect all declared dependencies and their current versions ────
    const declaredVersions = new Map(); // name → range/version
    const roots = [];
    for (const packageDir of packageDirs) {
        let manifest;
        try {
            manifest = await readManifest(packageDir);
        }
        catch {
            continue;
        }
        const deps = collectDependencies(manifest, [
            "dependencies",
            "devDependencies",
            "optionalDependencies",
        ]);
        for (const dep of deps) {
            if (!declaredVersions.has(dep.name)) {
                // Strip range prefix to get a bare version for peer satisfaction
                // checks. Fix: /^[~^>=<]+/ strips the whole operator — the old
                // single-char /^[~^>=<]/ left ">=1.2.3" as "=1.2.3".
                const bare = dep.range.replace(/^[~^>=<]+/, "").split(" ")[0] ?? dep.range;
                declaredVersions.set(dep.name, bare);
                roots.push(dep.name);
            }
        }
    }
    // Apply version overrides (--after-update mode)
    if (resolvedVersionOverrides) {
        for (const [name, version] of resolvedVersionOverrides) {
            declaredVersions.set(name, version);
        }
    }
    const packageNames = Array.from(declaredVersions.keys());
    // ─ Step 2: fetch peer dependency data ─────────────────────────────────────
    // Check cache first, then fetch missing ones from registry
    const peerDataByName = new Map();
    const uncached = [];
    for (const name of packageNames) {
        const cached = await cache.getAny(name, "latest");
        const resolvedVersion = resolvedVersionOverrides?.get(name) ??
            declaredVersions.get(name) ??
            "0.0.0";
        if (cached) {
            // We have cached packument; peer deps would need a separate field.
            // For now, initialize with empty peers (fetched below if needed).
            peerDataByName.set(name, {
                resolvedVersion,
                peerRequirements: new Map(),
            });
        }
        else {
            uncached.push(name);
        }
    }
    // Fetch peer deps for packages not in cache
    if (uncached.length > 0) {
        // Warm the registry cache; the result value is not needed here.
        // (Fix: the original bound it to an unused `fetched` local.)
        await registry.resolveManyPackageMetadata(uncached, {
            concurrency: options.concurrency,
            timeoutMs: options.registryTimeoutMs,
            retries: 2,
        });
        for (const name of uncached) {
            const resolvedVersion = resolvedVersionOverrides?.get(name) ??
                declaredVersions.get(name) ??
                "0.0.0";
            peerDataByName.set(name, {
                resolvedVersion,
                peerRequirements: new Map(), // peer deps from packument handled below
            });
        }
        // The registry packument includes peerDependencies in the version object.
        // Fetch peer deps via a targeted per-package request for the resolved version.
        const peerFetchTasks = uncached.map((name) => async () => {
            const resolvedVersion = resolvedVersionOverrides?.get(name) ??
                declaredVersions.get(name) ??
                "0.0.0";
            const peerDeps = await fetchPeerDepsForVersion(name, resolvedVersion, options.registryTimeoutMs);
            const existing = peerDataByName.get(name);
            if (existing) {
                existing.peerRequirements = peerDeps;
            }
        });
        await asyncPool(options.concurrency, peerFetchTasks);
    }
    // Also fetch peer deps for cached packages where we don't have peer data.
    // (Fix: membership test via Set — `uncached.includes` in the filter was
    // accidental O(n²) across the dependency list.)
    const uncachedSet = new Set(uncached);
    const cachedPeerFetchTasks = packageNames
        .filter((n) => !uncachedSet.has(n))
        .map((name) => async () => {
        const resolvedVersion = resolvedVersionOverrides?.get(name) ??
            declaredVersions.get(name) ??
            "0.0.0";
        const peerDeps = await fetchPeerDepsForVersion(name, resolvedVersion, options.registryTimeoutMs);
        const existing = peerDataByName.get(name);
        if (existing && peerDeps.size > 0) {
            existing.peerRequirements = peerDeps;
        }
    });
    await asyncPool(options.concurrency, cachedPeerFetchTasks);
    // ─ Step 3: assemble PeerGraph ─────────────────────────────────────────────
    const nodes = new Map();
    for (const [name, metadata] of peerDataByName) {
        if (metadata.peerRequirements.size > 0) {
            nodes.set(name, {
                name,
                resolvedVersion: metadata.resolvedVersion,
                peerRequirements: metadata.peerRequirements,
            });
        }
    }
    // Also add nodes that are referenced AS peers (so the resolver can look them up).
    // Entries added during Map iteration are visited later but have empty
    // peerRequirements, so the inner loop no-ops on them.
    for (const [, node] of nodes) {
        for (const [peerName] of node.peerRequirements) {
            if (!nodes.has(peerName) && declaredVersions.has(peerName)) {
                const meta = peerDataByName.get(peerName);
                nodes.set(peerName, {
                    name: peerName,
                    resolvedVersion: meta?.resolvedVersion ?? declaredVersions.get(peerName) ?? "0.0.0",
                    peerRequirements: new Map(), // this node has no peer requirements of its own
                });
            }
        }
    }
    return { nodes, roots: [...new Set(roots)] };
}
|
|
154
|
+
/**
 * Fetches peerDependencies for a specific version of a package directly from
 * the npm registry packument. Returns an empty Map on any failure.
 *
 * @param {string} packageName - package to look up
 * @param {string} version - exact version whose peer deps are wanted
 * @param {number} timeoutMs - request timeout in milliseconds
 * @returns {Promise<Map<string, string>>} peer name → required range
 */
async function fetchPeerDepsForVersion(packageName, version, timeoutMs) {
    const peerDeps = new Map();
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeoutMs);
    try {
        const url = `https://registry.npmjs.org/${encodeURIComponent(packageName)}`;
        const response = await fetch(url, {
            signal: controller.signal,
            headers: { accept: "application/json" },
        });
        if (!response.ok)
            return peerDeps;
        const packument = (await response.json());
        const versionData = packument.versions?.[version];
        if (!versionData?.peerDependencies)
            return peerDeps;
        for (const [peer, range] of Object.entries(versionData.peerDependencies)) {
            peerDeps.set(peer, range);
        }
    }
    catch {
        // Network/parse failure — return empty peer map (no false positives)
    }
    finally {
        // Fix: clear the abort timer on every exit path; the original only
        // cleared it on success, leaving a live timer after a rejected fetch.
        clearTimeout(timer);
    }
    return peerDeps;
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import type { PeerConflict } from "../../../types/index.js";
|
|
2
|
+
interface ConflictInput {
|
|
3
|
+
requester: string;
|
|
4
|
+
peer: string;
|
|
5
|
+
requiredRange: string;
|
|
6
|
+
resolvedVersion: string;
|
|
7
|
+
isInstalled: boolean;
|
|
8
|
+
}
|
|
9
|
+
/**
|
|
10
|
+
* Classifies a potential peer conflict and generates a human-readable suggestion.
|
|
11
|
+
*
|
|
12
|
+
* Severity rules:
|
|
13
|
+
* "error" — peer is not installed at all
|
|
14
|
+
* "error" — resolved version is outside the required range entirely
|
|
15
|
+
* (would produce ERESOLVE in npm)
|
|
16
|
+
* "warning" — resolved version satisfies a subrange of the requirement
|
|
17
|
+
* but crosses a major boundary (soft peer warning in npm 7+)
|
|
18
|
+
*/
|
|
19
|
+
export declare function classifyConflict(input: ConflictInput): PeerConflict;
|
|
20
|
+
export {};
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { parseVersion } from "../../../utils/semver.js";
|
|
2
|
+
/**
 * Classifies a potential peer conflict and generates a human-readable suggestion.
 *
 * Severity rules:
 *   "error"   — peer is not installed at all, or the resolved version is
 *               outside the required range entirely (would produce ERESOLVE
 *               in npm)
 *   "warning" — resolved version shares the required major but misses the
 *               concrete floor (soft peer warning in npm 7+)
 */
export function classifyConflict(input) {
    const { requester, peer, requiredRange, resolvedVersion } = input;
    return {
        requester,
        peer,
        requiredRange,
        resolvedVersion,
        severity: determineSeverity(input),
        suggestion: buildSuggestion(input),
    };
}
|
|
24
|
+
/**
 * Decides how severe a peer mismatch is.
 *
 * "error"   — peer not installed, versions unparseable, or resolved major
 *             differs from the range's base major.
 * "warning" — same major series but the range is still unsatisfied
 *             (callers only invoke this for mismatches).
 */
function determineSeverity(input) {
    if (!input.isInstalled)
        return "error";
    // If we can parse both versions, check if they're in the same major series
    const resolved = parseVersion(input.resolvedVersion);
    const rangeVersion = extractBaseVersion(input.requiredRange);
    if (!resolved || !rangeVersion) {
        // Can't parse → assume it's a hard error to be safe
        return "error";
    }
    // Different major → ERESOLVE-level incompatibility
    if (resolved.major !== rangeVersion.major)
        return "error";
    // Same major but the range is still unsatisfied (e.g. resolved=18.1.0,
    // required=^18.3.0 — below the declared floor) → soft warning.
    // NOTE(review): the original comment here claimed this case was an
    // "error", contradicting the returned value; the code (warning) is
    // consistent with npm 7+ soft peer warnings and is kept as-is.
    return "warning";
}
|
|
41
|
+
/**
 * Extracts the base (floor) version from a semver range string, e.g.
 * "^18.3.0" → parsed 18.3.0. Returns whatever parseVersion yields for
 * the first token after stripping leading range operators.
 */
function extractBaseVersion(range) {
    const bare = range.trim().replace(/^[~^>=<]+/, "");
    const [firstToken] = bare.split(" ");
    return parseVersion(firstToken ?? bare);
}
|
|
45
|
+
/**
 * Builds a human-readable remediation hint for a peer conflict: either an
 * install instruction (peer missing) or an upgrade instruction (peer present
 * but outside the required range).
 */
function buildSuggestion({ isInstalled, peer, requiredRange, requester, resolvedVersion }) {
    if (!isInstalled) {
        return `Install ${peer}@${requiredRange} — required by ${requester} but not found in the dependency tree`;
    }
    // Drop the ~/^ operator so the suggestion names the concrete floor version
    const floor = requiredRange.replace(/^[~^]/, "");
    return `Upgrade ${peer} from ${resolvedVersion} to ${floor} (required by ${requester}: "${peer}": "${requiredRange}")`;
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { PeerGraph, PeerConflict } from "../../../types/index.js";
|
|
2
|
+
/**
|
|
3
|
+
* Resolves peer conflicts in the given PeerGraph.
|
|
4
|
+
*
|
|
5
|
+
* Algorithm: single-pass BFS over the graph.
|
|
6
|
+
* For each node N that has peer requirements:
|
|
7
|
+
* For each (peerName, requiredRange) in N.peerRequirements:
|
|
8
|
+
* 1. Look up peerName in the graph (the resolved version we have)
|
|
9
|
+
* 2. Call satisfies(resolvedVersion, requiredRange)
|
|
10
|
+
* 3. If not satisfied → conflict
|
|
11
|
+
*
|
|
12
|
+
* Complexity: O(n × max_peers_per_package) — effectively O(n) since
|
|
13
|
+
* peerDependencies counts are always small (< 10 in practice).
|
|
14
|
+
*
|
|
15
|
+
* Returns an array of conflicts sorted by severity (errors first) then by name.
|
|
16
|
+
*/
|
|
17
|
+
export declare function resolvePeerConflicts(graph: PeerGraph): PeerConflict[];
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { satisfies } from "../../../utils/semver.js";
|
|
2
|
+
import { classifyConflict } from "./conflict.js";
|
|
3
|
+
/**
 * Resolves peer conflicts in the given PeerGraph.
 *
 * Algorithm: single-pass BFS over the graph.
 * For each node N that has peer requirements:
 *   For each (peerName, requiredRange) in N.peerRequirements:
 *     1. Look up peerName in the graph (the resolved version we have)
 *     2. Call satisfies(resolvedVersion, requiredRange)
 *     3. If not satisfied → conflict
 *
 * Complexity: O(n × max_peers_per_package) — effectively O(n) since
 * peerDependencies counts are always small (< 10 in practice).
 *
 * Returns an array of conflicts sorted by severity (errors first) then by name.
 */
export function resolvePeerConflicts(graph) {
    const conflicts = [];
    const visited = new Set();
    // BFS via an index cursor (avoids O(n) Array#shift per dequeue)
    const queue = [...graph.roots];
    let head = 0;
    while (head < queue.length) {
        const current = queue[head];
        head += 1;
        if (visited.has(current))
            continue;
        visited.add(current);
        const node = graph.nodes.get(current);
        // Not in the graph → declares no peer requirements; nothing to check
        if (!node)
            continue;
        for (const [peerName, requiredRange] of node.peerRequirements) {
            const peerNode = graph.nodes.get(peerName);
            if (!peerNode) {
                // Package not in the dependency tree at all → hard error
                conflicts.push(classifyConflict({
                    requester: current,
                    peer: peerName,
                    requiredRange,
                    resolvedVersion: "(not installed)",
                    isInstalled: false,
                }));
                continue;
            }
            if (!satisfies(peerNode.resolvedVersion, requiredRange)) {
                conflicts.push(classifyConflict({
                    requester: current,
                    peer: peerName,
                    requiredRange,
                    resolvedVersion: peerNode.resolvedVersion,
                    isInstalled: true,
                }));
            }
            // Always enqueue the peer so its own requirements get processed
            if (!visited.has(peerName)) {
                queue.push(peerName);
            }
        }
    }
    // Sort: errors first, then warnings; within category sort by requester name
    conflicts.sort((a, b) => {
        if (a.severity === b.severity) {
            return a.requester.localeCompare(b.requester);
        }
        return a.severity === "error" ? -1 : 1;
    });
    return conflicts;
}
|