@rainy-updates/cli 0.5.1 → 0.5.2-rc.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/CHANGELOG.md +68 -1
  2. package/README.md +84 -25
  3. package/dist/bin/cli.js +30 -0
  4. package/dist/commands/audit/mapper.js +1 -1
  5. package/dist/commands/licenses/parser.d.ts +2 -0
  6. package/dist/commands/licenses/parser.js +116 -0
  7. package/dist/commands/licenses/runner.d.ts +9 -0
  8. package/dist/commands/licenses/runner.js +163 -0
  9. package/dist/commands/licenses/sbom.d.ts +10 -0
  10. package/dist/commands/licenses/sbom.js +70 -0
  11. package/dist/commands/resolve/graph/builder.d.ts +20 -0
  12. package/dist/commands/resolve/graph/builder.js +183 -0
  13. package/dist/commands/resolve/graph/conflict.d.ts +20 -0
  14. package/dist/commands/resolve/graph/conflict.js +52 -0
  15. package/dist/commands/resolve/graph/resolver.d.ts +17 -0
  16. package/dist/commands/resolve/graph/resolver.js +71 -0
  17. package/dist/commands/resolve/parser.d.ts +2 -0
  18. package/dist/commands/resolve/parser.js +89 -0
  19. package/dist/commands/resolve/runner.d.ts +13 -0
  20. package/dist/commands/resolve/runner.js +136 -0
  21. package/dist/commands/snapshot/parser.d.ts +2 -0
  22. package/dist/commands/snapshot/parser.js +80 -0
  23. package/dist/commands/snapshot/runner.d.ts +11 -0
  24. package/dist/commands/snapshot/runner.js +115 -0
  25. package/dist/commands/snapshot/store.d.ts +35 -0
  26. package/dist/commands/snapshot/store.js +158 -0
  27. package/dist/commands/unused/matcher.d.ts +22 -0
  28. package/dist/commands/unused/matcher.js +95 -0
  29. package/dist/commands/unused/parser.d.ts +2 -0
  30. package/dist/commands/unused/parser.js +95 -0
  31. package/dist/commands/unused/runner.d.ts +11 -0
  32. package/dist/commands/unused/runner.js +113 -0
  33. package/dist/commands/unused/scanner.d.ts +18 -0
  34. package/dist/commands/unused/scanner.js +129 -0
  35. package/dist/core/impact.d.ts +36 -0
  36. package/dist/core/impact.js +82 -0
  37. package/dist/core/options.d.ts +13 -1
  38. package/dist/core/options.js +35 -13
  39. package/dist/types/index.d.ts +153 -0
  40. package/dist/utils/semver.d.ts +18 -0
  41. package/dist/utils/semver.js +88 -3
  42. package/package.json +1 -1
@@ -0,0 +1,70 @@
1
+ import { randomUUID } from "node:crypto";
2
+ import path from "node:path";
3
+ /**
4
+ * Generates an SPDX 2.3 compliant SBOM JSON document from a list of
5
+ * scanned package licenses.
6
+ *
7
+ * SPDX 2.3 spec: https://spdx.github.io/spdx-spec/v2.3/
8
+ * Required by: CISA SBOM mandate, EU Cyber Resilience Act, many enterprise
9
+ * security standards.
10
+ */
11
+ export function generateSbom(packages, projectName) {
12
+ const docId = `SPDXRef-DOCUMENT`;
13
+ const rootId = `SPDXRef-Package-root`;
14
+ const timestamp = new Date().toISOString();
15
+ const namespace = `https://spdx.org/spdxdocs/${encodeURIComponent(path.basename(projectName))}-${randomUUID()}`;
16
+ const spdxPackages = [
17
+ // Root package entry
18
+ {
19
+ SPDXID: rootId,
20
+ name: path.basename(projectName),
21
+ versionInfo: "NOASSERTION",
22
+ downloadLocation: "NOASSERTION",
23
+ licenseConcluded: "NOASSERTION",
24
+ licenseDeclared: "NOASSERTION",
25
+ copyrightText: "NOASSERTION",
26
+ },
27
+ // One entry per dependency
28
+ ...packages.map((pkg) => ({
29
+ SPDXID: toSpdxId(pkg.name, pkg.version),
30
+ name: pkg.name,
31
+ versionInfo: pkg.version,
32
+ downloadLocation: pkg.repository
33
+ ? normalizeRepoUrl(pkg.repository)
34
+ : `https://www.npmjs.com/package/${encodeURIComponent(pkg.name)}`,
35
+ licenseConcluded: pkg.spdxExpression ?? "NOASSERTION",
36
+ licenseDeclared: pkg.spdxExpression ?? "NOASSERTION",
37
+ copyrightText: "NOASSERTION",
38
+ })),
39
+ ];
40
+ const relationships = [
41
+ {
42
+ spdxElementId: docId,
43
+ relationshipType: "DESCRIBES",
44
+ relatedSpdxElement: rootId,
45
+ },
46
+ ...packages.map((pkg) => ({
47
+ spdxElementId: rootId,
48
+ relationshipType: "DEPENDS_ON",
49
+ relatedSpdxElement: toSpdxId(pkg.name, pkg.version),
50
+ })),
51
+ ];
52
+ return {
53
+ spdxVersion: "SPDX-2.3",
54
+ dataLicense: "CC0-1.0",
55
+ name: `SBOM for ${path.basename(projectName)}`,
56
+ documentNamespace: namespace,
57
+ packages: spdxPackages,
58
+ relationships,
59
+ };
60
+ }
61
+ /** Converts a package name + version to a valid SPDX ID. */
62
+ function toSpdxId(name, version) {
63
+ const safe = `${name}-${version}`.replace(/[^a-zA-Z0-9-.]/g, "-");
64
+ return `SPDXRef-Package-${safe}`;
65
+ }
66
+ /** Normalize various repository URL formats to a clean string. */
67
+ function normalizeRepoUrl(raw) {
68
+ // git+https://... or git://...
69
+ return raw.replace(/^git\+/, "").replace(/\.git$/, "");
70
+ }
@@ -0,0 +1,20 @@
1
import type { PeerGraph, ResolveOptions } from "../../../types/index.js";
/**
 * Builds an in-memory PeerGraph from the direct dependencies declared in
 * package.json, enriched with peerDependency ranges fetched from the registry.
 *
 * Performance strategy:
 * - Collect all unique package names first (single pass)
 * - Check cache for packument data (zero network cost on cache hit)
 * - Fetch missing ones via asyncPool (parallel, up to options.concurrency)
 * - Build PeerGraph from merged results
 *
 * The graph only contains packages that declare peerDependencies — packages
 * without peers are implicitly conflict-free and excluded to keep the graph lean.
 *
 * @param options - Resolve command options (cwd, workspace flag, concurrency, registry timeout).
 * @param resolvedVersionOverrides - Optional override of the resolved versions map
 *   (used by --after-update mode to inject proposed upgrade versions before
 *   writing them to disk).
 * @returns The assembled peer-dependency graph.
 */
export declare function buildPeerGraph(options: ResolveOptions,
/**
 * Optional override of the resolved versions map (used by --after-update mode
 * to inject proposed upgrade versions before writing them to disk).
 */
resolvedVersionOverrides?: Map<string, string>): Promise<PeerGraph>;
@@ -0,0 +1,183 @@
1
+ import { asyncPool } from "../../../utils/async-pool.js";
2
+ import { VersionCache } from "../../../cache/cache.js";
3
+ import { NpmRegistryClient } from "../../../registry/npm.js";
4
+ import { readManifest, collectDependencies, } from "../../../parsers/package-json.js";
5
+ import { discoverPackageDirs } from "../../../workspace/discover.js";
6
+ /**
7
+ * Builds an in-memory PeerGraph from the direct dependencies declared in
8
+ * package.json, enriched with peerDependency ranges fetched from the registry.
9
+ *
10
+ * Performance strategy:
11
+ * - Collect all unique package names first (single pass)
12
+ * - Check cache for packument data (zero network cost on cache hit)
13
+ * - Fetch missing ones via asyncPool (parallel, up to options.concurrency)
14
+ * - Build PeerGraph from merged results
15
+ *
16
+ * The graph only contains packages that declare peerDependencies — packages
17
+ * without peers are implicitly conflict-free and excluded to keep the graph lean.
18
+ */
19
+ export async function buildPeerGraph(options,
20
+ /**
21
+ * Optional override of the resolved versions map (used by --after-update mode
22
+ * to inject proposed upgrade versions before writing them to disk).
23
+ */
24
+ resolvedVersionOverrides) {
25
+ const packageDirs = await discoverPackageDirs(options.cwd, options.workspace);
26
+ const cache = await VersionCache.create();
27
+ const registry = new NpmRegistryClient(options.cwd, {
28
+ timeoutMs: options.registryTimeoutMs,
29
+ retries: 2,
30
+ });
31
+ // ─ Step 1: collect all declared dependencies and their current versions ────
32
+ const declaredVersions = new Map(); // name → range/version
33
+ const roots = [];
34
+ for (const packageDir of packageDirs) {
35
+ let manifest;
36
+ try {
37
+ manifest = await readManifest(packageDir);
38
+ }
39
+ catch {
40
+ continue;
41
+ }
42
+ const deps = collectDependencies(manifest, [
43
+ "dependencies",
44
+ "devDependencies",
45
+ "optionalDependencies",
46
+ ]);
47
+ for (const dep of deps) {
48
+ if (!declaredVersions.has(dep.name)) {
49
+ // Strip range prefix to get a bare version for peer satisfaction checks
50
+ const bare = dep.range.replace(/^[~^>=<]/, "").split(" ")[0] ?? dep.range;
51
+ declaredVersions.set(dep.name, bare);
52
+ roots.push(dep.name);
53
+ }
54
+ }
55
+ }
56
+ // Apply version overrides (--after-update mode)
57
+ if (resolvedVersionOverrides) {
58
+ for (const [name, version] of resolvedVersionOverrides) {
59
+ declaredVersions.set(name, version);
60
+ }
61
+ }
62
+ const packageNames = Array.from(declaredVersions.keys());
63
+ // ─ Step 2: fetch peer dependency data ─────────────────────────────────────
64
+ // Check cache first, then fetch missing ones from registry
65
+ const peerDataByName = new Map();
66
+ const uncached = [];
67
+ for (const name of packageNames) {
68
+ const cached = await cache.getAny(name, "latest");
69
+ const resolvedVersion = resolvedVersionOverrides?.get(name) ??
70
+ declaredVersions.get(name) ??
71
+ "0.0.0";
72
+ if (cached) {
73
+ // We have cached packument; peer deps would need a separate field.
74
+ // For now, initialize with empty peers (fetched below if needed).
75
+ peerDataByName.set(name, {
76
+ resolvedVersion,
77
+ peerRequirements: new Map(),
78
+ });
79
+ }
80
+ else {
81
+ uncached.push(name);
82
+ }
83
+ }
84
+ // Fetch peer deps for packages not in cache
85
+ if (uncached.length > 0) {
86
+ const fetched = await registry.resolveManyPackageMetadata(uncached, {
87
+ concurrency: options.concurrency,
88
+ timeoutMs: options.registryTimeoutMs,
89
+ retries: 2,
90
+ });
91
+ for (const name of uncached) {
92
+ const resolvedVersion = resolvedVersionOverrides?.get(name) ??
93
+ declaredVersions.get(name) ??
94
+ "0.0.0";
95
+ peerDataByName.set(name, {
96
+ resolvedVersion,
97
+ peerRequirements: new Map(), // peer deps from packument handled below
98
+ });
99
+ }
100
+ // The registry packument includes peerDependencies in the version object.
101
+ // Fetch peer deps via a targeted per-package request for the resolved version.
102
+ const peerFetchTasks = uncached.map((name) => async () => {
103
+ const resolvedVersion = resolvedVersionOverrides?.get(name) ??
104
+ declaredVersions.get(name) ??
105
+ "0.0.0";
106
+ const peerDeps = await fetchPeerDepsForVersion(name, resolvedVersion, options.registryTimeoutMs);
107
+ const existing = peerDataByName.get(name);
108
+ if (existing) {
109
+ existing.peerRequirements = peerDeps;
110
+ }
111
+ });
112
+ await asyncPool(options.concurrency, peerFetchTasks);
113
+ }
114
+ // Also fetch peer deps for cached packages where we don't have peer data
115
+ const cachedPeerFetchTasks = packageNames
116
+ .filter((n) => !uncached.includes(n))
117
+ .map((name) => async () => {
118
+ const resolvedVersion = resolvedVersionOverrides?.get(name) ??
119
+ declaredVersions.get(name) ??
120
+ "0.0.0";
121
+ const peerDeps = await fetchPeerDepsForVersion(name, resolvedVersion, options.registryTimeoutMs);
122
+ const existing = peerDataByName.get(name);
123
+ if (existing && peerDeps.size > 0) {
124
+ existing.peerRequirements = peerDeps;
125
+ }
126
+ });
127
+ await asyncPool(options.concurrency, cachedPeerFetchTasks);
128
+ // ─ Step 3: assemble PeerGraph ─────────────────────────────────────────────
129
+ const nodes = new Map();
130
+ for (const [name, metadata] of peerDataByName) {
131
+ if (metadata.peerRequirements.size > 0) {
132
+ nodes.set(name, {
133
+ name,
134
+ resolvedVersion: metadata.resolvedVersion,
135
+ peerRequirements: metadata.peerRequirements,
136
+ });
137
+ }
138
+ }
139
+ // Also add nodes that are referenced AS peers (so the resolver can look them up)
140
+ for (const [, node] of nodes) {
141
+ for (const [peerName] of node.peerRequirements) {
142
+ if (!nodes.has(peerName) && declaredVersions.has(peerName)) {
143
+ const meta = peerDataByName.get(peerName);
144
+ nodes.set(peerName, {
145
+ name: peerName,
146
+ resolvedVersion: meta?.resolvedVersion ?? declaredVersions.get(peerName) ?? "0.0.0",
147
+ peerRequirements: new Map(), // this node has no peer requirements of its own
148
+ });
149
+ }
150
+ }
151
+ }
152
+ return { nodes, roots: [...new Set(roots)] };
153
+ }
154
+ /**
155
+ * Fetches peerDependencies for a specific version of a package directly from
156
+ * the npm registry packument. Returns an empty Map on any failure.
157
+ */
158
+ async function fetchPeerDepsForVersion(packageName, version, timeoutMs) {
159
+ const peerDeps = new Map();
160
+ try {
161
+ const controller = new AbortController();
162
+ const timer = setTimeout(() => controller.abort(), timeoutMs);
163
+ const url = `https://registry.npmjs.org/${encodeURIComponent(packageName)}`;
164
+ const response = await fetch(url, {
165
+ signal: controller.signal,
166
+ headers: { accept: "application/json" },
167
+ });
168
+ clearTimeout(timer);
169
+ if (!response.ok)
170
+ return peerDeps;
171
+ const packument = (await response.json());
172
+ const versionData = packument.versions?.[version];
173
+ if (!versionData?.peerDependencies)
174
+ return peerDeps;
175
+ for (const [peer, range] of Object.entries(versionData.peerDependencies)) {
176
+ peerDeps.set(peer, range);
177
+ }
178
+ }
179
+ catch {
180
+ // Network/parse failure — return empty peer map (no false positives)
181
+ }
182
+ return peerDeps;
183
+ }
@@ -0,0 +1,20 @@
1
import type { PeerConflict } from "../../../types/index.js";
/** One requester → peer requirement pair to classify. */
interface ConflictInput {
    /** Package that declares the peerDependency. */
    requester: string;
    /** The peer package being required. */
    peer: string;
    /** Semver range the requester demands for the peer. */
    requiredRange: string;
    /** Version of the peer actually resolved in the dependency tree. */
    resolvedVersion: string;
    /** False when the peer is absent from the dependency tree entirely. */
    isInstalled: boolean;
}
/**
 * Classifies a potential peer conflict and generates a human-readable suggestion.
 *
 * Severity rules:
 *   "error" — peer is not installed at all
 *   "error" — resolved version is outside the required range entirely
 *             (would produce ERESOLVE in npm)
 *   "warning" — resolved version satisfies a subrange of the requirement
 *               but crosses a major boundary (soft peer warning in npm 7+)
 */
export declare function classifyConflict(input: ConflictInput): PeerConflict;
export {};
@@ -0,0 +1,52 @@
1
+ import { parseVersion } from "../../../utils/semver.js";
2
/**
 * Classifies a potential peer conflict and generates a human-readable suggestion.
 *
 * Severity rules:
 *   "error" — peer is not installed at all
 *   "error" — resolved version is outside the required range entirely
 *             (would produce ERESOLVE in npm)
 *   "warning" — resolved version satisfies a subrange of the requirement
 *               but crosses a major boundary (soft peer warning in npm 7+)
 */
export function classifyConflict(input) {
    const { requester, peer, requiredRange, resolvedVersion } = input;
    // Severity first, suggestion second — both derived from the same input.
    const severity = determineSeverity(input);
    const suggestion = buildSuggestion(input);
    return {
        requester,
        peer,
        requiredRange,
        resolvedVersion,
        severity,
        suggestion,
    };
}
24
/**
 * Decides whether a peer mismatch is a hard "error" or a soft "warning".
 * Called only for peers that already failed the satisfies() check upstream.
 */
function determineSeverity(input) {
    // A peer missing from the tree entirely is always a hard failure.
    if (!input.isInstalled)
        return "error";
    // If we can parse both versions, check if they're in the same major series
    const resolved = parseVersion(input.resolvedVersion);
    const rangeVersion = extractBaseVersion(input.requiredRange);
    if (!resolved || !rangeVersion) {
        // Can't parse → assume it's a hard error to be safe
        return "error";
    }
    // Different major → ERESOLVE-level incompatibility
    if (resolved.major !== rangeVersion.major)
        return "error";
    // Same major series → soft warning, even when the resolved version is
    // below the concrete floor of the range (e.g. resolved=18.1.0 with
    // required=^18.3.0).
    // NOTE(review): the documented severity rules treat "outside the required
    // range entirely" as an error, which arguably covers that floor-miss
    // case; the implementation returns "warning" for every same-major
    // mismatch — confirm which behavior is intended.
    return "warning";
}
41
/**
 * Strips leading range operators (~ ^ > = <) from a semver range and parses
 * the first whitespace-delimited token that remains.
 */
function extractBaseVersion(range) {
    const bare = range.trim().replace(/^[~^>=<]+/, "");
    const spaceAt = bare.indexOf(" ");
    const firstToken = spaceAt === -1 ? bare : bare.slice(0, spaceAt);
    return parseVersion(firstToken);
}
45
/** Builds the human-readable fix suggestion for a classified conflict. */
function buildSuggestion(input) {
    const { requester, peer, requiredRange } = input;
    // Missing peer → suggest installing it at the required range.
    if (!input.isInstalled) {
        return `Install ${peer}@${requiredRange} — required by ${requester} but not found in the dependency tree`;
    }
    // Installed but mismatched → suggest upgrading to the range's base version.
    const floor = requiredRange.replace(/^[~^]/, "");
    const provenance = `(required by ${requester}: "${peer}": "${requiredRange}")`;
    return `Upgrade ${peer} from ${input.resolvedVersion} to ${floor} ${provenance}`;
}
@@ -0,0 +1,17 @@
1
import type { PeerGraph, PeerConflict } from "../../../types/index.js";
/**
 * Resolves peer conflicts in the given PeerGraph.
 *
 * Algorithm: single-pass BFS over the graph.
 * For each node N that has peer requirements:
 *   For each (peerName, requiredRange) in N.peerRequirements:
 *     1. Look up peerName in the graph (the resolved version we have)
 *     2. Call satisfies(resolvedVersion, requiredRange)
 *     3. If not satisfied → conflict
 *
 * Complexity: O(n × max_peers_per_package) — effectively O(n) since
 * peerDependencies counts are always small (< 10 in practice).
 *
 * @param graph - Peer graph produced by buildPeerGraph.
 * @returns An array of conflicts sorted by severity (errors first) then by requester name.
 */
export declare function resolvePeerConflicts(graph: PeerGraph): PeerConflict[];
@@ -0,0 +1,71 @@
1
+ import { satisfies } from "../../../utils/semver.js";
2
+ import { classifyConflict } from "./conflict.js";
3
+ /**
4
+ * Resolves peer conflicts in the given PeerGraph.
5
+ *
6
+ * Algorithm: single-pass BFS over the graph.
7
+ * For each node N that has peer requirements:
8
+ * For each (peerName, requiredRange) in N.peerRequirements:
9
+ * 1. Look up peerName in the graph (the resolved version we have)
10
+ * 2. Call satisfies(resolvedVersion, requiredRange)
11
+ * 3. If not satisfied → conflict
12
+ *
13
+ * Complexity: O(n × max_peers_per_package) — effectively O(n) since
14
+ * peerDependencies counts are always small (< 10 in practice).
15
+ *
16
+ * Returns an array of conflicts sorted by severity (errors first) then by name.
17
+ */
18
+ export function resolvePeerConflicts(graph) {
19
+ const conflicts = [];
20
+ const queue = [...graph.roots];
21
+ const visited = new Set();
22
+ // BFS traversal so we process in dependency order (roots first)
23
+ while (queue.length > 0) {
24
+ const name = queue.shift();
25
+ if (visited.has(name))
26
+ continue;
27
+ visited.add(name);
28
+ const node = graph.nodes.get(name);
29
+ if (!node) {
30
+ // Queue children: any node that references `name` as a peer
31
+ // (they will be checked when processed)
32
+ continue;
33
+ }
34
+ for (const [peerName, requiredRange] of node.peerRequirements) {
35
+ const peerNode = graph.nodes.get(peerName);
36
+ if (!peerNode) {
37
+ // Package not in the dependency tree at all → hard error
38
+ conflicts.push(classifyConflict({
39
+ requester: name,
40
+ peer: peerName,
41
+ requiredRange,
42
+ resolvedVersion: "(not installed)",
43
+ isInstalled: false,
44
+ }));
45
+ continue;
46
+ }
47
+ const peerVersion = peerNode.resolvedVersion;
48
+ const satisfied = satisfies(peerVersion, requiredRange);
49
+ if (!satisfied) {
50
+ conflicts.push(classifyConflict({
51
+ requester: name,
52
+ peer: peerName,
53
+ requiredRange,
54
+ resolvedVersion: peerVersion,
55
+ isInstalled: true,
56
+ }));
57
+ }
58
+ // Always enqueue the peer for processing
59
+ if (!visited.has(peerName)) {
60
+ queue.push(peerName);
61
+ }
62
+ }
63
+ }
64
+ // Sort: errors first, then warnings; within category sort by requester name
65
+ return conflicts.sort((a, b) => {
66
+ if (a.severity !== b.severity) {
67
+ return a.severity === "error" ? -1 : 1;
68
+ }
69
+ return a.requester.localeCompare(b.requester);
70
+ });
71
+ }
@@ -0,0 +1,2 @@
1
import type { ResolveOptions } from "../../types/index.js";
/**
 * Parses `rup resolve` CLI arguments into a ResolveOptions object.
 * Throws on unknown flags or flags missing a required value; prints help
 * text and exits the process on --help / -h.
 */
export declare function parseResolveArgs(args: string[]): ResolveOptions;
@@ -0,0 +1,89 @@
1
/**
 * Parses `rup resolve` CLI arguments into an options object.
 * Throws on unknown flags or flags missing a required value; prints help
 * and exits the process on --help / -h. Non-flag positionals are ignored.
 */
export function parseResolveArgs(args) {
    // Defaults mirror the values documented in the help text.
    const options = {
        cwd: process.cwd(),
        workspace: false,
        afterUpdate: false,
        safe: false,
        jsonFile: undefined,
        concurrency: 12,
        registryTimeoutMs: 10_000,
        cacheTtlSeconds: 3600,
    };
    let i = 0;
    // Consumes the token after the current flag as its required value.
    const takeValue = (flag) => {
        const value = args[i + 1];
        if (!value)
            throw new Error(`Missing value for ${flag}`);
        i++;
        return value;
    };
    while (i < args.length) {
        const flag = args[i];
        switch (flag) {
            case "--workspace":
                options.workspace = true;
                break;
            case "--after-update":
                options.afterUpdate = true;
                break;
            case "--safe":
                options.safe = true;
                break;
            case "--cwd":
                options.cwd = takeValue("--cwd");
                break;
            case "--json-file":
                options.jsonFile = takeValue("--json-file");
                break;
            case "--concurrency": {
                const n = Number(takeValue("--concurrency"));
                if (!Number.isInteger(n) || n <= 0)
                    throw new Error("--concurrency must be a positive integer");
                options.concurrency = n;
                break;
            }
            case "--timeout": {
                const ms = Number(takeValue("--timeout"));
                if (!Number.isFinite(ms) || ms <= 0)
                    throw new Error("--timeout must be a positive number");
                options.registryTimeoutMs = ms;
                break;
            }
            case "--help":
            case "-h":
                process.stdout.write(RESOLVE_HELP);
                process.exit(0);
                break;
            default:
                if (flag.startsWith("-"))
                    throw new Error(`Unknown option: ${flag}`);
        }
        i++;
    }
    return options;
}
70
/**
 * Help text printed verbatim by parseResolveArgs on --help / -h.
 * The leading newline is removed by trimStart().
 */
const RESOLVE_HELP = `
rup resolve — Detect peer dependency conflicts (pure-TS, no subprocess spawn)

Usage:
rup resolve [options]

Options:
--after-update Simulate conflicts after applying pending \`rup check\` updates
--safe Exit non-zero if any error-level conflicts exist
--workspace Scan all workspace packages
--json-file <path> Write JSON conflict report to file
--timeout <ms> Registry request timeout in ms (default: 10000)
--concurrency <n> Parallel registry requests (default: 12)
--cwd <path> Working directory (default: cwd)
--help Show this help

Exit codes:
0 No conflicts
1 One or more peer conflicts detected
`.trimStart();
@@ -0,0 +1,13 @@
1
import type { ResolveOptions, ResolveResult } from "../../types/index.js";
/**
 * Entry point for `rup resolve`. Lazy-loaded by cli.ts.
 *
 * Modes:
 *   default        — check current peer-dep state for conflicts
 *   --after-update — re-check after applying pending `rup check` updates
 *                    in-memory (reads proposed versions from check runner)
 *
 * The pure-TS peer graph is assembled entirely from registry data; no subprocess
 * is spawned. When the cache is warm this completes in < 1 s for typical projects.
 *
 * @param options - Parsed resolve command options.
 * @returns The conflict report (also rendered to stdout and optionally to a JSON file).
 */
export declare function runResolve(options: ResolveOptions): Promise<ResolveResult>;
@@ -0,0 +1,136 @@
1
+ import process from "node:process";
2
+ import { buildPeerGraph } from "./graph/builder.js";
3
+ import { resolvePeerConflicts } from "./graph/resolver.js";
4
+ import { stableStringify } from "../../utils/stable-json.js";
5
+ import { writeFileAtomic } from "../../utils/io.js";
6
/**
 * Entry point for `rup resolve`. Lazy-loaded by cli.ts.
 *
 * Modes:
 *   default        — check current peer-dep state for conflicts
 *   --after-update — re-check after applying pending `rup check` updates
 *                    in-memory (reads proposed versions from check runner)
 *
 * The pure-TS peer graph is assembled entirely from registry data; no subprocess
 * is spawned. When the cache is warm this completes in < 1 s for typical projects.
 */
export async function runResolve(options) {
    const result = {
        conflicts: [],
        errorConflicts: 0,
        warningConflicts: 0,
        errors: [],
        warnings: [],
    };
    // --after-update: overlay the versions `rup check` would apply.
    let overrides;
    if (options.afterUpdate) {
        overrides = await fetchProposedVersions(options);
        if (overrides.size === 0) {
            process.stderr.write("[resolve] No pending updates found — checking current state.\n");
        }
    }
    let graph;
    try {
        graph = await buildPeerGraph(options, overrides);
    }
    catch (err) {
        result.errors.push(`Failed to build peer graph: ${String(err)}`);
        return result;
    }
    result.conflicts = resolvePeerConflicts(graph);
    // Tally per-severity counts in a single pass.
    for (const conflict of result.conflicts) {
        if (conflict.severity === "error")
            result.errorConflicts += 1;
        if (conflict.severity === "warning")
            result.warningConflicts += 1;
    }
    process.stdout.write(renderConflictsTable(result, options) + "\n");
    if (options.jsonFile) {
        await writeFileAtomic(options.jsonFile, stableStringify(result, 2) + "\n");
        process.stderr.write(`[resolve] JSON report written to ${options.jsonFile}\n`);
    }
    return result;
}
51
/**
 * In --after-update mode, runs `rup check` logic in read-only mode to get
 * the proposed new versions, returning them as a version override map.
 *
 * Returns an empty Map when the check fails for any reason; the caller then
 * resolves against the current on-disk state instead.
 */
async function fetchProposedVersions(options) {
    const overrides = new Map(); // name → proposed upgrade version
    try {
        // Lazy import keeps the check machinery off the resolve cold path.
        const { check } = await import("../../core/check.js");
        // Every check option is spelled out explicitly. Only cwd/workspace/
        // concurrency and the cache/registry knobs come from the resolve
        // options; everything else is pinned to a report-only configuration
        // (no fix PRs, no output files, no CI gating, failOn "none").
        const checkResult = await check({
            cwd: options.cwd,
            workspace: options.workspace,
            concurrency: options.concurrency,
            target: "latest",
            filter: undefined,
            reject: undefined,
            includeKinds: ["dependencies", "devDependencies", "optionalDependencies"],
            ci: false,
            format: "table",
            jsonFile: undefined,
            githubOutputFile: undefined,
            sarifFile: undefined,
            cacheTtlSeconds: options.cacheTtlSeconds,
            registryTimeoutMs: options.registryTimeoutMs,
            registryRetries: 2,
            offline: false,
            stream: false,
            policyFile: undefined,
            prReportFile: undefined,
            failOn: "none",
            maxUpdates: undefined,
            fixPr: false,
            fixBranch: "chore/rainy-updates",
            fixCommitMessage: undefined,
            fixDryRun: false,
            fixPrNoCheckout: false,
            fixPrBatchSize: undefined,
            noPrReport: false,
            logLevel: "info",
            groupBy: "none",
            groupMax: undefined,
            cooldownDays: undefined,
            prLimit: undefined,
            onlyChanged: false,
            ciProfile: "minimal",
            lockfileMode: "preserve",
        });
        // Each proposed update contributes one name → resolved-version override.
        for (const update of checkResult.updates ?? []) {
            overrides.set(update.name, update.toVersionResolved);
        }
    }
    catch {
        // If check fails, fall back to current state (no overrides)
    }
    return overrides;
}
106
/**
 * Renders the conflict report as a human-readable string.
 * Zero conflicts → a single ✔ line; otherwise a header followed by an
 * errors section and a warnings section (each emitted only when non-empty).
 */
function renderConflictsTable(result, options) {
    const { conflicts } = result;
    const afterUpdate = options.afterUpdate;
    if (conflicts.length === 0) {
        if (afterUpdate) {
            return "✔ No peer conflicts detected after proposed updates are applied.";
        }
        return "✔ No peer conflicts detected in current dependency tree.";
    }
    // Partition by severity in a single pass.
    const errors = [];
    const warnings = [];
    for (const conflict of conflicts) {
        (conflict.severity === "error" ? errors : warnings).push(conflict);
    }
    const lines = [
        afterUpdate
            ? `\nPeer conflicts after proposed updates (${conflicts.length} found):\n`
            : `\nPeer conflicts in current dependency tree (${conflicts.length} found):\n`,
    ];
    // Shared two-line rendering for a single conflict entry.
    const pushEntry = (symbol, c) => {
        lines.push(` ${symbol} ${c.requester} requires ${c.peer}@${c.requiredRange} got ${c.resolvedVersion}`);
        lines.push(` → ${c.suggestion}`);
    };
    if (errors.length > 0) {
        lines.push(` ✖ Errors (${errors.length}) — would cause ERESOLVE on install:\n`);
        for (const c of errors) {
            pushEntry("\x1b[31m✖\x1b[0m", c);
        }
        lines.push("");
    }
    if (warnings.length > 0) {
        lines.push(` ⚠ Warnings (${warnings.length}) — soft peer incompatibilities:\n`);
        for (const c of warnings) {
            pushEntry("\x1b[33m⚠\x1b[0m", c);
        }
    }
    return lines.join("\n");
}