@rainy-updates/cli 0.5.7 → 0.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105)
  1. package/CHANGELOG.md +134 -0
  2. package/README.md +90 -31
  3. package/dist/bin/cli.js +11 -126
  4. package/dist/bin/dispatch.js +35 -32
  5. package/dist/bin/help.js +79 -2
  6. package/dist/bin/main.d.ts +1 -0
  7. package/dist/bin/main.js +126 -0
  8. package/dist/cache/cache.js +13 -11
  9. package/dist/commands/audit/parser.js +38 -2
  10. package/dist/commands/audit/runner.js +41 -61
  11. package/dist/commands/audit/targets.js +13 -13
  12. package/dist/commands/bisect/oracle.js +31 -11
  13. package/dist/commands/bisect/parser.js +3 -3
  14. package/dist/commands/bisect/runner.js +16 -8
  15. package/dist/commands/changelog/fetcher.js +11 -5
  16. package/dist/commands/dashboard/parser.js +144 -1
  17. package/dist/commands/dashboard/runner.d.ts +2 -2
  18. package/dist/commands/dashboard/runner.js +67 -37
  19. package/dist/commands/doctor/parser.js +53 -4
  20. package/dist/commands/doctor/runner.js +2 -2
  21. package/dist/commands/ga/parser.js +43 -4
  22. package/dist/commands/ga/runner.js +22 -13
  23. package/dist/commands/health/parser.js +38 -2
  24. package/dist/commands/health/runner.js +5 -1
  25. package/dist/commands/hook/parser.d.ts +2 -0
  26. package/dist/commands/hook/parser.js +40 -0
  27. package/dist/commands/hook/runner.d.ts +2 -0
  28. package/dist/commands/hook/runner.js +174 -0
  29. package/dist/commands/licenses/parser.js +39 -0
  30. package/dist/commands/licenses/runner.js +9 -5
  31. package/dist/commands/resolve/graph/builder.js +5 -1
  32. package/dist/commands/resolve/parser.js +39 -0
  33. package/dist/commands/resolve/runner.js +14 -4
  34. package/dist/commands/review/parser.js +101 -4
  35. package/dist/commands/review/runner.js +31 -5
  36. package/dist/commands/snapshot/parser.js +39 -0
  37. package/dist/commands/snapshot/runner.js +21 -18
  38. package/dist/commands/snapshot/store.d.ts +0 -12
  39. package/dist/commands/snapshot/store.js +26 -38
  40. package/dist/commands/unused/parser.js +39 -0
  41. package/dist/commands/unused/runner.js +10 -8
  42. package/dist/commands/unused/scanner.d.ts +2 -1
  43. package/dist/commands/unused/scanner.js +65 -52
  44. package/dist/config/loader.d.ts +2 -2
  45. package/dist/config/loader.js +2 -5
  46. package/dist/config/policy.js +20 -11
  47. package/dist/core/analysis/run-silenced.js +0 -1
  48. package/dist/core/artifacts.js +6 -5
  49. package/dist/core/baseline.js +3 -5
  50. package/dist/core/check.js +7 -3
  51. package/dist/core/ci.js +52 -1
  52. package/dist/core/decision-plan.d.ts +14 -0
  53. package/dist/core/decision-plan.js +107 -0
  54. package/dist/core/doctor/result.js +8 -5
  55. package/dist/core/fix-pr-batch.js +38 -28
  56. package/dist/core/fix-pr.js +27 -24
  57. package/dist/core/init-ci.js +34 -28
  58. package/dist/core/options.d.ts +4 -1
  59. package/dist/core/options.js +152 -4
  60. package/dist/core/review-model.js +3 -0
  61. package/dist/core/summary.js +6 -0
  62. package/dist/core/upgrade.js +64 -2
  63. package/dist/core/verification.d.ts +2 -0
  64. package/dist/core/verification.js +108 -0
  65. package/dist/core/warm-cache.js +7 -3
  66. package/dist/generated/version.d.ts +1 -0
  67. package/dist/generated/version.js +2 -0
  68. package/dist/git/scope.d.ts +19 -0
  69. package/dist/git/scope.js +167 -0
  70. package/dist/index.d.ts +2 -1
  71. package/dist/index.js +1 -0
  72. package/dist/output/format.js +15 -0
  73. package/dist/output/github.js +6 -0
  74. package/dist/output/sarif.js +12 -18
  75. package/dist/parsers/package-json.js +2 -4
  76. package/dist/pm/detect.d.ts +40 -1
  77. package/dist/pm/detect.js +152 -9
  78. package/dist/pm/install.d.ts +3 -1
  79. package/dist/pm/install.js +18 -17
  80. package/dist/registry/npm.js +34 -76
  81. package/dist/rup +0 -0
  82. package/dist/types/index.d.ts +134 -5
  83. package/dist/ui/tui.d.ts +4 -1
  84. package/dist/ui/tui.js +156 -67
  85. package/dist/utils/io.js +5 -6
  86. package/dist/utils/lockfile.js +24 -19
  87. package/dist/utils/runtime-paths.d.ts +4 -0
  88. package/dist/utils/runtime-paths.js +35 -0
  89. package/dist/utils/runtime.d.ts +7 -0
  90. package/dist/utils/runtime.js +32 -0
  91. package/dist/workspace/discover.d.ts +7 -1
  92. package/dist/workspace/discover.js +67 -54
  93. package/package.json +24 -19
  94. package/dist/ui/dashboard/DashboardTUI.d.ts +0 -6
  95. package/dist/ui/dashboard/DashboardTUI.js +0 -34
  96. package/dist/ui/dashboard/components/DetailPanel.d.ts +0 -4
  97. package/dist/ui/dashboard/components/DetailPanel.js +0 -30
  98. package/dist/ui/dashboard/components/Footer.d.ts +0 -4
  99. package/dist/ui/dashboard/components/Footer.js +0 -9
  100. package/dist/ui/dashboard/components/Header.d.ts +0 -4
  101. package/dist/ui/dashboard/components/Header.js +0 -12
  102. package/dist/ui/dashboard/components/Sidebar.d.ts +0 -4
  103. package/dist/ui/dashboard/components/Sidebar.js +0 -23
  104. package/dist/ui/dashboard/store.d.ts +0 -34
  105. package/dist/ui/dashboard/store.js +0 -148
@@ -1,16 +1,6 @@
1
- import { createHash } from "node:crypto";
2
- import { promises as fs } from "node:fs";
3
1
  import path from "node:path";
2
+ import { writeFileAtomic } from "../../utils/io.js";
4
3
  const DEFAULT_STORE_NAME = ".rup-snapshots.json";
5
- /**
6
- * Lightweight SQLite-free snapshot store (uses a JSON file in the project root).
7
- *
8
- * Design goals:
9
- * - No extra runtime dependencies (SQLite bindings vary by runtime)
10
- * - Human-readable store file (git-committable if desired)
11
- * - Atomic writes via tmp-rename to prevent corruption
12
- * - Fast: entire store fits in memory for typical use (< 50 snapshots)
13
- */
14
4
  export class SnapshotStore {
15
5
  storePath;
16
6
  entries = [];
@@ -26,8 +16,7 @@ export class SnapshotStore {
26
16
  if (this.loaded)
27
17
  return;
28
18
  try {
29
- const raw = await fs.readFile(this.storePath, "utf8");
30
- const parsed = JSON.parse(raw);
19
+ const parsed = (await Bun.file(this.storePath).json());
31
20
  if (Array.isArray(parsed)) {
32
21
  this.entries = parsed;
33
22
  }
@@ -38,9 +27,7 @@ export class SnapshotStore {
38
27
  this.loaded = true;
39
28
  }
40
29
  async save() {
41
- const tmp = this.storePath + ".tmp";
42
- await fs.writeFile(tmp, JSON.stringify(this.entries, null, 2) + "\n", "utf8");
43
- await fs.rename(tmp, this.storePath);
30
+ await writeFileAtomic(this.storePath, JSON.stringify(this.entries, null, 2) + "\n");
44
31
  }
45
32
  async saveSnapshot(manifests, lockfileHashes, label) {
46
33
  await this.load();
@@ -76,32 +63,30 @@ export class SnapshotStore {
76
63
  return false;
77
64
  }
78
65
  }
79
- /** Captures current package.json and lockfile state for a set of directories. */
80
66
  export async function captureState(packageDirs) {
81
67
  const manifests = {};
82
68
  const lockfileHashes = {};
83
- const LOCKFILES = [
69
+ const lockfiles = [
84
70
  "package-lock.json",
85
71
  "pnpm-lock.yaml",
86
72
  "yarn.lock",
73
+ "bun.lock",
87
74
  "bun.lockb",
88
75
  ];
89
76
  await Promise.all(packageDirs.map(async (dir) => {
90
- // Read package.json
91
77
  try {
92
- const content = await fs.readFile(path.join(dir, "package.json"), "utf8");
93
- manifests[dir] = content;
78
+ manifests[dir] = await Bun.file(path.join(dir, "package.json")).text();
94
79
  }
95
80
  catch {
96
81
  // No package.json — skip
97
82
  }
98
- // Hash the first lockfile found
99
- for (const lf of LOCKFILES) {
83
+ for (const lockfileName of lockfiles) {
84
+ const filePath = path.join(dir, lockfileName);
100
85
  try {
101
- const content = await fs.readFile(path.join(dir, lf));
102
- lockfileHashes[dir] = createHash("sha256")
103
- .update(content)
104
- .digest("hex");
86
+ const file = Bun.file(filePath);
87
+ if (!(await file.exists()))
88
+ continue;
89
+ lockfileHashes[dir] = await hashFile(filePath);
105
90
  break;
106
91
  }
107
92
  catch {
@@ -111,16 +96,11 @@ export async function captureState(packageDirs) {
111
96
  }));
112
97
  return { manifests, lockfileHashes };
113
98
  }
114
- /** Restores package.json files from a snapshot's manifest map. */
115
99
  export async function restoreState(entry) {
116
100
  await Promise.all(Object.entries(entry.manifests).map(async ([dir, content]) => {
117
- const manifestPath = path.join(dir, "package.json");
118
- const tmp = manifestPath + ".tmp";
119
- await fs.writeFile(tmp, content, "utf8");
120
- await fs.rename(tmp, manifestPath);
101
+ await writeFileAtomic(path.join(dir, "package.json"), content);
121
102
  }));
122
103
  }
123
- /** Computes a diff of dependency versions between two manifest snapshots. */
124
104
  export function diffManifests(before, after) {
125
105
  const changes = [];
126
106
  for (const [dir, afterJson] of Object.entries(after)) {
@@ -142,12 +122,15 @@ export function diffManifests(before, after) {
142
122
  "optionalDependencies",
143
123
  ];
144
124
  for (const field of fields) {
145
- const before = beforeManifest[field] ?? {};
146
- const after = afterManifest[field] ?? {};
147
- const allNames = new Set([...Object.keys(before), ...Object.keys(after)]);
125
+ const beforeDeps = beforeManifest[field] ?? {};
126
+ const afterDeps = afterManifest[field] ?? {};
127
+ const allNames = new Set([
128
+ ...Object.keys(beforeDeps),
129
+ ...Object.keys(afterDeps),
130
+ ]);
148
131
  for (const name of allNames) {
149
- const fromVer = before[name] ?? "(removed)";
150
- const toVer = after[name] ?? "(removed)";
132
+ const fromVer = beforeDeps[name] ?? "(removed)";
133
+ const toVer = afterDeps[name] ?? "(removed)";
151
134
  if (fromVer !== toVer) {
152
135
  changes.push({ name, from: fromVer, to: toVer });
153
136
  }
@@ -156,3 +139,8 @@ export function diffManifests(before, after) {
156
139
  }
157
140
  return changes;
158
141
  }
142
+ async function hashFile(filePath) {
143
+ const hasher = new Bun.CryptoHasher("sha256");
144
+ hasher.update(await Bun.file(filePath).bytes());
145
+ return hasher.digest("hex");
146
+ }
@@ -3,6 +3,11 @@ export function parseUnusedArgs(args) {
3
3
  const options = {
4
4
  cwd: process.cwd(),
5
5
  workspace: false,
6
+ affected: false,
7
+ staged: false,
8
+ baseRef: undefined,
9
+ headRef: undefined,
10
+ sinceRef: undefined,
6
11
  srcDirs: DEFAULT_SRC_DIRS,
7
12
  includeDevDependencies: true,
8
13
  fix: false,
@@ -24,6 +29,35 @@ export function parseUnusedArgs(args) {
24
29
  options.workspace = true;
25
30
  continue;
26
31
  }
32
+ if (current === "--affected") {
33
+ options.affected = true;
34
+ continue;
35
+ }
36
+ if (current === "--staged") {
37
+ options.staged = true;
38
+ continue;
39
+ }
40
+ if (current === "--base" && next) {
41
+ options.baseRef = next;
42
+ i++;
43
+ continue;
44
+ }
45
+ if (current === "--base")
46
+ throw new Error("Missing value for --base");
47
+ if (current === "--head" && next) {
48
+ options.headRef = next;
49
+ i++;
50
+ continue;
51
+ }
52
+ if (current === "--head")
53
+ throw new Error("Missing value for --head");
54
+ if (current === "--since" && next) {
55
+ options.sinceRef = next;
56
+ i++;
57
+ continue;
58
+ }
59
+ if (current === "--since")
60
+ throw new Error("Missing value for --since");
27
61
  if (current === "--src" && next) {
28
62
  options.srcDirs = next
29
63
  .split(",")
@@ -81,6 +115,11 @@ Usage:
81
115
  Options:
82
116
  --src <dirs> Comma-separated source directories to scan (default: src)
83
117
  --workspace Scan all workspace packages
118
+ --affected Scan changed workspace packages and their dependents
119
+ --staged Limit scanning to staged changes
120
+ --base <ref> Compare changes against a base git ref
121
+ --head <ref> Compare changes against a head git ref
122
+ --since <ref> Compare changes since a git ref
84
123
  --no-dev Exclude devDependencies from unused detection
85
124
  --fix Remove unused dependencies from package.json
86
125
  --dry-run Preview changes without writing
@@ -1,9 +1,9 @@
1
1
  import path from "node:path";
2
- import process from "node:process";
3
2
  import { readManifest, } from "../../parsers/package-json.js";
4
3
  import { discoverPackageDirs } from "../../workspace/discover.js";
5
4
  import { writeFileAtomic } from "../../utils/io.js";
6
5
  import { stableStringify } from "../../utils/stable-json.js";
6
+ import { writeStderr, writeStdout } from "../../utils/runtime.js";
7
7
  import { scanDirectory } from "./scanner.js";
8
8
  import { matchDependencies, removeUnusedFromManifest } from "./matcher.js";
9
9
  /**
@@ -24,7 +24,10 @@ export async function runUnused(options) {
24
24
  errors: [],
25
25
  warnings: [],
26
26
  };
27
- const packageDirs = await discoverPackageDirs(options.cwd, options.workspace);
27
+ const packageDirs = await discoverPackageDirs(options.cwd, options.workspace, {
28
+ git: options,
29
+ includeDependents: false,
30
+ });
28
31
  for (const packageDir of packageDirs) {
29
32
  // ─ Read manifest ─────────────────────────────────────────────────────────
30
33
  let manifest;
@@ -60,16 +63,15 @@ export async function runUnused(options) {
60
63
  // ─ Apply fix ─────────────────────────────────────────────────────────────
61
64
  if (options.fix && unused.length > 0) {
62
65
  if (options.dryRun) {
63
- process.stderr.write(`[unused] --dry-run: would remove ${unused.length} unused dep(s) from ${packageDir}/package.json\n`);
66
+ writeStderr(`[unused] --dry-run: would remove ${unused.length} unused dep(s) from ${packageDir}/package.json\n`);
64
67
  }
65
68
  else {
66
69
  try {
67
- const { promises: fs } = await import("node:fs");
68
70
  const manifestPath = path.join(packageDir, "package.json");
69
- const originalJson = await fs.readFile(manifestPath, "utf8");
71
+ const originalJson = await Bun.file(manifestPath).text();
70
72
  const updatedJson = removeUnusedFromManifest(originalJson, unused);
71
73
  await writeFileAtomic(manifestPath, updatedJson);
72
- process.stderr.write(`[unused] Removed ${unused.length} unused dep(s) from ${packageDir}/package.json\n`);
74
+ writeStderr(`[unused] Removed ${unused.length} unused dep(s) from ${packageDir}/package.json\n`);
73
75
  }
74
76
  catch (error) {
75
77
  result.errors.push(`Failed to update package.json in ${packageDir}: ${String(error)}`);
@@ -80,11 +82,11 @@ export async function runUnused(options) {
80
82
  result.totalUnused = result.unused.length;
81
83
  result.totalMissing = result.missing.length;
82
84
  // ─ Render output ─────────────────────────────────────────────────────────
83
- process.stdout.write(renderUnusedTable(result) + "\n");
85
+ writeStdout(renderUnusedTable(result) + "\n");
84
86
  // ─ JSON report ───────────────────────────────────────────────────────────
85
87
  if (options.jsonFile) {
86
88
  await writeFileAtomic(options.jsonFile, stableStringify(result, 2) + "\n");
87
- process.stderr.write(`[unused] JSON report written to ${options.jsonFile}\n`);
89
+ writeStderr(`[unused] JSON report written to ${options.jsonFile}\n`);
88
90
  }
89
91
  return result;
90
92
  }
@@ -1,10 +1,11 @@
1
1
  /**
2
- * Extracts all imported package names from a single source file.
2
+ * Extracts all imported package names from a single source file using AST.
3
3
  *
4
4
  * Handles:
5
5
  * - ESM static: import ... from "pkg"
6
6
  * - ESM dynamic: import("pkg")
7
7
  * - CJS: require("pkg")
8
+ * - ESM re-export: export ... from "pkg"
8
9
  *
9
10
  * Strips subpath imports (e.g. "lodash/merge" → "lodash"),
10
11
  * skips relative imports and node: builtins.
@@ -1,37 +1,63 @@
1
- import { promises as fs } from "node:fs";
2
1
  import path from "node:path";
2
+ import { parseSync } from "oxc-parser";
3
3
  /**
4
- * Extracts all imported package names from a single source file.
4
+ * Extracts all imported package names from a single source file using AST.
5
5
  *
6
6
  * Handles:
7
7
  * - ESM static: import ... from "pkg"
8
8
  * - ESM dynamic: import("pkg")
9
9
  * - CJS: require("pkg")
10
+ * - ESM re-export: export ... from "pkg"
10
11
  *
11
12
  * Strips subpath imports (e.g. "lodash/merge" → "lodash"),
12
13
  * skips relative imports and node: builtins.
13
14
  */
14
15
  export function extractImportsFromSource(source) {
15
16
  const names = new Set();
16
- // ESM static import: from "pkg" or from 'pkg'
17
- const staticImport = /from\s+['"]([^'"]+)['"]/g;
18
- for (const match of source.matchAll(staticImport)) {
19
- addPackageName(names, match[1]);
20
- }
21
- // ESM dynamic import: import("pkg") or import('pkg')
22
- const dynamicImport = /\bimport\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
23
- for (const match of source.matchAll(dynamicImport)) {
24
- addPackageName(names, match[1]);
25
- }
26
- // CJS require: require("pkg") or require('pkg')
27
- const cjsRequire = /\brequire\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
28
- for (const match of source.matchAll(cjsRequire)) {
29
- addPackageName(names, match[1]);
17
+ try {
18
+ const parseResult = parseSync("unknown.ts", source, {
19
+ sourceType: "module",
20
+ });
21
+ const walk = (node) => {
22
+ if (!node)
23
+ return;
24
+ if (node.type === "ImportDeclaration" && node.source?.value) {
25
+ addPackageName(names, node.source.value);
26
+ }
27
+ else if (node.type === "ExportNamedDeclaration" && node.source?.value) {
28
+ addPackageName(names, node.source.value);
29
+ }
30
+ else if (node.type === "ExportAllDeclaration" && node.source?.value) {
31
+ addPackageName(names, node.source.value);
32
+ }
33
+ else if (node.type === "ImportExpression" && node.source?.value) {
34
+ addPackageName(names, node.source.value);
35
+ }
36
+ else if (node.type === "CallExpression") {
37
+ if (node.callee?.type === "Identifier" &&
38
+ node.callee.name === "require" &&
39
+ node.arguments?.[0]?.type === "StringLiteral") {
40
+ addPackageName(names, node.arguments[0].value);
41
+ }
42
+ }
43
+ // Traverse children
44
+ for (const key in node) {
45
+ if (node[key] && typeof node[key] === "object") {
46
+ if (Array.isArray(node[key])) {
47
+ for (const child of node[key]) {
48
+ walk(child);
49
+ }
50
+ }
51
+ else {
52
+ walk(node[key]);
53
+ }
54
+ }
55
+ }
56
+ };
57
+ walk(parseResult.program);
30
58
  }
31
- // export ... from "pkg"
32
- const reExport = /\bexport\s+(?:\*|\{[^}]*\})\s+from\s+['"]([^'"]+)['"]/g;
33
- for (const match of source.matchAll(reExport)) {
34
- addPackageName(names, match[1]);
59
+ catch (err) {
60
+ // Fallback or ignore parse errors
35
61
  }
36
62
  return names;
37
63
  }
@@ -87,43 +113,30 @@ const IGNORED_DIRS = new Set([
87
113
  */
88
114
  export async function scanDirectory(dir) {
89
115
  const allImports = new Set();
90
- await walkDirectory(dir, allImports);
91
- return allImports;
92
- }
93
- async function walkDirectory(dir, collector) {
94
- let entries;
95
- try {
96
- entries = await fs.readdir(dir);
97
- }
98
- catch {
99
- return;
100
- }
116
+ const glob = new Bun.Glob("**/*.{ts,tsx,js,jsx,mjs,cjs,mts,cts}");
101
117
  const tasks = [];
102
- for (const entryName of entries) {
103
- if (IGNORED_DIRS.has(entryName))
118
+ for await (const file of glob.scan(dir)) {
119
+ // Bun.Glob returns relative paths
120
+ const fullPath = path.join(dir, file);
121
+ // Quick check to ignore certain directories in the path
122
+ if (fullPath.includes("/node_modules/") ||
123
+ fullPath.includes("/.git/") ||
124
+ fullPath.includes("/dist/") ||
125
+ fullPath.includes("/build/") ||
126
+ fullPath.includes("/out/") ||
127
+ fullPath.includes("/.next/") ||
128
+ fullPath.includes("/.nuxt/")) {
104
129
  continue;
105
- const fullPath = path.join(dir, entryName);
106
- tasks.push(fs
107
- .stat(fullPath)
108
- .then((stat) => {
109
- if (stat.isDirectory()) {
110
- return walkDirectory(fullPath, collector);
111
- }
112
- if (stat.isFile()) {
113
- const ext = path.extname(entryName).toLowerCase();
114
- if (!SOURCE_EXTENSIONS.has(ext))
115
- return;
116
- return fs
117
- .readFile(fullPath, "utf8")
118
- .then((source) => {
119
- for (const name of extractImportsFromSource(source)) {
120
- collector.add(name);
121
- }
122
- })
123
- .catch(() => undefined);
130
+ }
131
+ tasks.push(Bun.file(fullPath)
132
+ .text()
133
+ .then((source) => {
134
+ for (const name of extractImportsFromSource(source)) {
135
+ allImports.add(name);
124
136
  }
125
137
  })
126
138
  .catch(() => undefined));
127
139
  }
128
140
  await Promise.all(tasks);
141
+ return allImports;
129
142
  }
@@ -1,4 +1,4 @@
1
- import type { CiProfile, DependencyKind, FailOnLevel, GroupBy, LockfileMode, LogLevel, OutputFormat, TargetLevel } from "../types/index.js";
1
+ import type { CiProfile, SelectedPackageManager, DependencyKind, FailOnLevel, GroupBy, LockfileMode, LogLevel, OutputFormat, TargetLevel } from "../types/index.js";
2
2
  export interface FileConfig {
3
3
  target?: TargetLevel;
4
4
  filter?: string;
@@ -39,7 +39,7 @@ export interface FileConfig {
39
39
  showImpact?: boolean;
40
40
  showHomepage?: boolean;
41
41
  install?: boolean;
42
- packageManager?: "auto" | "npm" | "pnpm";
42
+ packageManager?: SelectedPackageManager;
43
43
  sync?: boolean;
44
44
  }
45
45
  export declare function loadConfig(cwd: string): Promise<FileConfig>;
@@ -1,4 +1,3 @@
1
- import { promises as fs } from "node:fs";
2
1
  import path from "node:path";
3
2
  export async function loadConfig(cwd) {
4
3
  const fromRc = await loadRcFile(cwd);
@@ -13,8 +12,7 @@ async function loadRcFile(cwd) {
13
12
  for (const candidate of candidates) {
14
13
  const filePath = path.join(cwd, candidate);
15
14
  try {
16
- const content = await fs.readFile(filePath, "utf8");
17
- return JSON.parse(content);
15
+ return (await Bun.file(filePath).json());
18
16
  }
19
17
  catch {
20
18
  // noop
@@ -25,8 +23,7 @@ async function loadRcFile(cwd) {
25
23
  async function loadPackageConfig(cwd) {
26
24
  const packagePath = path.join(cwd, "package.json");
27
25
  try {
28
- const content = await fs.readFile(packagePath, "utf8");
29
- const parsed = JSON.parse(content);
26
+ const parsed = (await Bun.file(packagePath).json());
30
27
  return parsed.rainyUpdates ?? {};
31
28
  }
32
29
  catch {
@@ -1,19 +1,24 @@
1
- import { promises as fs } from "node:fs";
2
1
  import path from "node:path";
3
2
  export async function loadPolicy(cwd, policyFile) {
4
- const candidates = policyFile ? [policyFile] : [
5
- path.join(cwd, ".rainyupdates-policy.json"),
6
- path.join(cwd, "rainy-updates.policy.json"),
7
- ];
3
+ const candidates = policyFile
4
+ ? [policyFile]
5
+ : [
6
+ path.join(cwd, ".rainyupdates-policy.json"),
7
+ path.join(cwd, "rainy-updates.policy.json"),
8
+ ];
8
9
  for (const candidate of candidates) {
9
- const filePath = path.isAbsolute(candidate) ? candidate : path.resolve(cwd, candidate);
10
+ const filePath = path.isAbsolute(candidate)
11
+ ? candidate
12
+ : path.resolve(cwd, candidate);
10
13
  try {
11
- const content = await fs.readFile(filePath, "utf8");
12
- const parsed = JSON.parse(content);
14
+ const parsed = (await Bun.file(filePath).json());
13
15
  return {
14
16
  ignorePatterns: parsed.ignore ?? [],
15
17
  cooldownDays: asNonNegativeInt(parsed.cooldownDays),
16
- packageRules: new Map(Object.entries(parsed.packageRules ?? {}).map(([pkg, rule]) => [pkg, normalizeRule(rule)])),
18
+ packageRules: new Map(Object.entries(parsed.packageRules ?? {}).map(([pkg, rule]) => [
19
+ pkg,
20
+ normalizeRule(rule),
21
+ ])),
17
22
  matchRules: Object.values(parsed.packageRules ?? {})
18
23
  .map((rule) => normalizeRule(rule))
19
24
  .filter((rule) => typeof rule.match === "string" && rule.match.length > 0),
@@ -44,7 +49,9 @@ function normalizeRule(rule) {
44
49
  maxUpdatesPerRun: asNonNegativeInt(rule.maxUpdatesPerRun),
45
50
  cooldownDays: asNonNegativeInt(rule.cooldownDays),
46
51
  allowPrerelease: rule.allowPrerelease === true,
47
- group: typeof rule.group === "string" && rule.group.trim().length > 0 ? rule.group.trim() : undefined,
52
+ group: typeof rule.group === "string" && rule.group.trim().length > 0
53
+ ? rule.group.trim()
54
+ : undefined,
48
55
  priority: asNonNegativeInt(rule.priority),
49
56
  target: rule.target,
50
57
  autofix: rule.autofix !== false,
@@ -60,7 +67,9 @@ function matchesPattern(value, pattern) {
60
67
  return false;
61
68
  if (pattern === "*")
62
69
  return true;
63
- const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&").replace(/\*/g, ".*");
70
+ const escaped = pattern
71
+ .replace(/[.+^${}()|[\]\\]/g, "\\$&")
72
+ .replace(/\*/g, ".*");
64
73
  const regex = new RegExp(`^${escaped}$`);
65
74
  return regex.test(value);
66
75
  }
@@ -1,4 +1,3 @@
1
- import process from "node:process";
2
1
  export async function runSilenced(fn) {
3
2
  const stdoutWrite = process.stdout.write.bind(process.stdout);
4
3
  const stderrWrite = process.stderr.write.bind(process.stderr);
@@ -1,10 +1,9 @@
1
- import crypto from "node:crypto";
2
1
  import path from "node:path";
3
2
  import { stableStringify } from "../utils/stable-json.js";
4
3
  import { writeFileAtomic } from "../utils/io.js";
5
4
  export function createRunId(command, options, result) {
6
- const hash = crypto.createHash("sha256");
7
- hash.update(stableStringify({
5
+ const hasher = new Bun.CryptoHasher("sha256");
6
+ hasher.update(stableStringify({
8
7
  command,
9
8
  cwd: path.resolve(options.cwd),
10
9
  target: options.target,
@@ -17,14 +16,15 @@ export function createRunId(command, options, result) {
17
16
  toRange: update.toRange,
18
17
  })),
19
18
  }, 0));
20
- return hash.digest("hex").slice(0, 16);
19
+ return hasher.digest("hex").slice(0, 16);
21
20
  }
22
21
  export async function writeArtifactManifest(command, options, result) {
23
22
  const shouldWrite = options.ci ||
24
23
  Boolean(options.jsonFile) ||
25
24
  Boolean(options.githubOutputFile) ||
26
25
  Boolean(options.sarifFile) ||
27
- Boolean(options.prReportFile);
26
+ Boolean(options.prReportFile) ||
27
+ Boolean(options.verificationReportFile);
28
28
  if (!shouldWrite)
29
29
  return null;
30
30
  const runId = result.summary.runId ?? createRunId(command, options, result);
@@ -41,6 +41,7 @@ export async function writeArtifactManifest(command, options, result) {
41
41
  githubOutputFile: options.githubOutputFile,
42
42
  sarifFile: options.sarifFile,
43
43
  prReportFile: options.prReportFile,
44
+ verificationReportFile: options.verificationReportFile,
44
45
  },
45
46
  };
46
47
  await writeFileAtomic(artifactManifestPath, stableStringify(manifest, 2) + "\n");
@@ -1,6 +1,6 @@
1
- import { promises as fs } from "node:fs";
2
1
  import path from "node:path";
3
2
  import { collectDependencies, readManifest } from "../parsers/package-json.js";
3
+ import { writeFileAtomic } from "../utils/io.js";
4
4
  import { discoverPackageDirs } from "../workspace/discover.js";
5
5
  export async function saveBaseline(options) {
6
6
  const entries = await collectBaselineEntries(options.cwd, options.workspace, options.includeKinds);
@@ -9,16 +9,14 @@ export async function saveBaseline(options) {
9
9
  createdAt: new Date().toISOString(),
10
10
  entries,
11
11
  };
12
- await fs.mkdir(path.dirname(options.filePath), { recursive: true });
13
- await fs.writeFile(options.filePath, JSON.stringify(payload, null, 2) + "\n", "utf8");
12
+ await writeFileAtomic(options.filePath, JSON.stringify(payload, null, 2) + "\n");
14
13
  return {
15
14
  filePath: options.filePath,
16
15
  entries: entries.length,
17
16
  };
18
17
  }
19
18
  export async function diffBaseline(options) {
20
- const content = await fs.readFile(options.filePath, "utf8");
21
- const baseline = JSON.parse(content);
19
+ const baseline = (await Bun.file(options.filePath).json());
22
20
  const currentEntries = await collectBaselineEntries(options.cwd, options.workspace, options.includeKinds);
23
21
  const baselineMap = new Map(baseline.entries.map((entry) => [toKey(entry), entry]));
24
22
  const currentMap = new Map(currentEntries.map((entry) => [toKey(entry), entry]));
@@ -1,5 +1,4 @@
1
1
  import path from "node:path";
2
- import process from "node:process";
3
2
  import { collectDependencies, readManifest } from "../parsers/package-json.js";
4
3
  import { matchesPattern } from "../utils/pattern.js";
5
4
  import { applyRangeStyle, classifyDiff, clampTarget, pickTargetVersionFromAvailable } from "../utils/semver.js";
@@ -11,6 +10,7 @@ import { loadPolicy, resolvePolicyRule } from "../config/policy.js";
11
10
  import { createSummary, finalizeSummary } from "./summary.js";
12
11
  import { applyImpactScores } from "./impact.js";
13
12
  import { formatClassifiedMessage } from "./errors.js";
13
+ import { writeStdout } from "../utils/runtime.js";
14
14
  export async function check(options) {
15
15
  const startedAt = Date.now();
16
16
  let discoveryMs = 0;
@@ -18,7 +18,11 @@ export async function check(options) {
18
18
  let registryMs = 0;
19
19
  const discoveryStartedAt = Date.now();
20
20
  const packageManager = await detectPackageManager(options.cwd);
21
- const packageDirs = await discoverPackageDirs(options.cwd, options.workspace);
21
+ const packageDirs = await discoverPackageDirs(options.cwd, options.workspace, {
22
+ git: options,
23
+ includeKinds: options.includeKinds,
24
+ includeDependents: options.affected === true,
25
+ });
22
26
  discoveryMs += Date.now() - discoveryStartedAt;
23
27
  const cache = await VersionCache.create();
24
28
  const registryClient = new NpmRegistryClient(options.cwd, {
@@ -48,7 +52,7 @@ export async function check(options) {
48
52
  if (!options.stream)
49
53
  return;
50
54
  streamedEvents += 1;
51
- process.stdout.write(`${message}\n`);
55
+ writeStdout(`${message}\n`);
52
56
  };
53
57
  for (const packageDir of packageDirs) {
54
58
  let manifest;