@ucdjs/release-scripts 0.1.0-beta.2 → 0.1.0-beta.21

This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between these versions as they appear in the public registry.
package/dist/index.mjs CHANGED
@@ -1,23 +1,76 @@
1
+ import { t as Eta } from "./eta-j5TFRbI4.mjs";
2
+ import { mkdir, readFile, rm, writeFile } from "node:fs/promises";
3
+ import { join, relative } from "node:path";
1
4
  import process from "node:process";
2
- import { getCommits } from "commit-parser";
3
- import createDebug from "debug";
5
+ import readline from "node:readline";
4
6
  import farver from "farver";
7
+ import mri from "mri";
5
8
  import { exec } from "tinyexec";
6
- import { readFile, writeFile } from "node:fs/promises";
7
- import { join } from "node:path";
9
+ import { dedent } from "@luxass/utils";
10
+ import { getCommits, groupByType } from "commit-parser";
8
11
  import prompts from "prompts";
12
+ import { compare, gt } from "semver";
9
13
 
10
- //#region src/logger.ts
11
- function createDebugger(namespace) {
12
- const debug$2 = createDebug(namespace);
13
- if (debug$2.enabled) return debug$2;
14
- }
14
+ //#region src/publish.ts
15
+ function publish(_options) {}
15
16
 
16
17
  //#endregion
17
- //#region src/utils.ts
18
- const globalOptions = { dryRun: false };
19
- async function run(bin, args, opts = {}) {
20
- return exec(bin, args, {
18
+ //#region src/shared/utils.ts
19
+ const args = mri(process.argv.slice(2));
20
+ const isDryRun = !!args.dry;
21
+ const isVerbose = !!args.verbose;
22
+ const isForce = !!args.force;
23
+ const ucdjsReleaseOverridesPath = ".github/ucdjs-release.overrides.json";
24
+ const isCI = typeof process.env.CI === "string" && process.env.CI !== "" && process.env.CI.toLowerCase() !== "false";
25
+ const logger = {
26
+ info: (...args$1) => {
27
+ console.info(...args$1);
28
+ },
29
+ warn: (...args$1) => {
30
+ console.warn(` ${farver.yellow("⚠")}`, ...args$1);
31
+ },
32
+ error: (...args$1) => {
33
+ console.error(` ${farver.red("✖")}`, ...args$1);
34
+ },
35
+ verbose: (...args$1) => {
36
+ if (!isVerbose) return;
37
+ if (args$1.length === 0) {
38
+ console.log();
39
+ return;
40
+ }
41
+ if (args$1.length > 1 && typeof args$1[0] === "string") {
42
+ console.log(farver.dim(args$1[0]), ...args$1.slice(1));
43
+ return;
44
+ }
45
+ console.log(...args$1);
46
+ },
47
+ section: (title) => {
48
+ console.log();
49
+ console.log(` ${farver.bold(title)}`);
50
+ console.log(` ${farver.gray("─".repeat(title.length + 2))}`);
51
+ },
52
+ emptyLine: () => {
53
+ console.log();
54
+ },
55
+ item: (message, ...args$1) => {
56
+ console.log(` ${message}`, ...args$1);
57
+ },
58
+ step: (message) => {
59
+ console.log(` ${farver.blue("→")} ${message}`);
60
+ },
61
+ success: (message) => {
62
+ console.log(` ${farver.green("✓")} ${message}`);
63
+ },
64
+ clearScreen: () => {
65
+ const repeatCount = process.stdout.rows - 2;
66
+ const blank = repeatCount > 0 ? "\n".repeat(repeatCount) : "";
67
+ console.log(blank);
68
+ readline.cursorTo(process.stdout, 0, 0);
69
+ readline.clearScreenDown(process.stdout);
70
+ }
71
+ };
72
+ async function run(bin, args$1, opts = {}) {
73
+ return exec(bin, args$1, {
21
74
  throwOnError: true,
22
75
  ...opts,
23
76
  nodeOptions: {
@@ -26,211 +79,27 @@ async function run(bin, args, opts = {}) {
26
79
  }
27
80
  });
28
81
  }
29
- async function dryRun(bin, args, opts) {
30
- return console.log(farver.blue(`[dryrun] ${bin} ${args.join(" ")}`), opts || "");
82
+ async function dryRun(bin, args$1, opts) {
83
+ return logger.verbose(farver.blue(`[dryrun] ${bin} ${args$1.join(" ")}`), opts || "");
31
84
  }
32
- const runIfNotDry = globalOptions.dryRun ? dryRun : run;
33
-
34
- //#endregion
35
- //#region src/commits.ts
36
- const debug$1 = createDebugger("ucdjs:release-scripts:commits");
37
- async function getLastPackageTag(packageName, workspaceRoot) {
38
- const { stdout } = await run("git", ["tag", "--list"], { nodeOptions: {
39
- cwd: workspaceRoot,
40
- stdio: "pipe"
41
- } });
42
- return stdout.split("\n").map((tag) => tag.trim()).filter(Boolean).reverse().find((tag) => tag.startsWith(`${packageName}@`));
43
- }
44
- function determineHighestBump(commits) {
45
- if (commits.length === 0) return "none";
46
- let highestBump = "none";
47
- for (const commit of commits) {
48
- const bump = determineBumpType(commit);
49
- if (bump === "major") return "major";
50
- if (bump === "minor") highestBump = "minor";
51
- else if (bump === "patch" && highestBump === "none") highestBump = "patch";
52
- }
53
- return highestBump;
85
+ const runIfNotDry = isDryRun ? dryRun : run;
86
+ function exitWithError(message, hint) {
87
+ logger.error(farver.bold(message));
88
+ if (hint) console.error(farver.gray(` ${hint}`));
89
+ process.exit(1);
54
90
  }
55
- async function getPackageCommits(pkg, workspaceRoot) {
56
- const lastTag = await getLastPackageTag(pkg.name, workspaceRoot);
57
- const allCommits = getCommits({
58
- from: lastTag,
59
- to: "HEAD"
91
+ if (isDryRun || isVerbose || isForce) {
92
+ logger.verbose(farver.inverse(farver.yellow(" Running with special flags ")));
93
+ logger.verbose({
94
+ isDryRun,
95
+ isVerbose,
96
+ isForce
60
97
  });
61
- debug$1?.(`Found ${allCommits.length} commits for ${pkg.name} since ${lastTag || "beginning"}`);
62
- const touchedCommitHashes = await getCommitsTouchingPackage(lastTag || "HEAD", "HEAD", pkg.path, workspaceRoot);
63
- const touchedSet = new Set(touchedCommitHashes);
64
- const packageCommits = allCommits.filter((commit) => touchedSet.has(commit.shortHash));
65
- debug$1?.(`${packageCommits.length} commits affect ${pkg.name}`);
66
- return packageCommits;
67
- }
68
- async function analyzePackageCommits(pkg, workspaceRoot) {
69
- return determineHighestBump(await getPackageCommits(pkg, workspaceRoot));
70
- }
71
- function determineBumpType(commit) {
72
- if (commit.isBreaking) return "major";
73
- if (!commit.isConventional || !commit.type) return "none";
74
- switch (commit.type) {
75
- case "feat": return "minor";
76
- case "fix":
77
- case "perf": return "patch";
78
- case "docs":
79
- case "style":
80
- case "refactor":
81
- case "test":
82
- case "build":
83
- case "ci":
84
- case "chore":
85
- case "revert": return "none";
86
- default: return "none";
87
- }
88
- }
89
- async function getCommitsTouchingPackage(from, to, packagePath, workspaceRoot) {
90
- try {
91
- const { stdout } = await run("git", [
92
- "log",
93
- "--pretty=format:%h",
94
- from === "HEAD" ? "HEAD" : `${from}...${to}`,
95
- "--",
96
- packagePath
97
- ], { nodeOptions: {
98
- cwd: workspaceRoot,
99
- stdio: "pipe"
100
- } });
101
- return stdout.split("\n").map((line) => line.trim()).filter(Boolean);
102
- } catch (error) {
103
- debug$1?.(`Error getting commits touching package: ${error}`);
104
- return [];
105
- }
106
- }
107
-
108
- //#endregion
109
- //#region src/validation.ts
110
- /**
111
- * Validation utilities for release scripts
112
- */
113
- function isValidSemver(version) {
114
- return /^\d+\.\d+\.\d+(?:[-+].+)?$/.test(version);
115
- }
116
- function validateSemver(version) {
117
- if (!isValidSemver(version)) throw new Error(`Invalid semver version: ${version}`);
118
- }
119
-
120
- //#endregion
121
- //#region src/version.ts
122
- /**
123
- * Calculate the new version based on current version and bump type
124
- * Pure function - no side effects, easily testable
125
- */
126
- function calculateNewVersion(currentVersion, bump) {
127
- if (bump === "none") return currentVersion;
128
- validateSemver(currentVersion);
129
- const match = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)(.*)$/);
130
- if (!match) throw new Error(`Invalid semver version: ${currentVersion}`);
131
- const [, major, minor, patch, suffix] = match;
132
- let newMajor = Number.parseInt(major, 10);
133
- let newMinor = Number.parseInt(minor, 10);
134
- let newPatch = Number.parseInt(patch, 10);
135
- switch (bump) {
136
- case "major":
137
- newMajor += 1;
138
- newMinor = 0;
139
- newPatch = 0;
140
- break;
141
- case "minor":
142
- newMinor += 1;
143
- newPatch = 0;
144
- break;
145
- case "patch":
146
- newPatch += 1;
147
- break;
148
- }
149
- return `${newMajor}.${newMinor}.${newPatch}`;
150
- }
151
- /**
152
- * Create a version update object
153
- */
154
- function createVersionUpdate(pkg, bump, hasDirectChanges) {
155
- const newVersion = calculateNewVersion(pkg.version, bump);
156
- return {
157
- package: pkg,
158
- currentVersion: pkg.version,
159
- newVersion,
160
- bumpType: bump,
161
- hasDirectChanges
162
- };
163
- }
164
- /**
165
- * Update a package.json file with new version and dependency versions
166
- */
167
- async function updatePackageJson(pkg, newVersion, dependencyUpdates) {
168
- const packageJsonPath = join(pkg.path, "package.json");
169
- const content = await readFile(packageJsonPath, "utf-8");
170
- const packageJson = JSON.parse(content);
171
- packageJson.version = newVersion;
172
- for (const [depName, depVersion] of dependencyUpdates) {
173
- if (packageJson.dependencies?.[depName]) {
174
- if (packageJson.dependencies[depName] === "workspace:*") continue;
175
- packageJson.dependencies[depName] = `^${depVersion}`;
176
- }
177
- if (packageJson.devDependencies?.[depName]) {
178
- if (packageJson.devDependencies[depName] === "workspace:*") continue;
179
- packageJson.devDependencies[depName] = `^${depVersion}`;
180
- }
181
- if (packageJson.peerDependencies?.[depName]) {
182
- if (packageJson.peerDependencies[depName] === "workspace:*") continue;
183
- packageJson.peerDependencies[depName] = `^${depVersion}`;
184
- }
185
- }
186
- await writeFile(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf-8");
187
- }
188
- /**
189
- * Get all dependency updates needed for a package
190
- */
191
- function getDependencyUpdates(pkg, allUpdates) {
192
- const updates = /* @__PURE__ */ new Map();
193
- const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
194
- for (const dep of allDeps) {
195
- const update = allUpdates.find((u) => u.package.name === dep);
196
- if (update) updates.set(dep, update.newVersion);
197
- }
198
- return updates;
199
- }
200
-
201
- //#endregion
202
- //#region src/dependencies.ts
203
- /**
204
- * Pure function: Determine which packages need updates due to dependency changes
205
- *
206
- * When a package is updated, all packages that depend on it should also be updated.
207
- * This function calculates which additional packages need patch bumps.
208
- *
209
- * @param updateOrder - Packages in topological order with their dependency levels
210
- * @param directUpdates - Packages with direct code changes
211
- * @returns All updates including dependent packages
212
- */
213
- function createDependentUpdates(updateOrder, directUpdates) {
214
- const allUpdates = [...directUpdates];
215
- const updatedPackages = new Set(directUpdates.map((u) => u.package.name));
216
- for (const { package: pkg } of updateOrder) {
217
- if (updatedPackages.has(pkg.name)) continue;
218
- if (hasUpdatedDependencies(pkg, updatedPackages)) {
219
- allUpdates.push(createVersionUpdate(pkg, "patch", false));
220
- updatedPackages.add(pkg.name);
221
- }
222
- }
223
- return allUpdates;
224
- }
225
- /**
226
- * Pure function: Check if a package has any updated dependencies
227
- */
228
- function hasUpdatedDependencies(pkg, updatedPackages) {
229
- return [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies].some((dep) => updatedPackages.has(dep));
98
+ logger.verbose();
230
99
  }
231
100
 
232
101
  //#endregion
233
- //#region src/git.ts
102
+ //#region src/core/git.ts
234
103
  /**
235
104
  * Check if the working directory is clean (no uncommitted changes)
236
105
  * @param {string} workspaceRoot - The root directory of the workspace
@@ -244,7 +113,7 @@ async function isWorkingDirectoryClean(workspaceRoot) {
244
113
  } })).stdout.trim() !== "") return false;
245
114
  return true;
246
115
  } catch (err) {
247
- console.error("Error checking git status:", err);
116
+ logger.error("Error checking git status:", err);
248
117
  return false;
249
118
  }
250
119
  }
@@ -270,83 +139,123 @@ async function doesBranchExist(branch, workspaceRoot) {
270
139
  }
271
140
  }
272
141
  /**
273
- * Pull latest changes from remote branch
274
- * @param branch - The branch name to pull from
275
- * @param workspaceRoot - The root directory of the workspace
276
- * @returns Promise resolving to true if pull succeeded, false otherwise
142
+ * Retrieves the default branch name from the remote repository.
143
+ * Falls back to "main" if the default branch cannot be determined.
144
+ * @returns {Promise<string>} A Promise resolving to the default branch name as a string.
277
145
  */
278
- async function pullLatestChanges(branch, workspaceRoot) {
146
+ async function getDefaultBranch(workspaceRoot) {
279
147
  try {
280
- await run("git", [
281
- "pull",
282
- "origin",
283
- branch
284
- ], { nodeOptions: {
148
+ const match = (await run("git", ["symbolic-ref", "refs/remotes/origin/HEAD"], { nodeOptions: {
285
149
  cwd: workspaceRoot,
286
150
  stdio: "pipe"
287
- } });
288
- return true;
151
+ } })).stdout.trim().match(/^refs\/remotes\/origin\/(.+)$/);
152
+ if (match && match[1]) return match[1];
153
+ return "main";
289
154
  } catch {
290
- return false;
155
+ return "main";
291
156
  }
292
157
  }
293
158
  /**
294
- * Create a new git branch
295
- * @param branch - The new branch name
296
- * @param base - The base branch to create from
297
- * @param workspaceRoot - The root directory of the workspace
159
+ * Retrieves the name of the current branch in the repository.
160
+ * @param {string} workspaceRoot - The root directory of the workspace
161
+ * @returns {Promise<string>} A Promise resolving to the current branch name as a string
298
162
  */
299
- async function createBranch(branch, base, workspaceRoot) {
300
- await runIfNotDry("git", [
301
- "checkout",
302
- "-b",
303
- branch,
304
- base
305
- ], { nodeOptions: { cwd: workspaceRoot } });
163
+ async function getCurrentBranch(workspaceRoot) {
164
+ try {
165
+ return (await run("git", [
166
+ "rev-parse",
167
+ "--abbrev-ref",
168
+ "HEAD"
169
+ ], { nodeOptions: {
170
+ cwd: workspaceRoot,
171
+ stdio: "pipe"
172
+ } })).stdout.trim();
173
+ } catch (err) {
174
+ logger.error("Error getting current branch:", err);
175
+ throw err;
176
+ }
306
177
  }
307
178
  /**
308
- * Checkout a git branch
309
- * @param branch - The branch name to checkout
310
- * @param workspaceRoot - The root directory of the workspace
311
- * @returns Promise resolving to true if checkout succeeded, false otherwise
179
+ * Retrieves the list of available branches in the repository.
180
+ * @param {string} workspaceRoot - The root directory of the workspace
181
+ * @returns {Promise<string[]>} A Promise resolving to an array of branch names
182
+ */
183
+ async function getAvailableBranches(workspaceRoot) {
184
+ try {
185
+ return (await run("git", ["branch", "--list"], { nodeOptions: {
186
+ cwd: workspaceRoot,
187
+ stdio: "pipe"
188
+ } })).stdout.split("\n").map((line) => line.replace("*", "").trim()).filter((line) => line.length > 0);
189
+ } catch (err) {
190
+ logger.error("Error getting available branches:", err);
191
+ throw err;
192
+ }
193
+ }
194
+ /**
195
+ * Creates a new branch from the specified base branch.
196
+ * @param {string} branch - The name of the new branch to create
197
+ * @param {string} base - The base branch to create the new branch from
198
+ * @param {string} workspaceRoot - The root directory of the workspace
199
+ * @returns {Promise<void>} A Promise that resolves when the branch is created
312
200
  */
201
+ async function createBranch(branch, base, workspaceRoot) {
202
+ try {
203
+ logger.info(`Creating branch: ${farver.green(branch)} from ${farver.cyan(base)}`);
204
+ await runIfNotDry("git", [
205
+ "branch",
206
+ branch,
207
+ base
208
+ ], { nodeOptions: {
209
+ cwd: workspaceRoot,
210
+ stdio: "pipe"
211
+ } });
212
+ } catch {
213
+ exitWithError(`Failed to create branch: ${branch}`, `Make sure the branch doesn't already exist and you have a clean working directory`);
214
+ }
215
+ }
313
216
  async function checkoutBranch(branch, workspaceRoot) {
314
217
  try {
315
- await run("git", ["checkout", branch], { nodeOptions: { cwd: workspaceRoot } });
316
- return true;
218
+ logger.info(`Switching to branch: ${farver.green(branch)}`);
219
+ const match = (await run("git", ["checkout", branch], { nodeOptions: {
220
+ cwd: workspaceRoot,
221
+ stdio: "pipe"
222
+ } })).stderr.trim().match(/Switched to branch '(.+)'/);
223
+ if (match && match[1] === branch) {
224
+ logger.info(`Successfully switched to branch: ${farver.green(branch)}`);
225
+ return true;
226
+ }
227
+ return false;
317
228
  } catch {
318
229
  return false;
319
230
  }
320
231
  }
321
- /**
322
- * Get the current branch name
323
- * @param workspaceRoot - The root directory of the workspace
324
- * @returns Promise resolving to the current branch name
325
- */
326
- async function getCurrentBranch(workspaceRoot) {
327
- return (await run("git", [
328
- "rev-parse",
329
- "--abbrev-ref",
330
- "HEAD"
331
- ], { nodeOptions: {
332
- cwd: workspaceRoot,
333
- stdio: "pipe"
334
- } })).stdout.trim();
232
+ async function pullLatestChanges(branch, workspaceRoot) {
233
+ try {
234
+ await run("git", [
235
+ "pull",
236
+ "origin",
237
+ branch
238
+ ], { nodeOptions: {
239
+ cwd: workspaceRoot,
240
+ stdio: "pipe"
241
+ } });
242
+ return true;
243
+ } catch {
244
+ return false;
245
+ }
335
246
  }
336
- /**
337
- * Rebase current branch onto another branch
338
- * @param ontoBranch - The target branch to rebase onto
339
- * @param workspaceRoot - The root directory of the workspace
340
- */
341
247
  async function rebaseBranch(ontoBranch, workspaceRoot) {
342
- await run("git", ["rebase", ontoBranch], { nodeOptions: { cwd: workspaceRoot } });
248
+ try {
249
+ logger.info(`Rebasing onto: ${farver.cyan(ontoBranch)}`);
250
+ await runIfNotDry("git", ["rebase", ontoBranch], { nodeOptions: {
251
+ cwd: workspaceRoot,
252
+ stdio: "pipe"
253
+ } });
254
+ return true;
255
+ } catch {
256
+ exitWithError(`Failed to rebase onto: ${ontoBranch}`, `You may have merge conflicts. Run 'git rebase --abort' to undo the rebase`);
257
+ }
343
258
  }
344
- /**
345
- * Check if local branch is ahead of remote (has commits to push)
346
- * @param branch - The branch name to check
347
- * @param workspaceRoot - The root directory of the workspace
348
- * @returns Promise resolving to true if local is ahead, false otherwise
349
- */
350
259
  async function isBranchAheadOfRemote(branch, workspaceRoot) {
351
260
  try {
352
261
  const result = await run("git", [
@@ -362,134 +271,398 @@ async function isBranchAheadOfRemote(branch, workspaceRoot) {
362
271
  return true;
363
272
  }
364
273
  }
365
- /**
366
- * Check if there are any changes to commit (staged or unstaged)
367
- * @param workspaceRoot - The root directory of the workspace
368
- * @returns Promise resolving to true if there are changes, false otherwise
369
- */
370
- async function hasChangesToCommit(workspaceRoot) {
371
- return (await run("git", ["status", "--porcelain"], { nodeOptions: {
372
- cwd: workspaceRoot,
373
- stdio: "pipe"
374
- } })).stdout.trim() !== "";
375
- }
376
- /**
377
- * Commit changes with a message
378
- * @param message - The commit message
379
- * @param workspaceRoot - The root directory of the workspace
380
- * @returns Promise resolving to true if commit was made, false if there were no changes
381
- */
382
274
  async function commitChanges(message, workspaceRoot) {
383
- await run("git", ["add", "."], { nodeOptions: { cwd: workspaceRoot } });
384
- if (!await hasChangesToCommit(workspaceRoot)) return false;
385
- await run("git", [
386
- "commit",
387
- "-m",
388
- message
389
- ], { nodeOptions: { cwd: workspaceRoot } });
390
- return true;
275
+ try {
276
+ await run("git", ["add", "."], { nodeOptions: {
277
+ cwd: workspaceRoot,
278
+ stdio: "pipe"
279
+ } });
280
+ if (await isWorkingDirectoryClean(workspaceRoot)) return false;
281
+ logger.info(`Committing changes: ${farver.dim(message)}`);
282
+ await runIfNotDry("git", [
283
+ "commit",
284
+ "-m",
285
+ message
286
+ ], { nodeOptions: {
287
+ cwd: workspaceRoot,
288
+ stdio: "pipe"
289
+ } });
290
+ return true;
291
+ } catch {
292
+ exitWithError(`Failed to commit changes`, `Make sure you have git configured properly with user.name and user.email`);
293
+ }
391
294
  }
392
- /**
393
- * Push branch to remote
394
- * @param branch - The branch name to push
395
- * @param workspaceRoot - The root directory of the workspace
396
- * @param options - Push options
397
- * @param options.force - Force push (overwrite remote)
398
- * @param options.forceWithLease - Force push with safety check (won't overwrite unexpected changes)
399
- */
400
295
  async function pushBranch(branch, workspaceRoot, options) {
401
- const args = [
402
- "push",
403
- "origin",
404
- branch
405
- ];
406
- if (options?.forceWithLease) args.push("--force-with-lease");
407
- else if (options?.force) args.push("--force");
408
- await run("git", args, { nodeOptions: { cwd: workspaceRoot } });
409
- }
410
- /**
411
- * Generate PR body from version updates
412
- * @param updates - Array of version updates to include in the PR body
413
- * @returns Formatted PR body as a string
414
- */
415
- function generatePRBody(updates) {
416
- const lines = [];
417
- lines.push("## Packages");
418
- lines.push("");
419
- const directChanges = updates.filter((u) => u.hasDirectChanges);
420
- const dependencyUpdates = updates.filter((u) => !u.hasDirectChanges);
421
- if (directChanges.length > 0) {
422
- lines.push("### Direct Changes");
423
- lines.push("");
424
- for (const update of directChanges) lines.push(`- **${update.package.name}**: ${update.currentVersion} → ${update.newVersion} (${update.bumpType})`);
425
- lines.push("");
426
- }
427
- if (dependencyUpdates.length > 0) {
428
- lines.push("### Dependency Updates");
429
- lines.push("");
430
- for (const update of dependencyUpdates) lines.push(`- **${update.package.name}**: ${update.currentVersion} → ${update.newVersion} (dependencies changed)`);
431
- lines.push("");
432
- }
433
- lines.push("---");
434
- lines.push("");
435
- lines.push("This release PR was automatically generated.");
436
- return lines.join("\n");
437
- }
438
-
439
- //#endregion
440
- //#region src/github.ts
441
- async function getExistingPullRequest({ owner, repo, branch, githubToken }) {
442
296
  try {
443
- const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/pulls?state=open&head=${branch}`, { headers: {
444
- Accept: "application/vnd.github.v3+json",
445
- Authorization: `token ${githubToken}`
297
+ const args$1 = [
298
+ "push",
299
+ "origin",
300
+ branch
301
+ ];
302
+ if (options?.forceWithLease) {
303
+ args$1.push("--force-with-lease");
304
+ logger.info(`Pushing branch: ${farver.green(branch)} ${farver.dim("(with lease)")}`);
305
+ } else if (options?.force) {
306
+ args$1.push("--force");
307
+ logger.info(`Force pushing branch: ${farver.green(branch)}`);
308
+ } else logger.info(`Pushing branch: ${farver.green(branch)}`);
309
+ await runIfNotDry("git", args$1, { nodeOptions: {
310
+ cwd: workspaceRoot,
311
+ stdio: "pipe"
446
312
  } });
447
- if (!res.ok) throw new Error(`GitHub API request failed with status ${res.status}`);
448
- const pulls = await res.json();
449
- if (pulls == null || !Array.isArray(pulls) || pulls.length === 0) return null;
450
- const firstPullRequest = pulls[0];
451
- if (typeof firstPullRequest !== "object" || firstPullRequest === null || !("number" in firstPullRequest) || typeof firstPullRequest.number !== "number" || !("title" in firstPullRequest) || typeof firstPullRequest.title !== "string" || !("body" in firstPullRequest) || typeof firstPullRequest.body !== "string" || !("draft" in firstPullRequest) || typeof firstPullRequest.draft !== "boolean" || !("html_url" in firstPullRequest) || typeof firstPullRequest.html_url !== "string") throw new TypeError("Pull request data validation failed");
452
- const pullRequest = {
453
- number: firstPullRequest.number,
454
- title: firstPullRequest.title,
455
- body: firstPullRequest.body,
456
- draft: firstPullRequest.draft,
457
- html_url: firstPullRequest.html_url
458
- };
459
- console.info(`Found existing pull request: ${farver.yellow(`#${pullRequest.number}`)}`);
460
- return pullRequest;
461
- } catch (err) {
462
- console.error("Error fetching pull request:", err);
463
- return null;
313
+ return true;
314
+ } catch {
315
+ exitWithError(`Failed to push branch: ${branch}`, `Make sure you have permission to push to the remote repository`);
464
316
  }
465
317
  }
466
- async function upsertPullRequest({ owner, repo, title, body, head, base, pullNumber, githubToken }) {
318
+ async function readFileFromGit(workspaceRoot, ref, filePath) {
467
319
  try {
468
- const isUpdate = pullNumber != null;
469
- const url = isUpdate ? `https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}` : `https://api.github.com/repos/${owner}/${repo}/pulls`;
470
- const method = isUpdate ? "PATCH" : "POST";
471
- const requestBody = isUpdate ? {
320
+ return (await run("git", ["show", `${ref}:${filePath}`], { nodeOptions: {
321
+ cwd: workspaceRoot,
322
+ stdio: "pipe"
323
+ } })).stdout;
324
+ } catch {
325
+ return null;
326
+ }
327
+ }
328
+ async function getMostRecentPackageTag(workspaceRoot, packageName) {
329
+ try {
330
+ const { stdout } = await run("git", [
331
+ "tag",
332
+ "--list",
333
+ `${packageName}@*`
334
+ ], { nodeOptions: {
335
+ cwd: workspaceRoot,
336
+ stdio: "pipe"
337
+ } });
338
+ const tags = stdout.split("\n").map((tag) => tag.trim()).filter(Boolean);
339
+ if (tags.length === 0) return;
340
+ return tags.reverse()[0];
341
+ } catch (err) {
342
+ logger.warn(`Failed to get tags for package ${packageName}: ${err.message}`);
343
+ return;
344
+ }
345
+ }
346
+ /**
347
+ * Builds a mapping of commit SHAs to the list of files changed in each commit
348
+ * within a given inclusive range.
349
+ *
350
+ * Internally runs:
351
+ * git log --name-only --format=%H <from>^..<to>
352
+ *
353
+ * Notes
354
+ * - This includes the commit identified by `from` (via `from^..to`).
355
+ * - Order of commits in the resulting Map follows `git log` output
356
+ * (reverse chronological, newest first).
357
+ * - On failure (e.g., invalid refs), the function returns null.
358
+ *
359
+ * @param {string} workspaceRoot Absolute path to the git repository root used as cwd.
360
+ * @param {string} from Starting commit/ref (inclusive).
361
+ * @param {string} to Ending commit/ref (inclusive).
362
+ * @returns {Promise<Map<string, string[]> | null>} Promise resolving to a Map where keys are commit SHAs and values are
363
+ * arrays of file paths changed by that commit, or null on error.
364
+ */
365
+ async function getGroupedFilesByCommitSha(workspaceRoot, from, to) {
366
+ const commitsMap = /* @__PURE__ */ new Map();
367
+ try {
368
+ const { stdout } = await run("git", [
369
+ "log",
370
+ "--name-only",
371
+ "--format=%H",
372
+ `${from}^..${to}`
373
+ ], { nodeOptions: {
374
+ cwd: workspaceRoot,
375
+ stdio: "pipe"
376
+ } });
377
+ const lines = stdout.trim().split("\n").filter((line) => line.trim() !== "");
378
+ let currentSha = null;
379
+ const HASH_REGEX = /^[0-9a-f]{40}$/i;
380
+ for (const line of lines) {
381
+ const trimmedLine = line.trim();
382
+ if (HASH_REGEX.test(trimmedLine)) {
383
+ currentSha = trimmedLine;
384
+ commitsMap.set(currentSha, []);
385
+ continue;
386
+ }
387
+ if (currentSha === null) continue;
388
+ commitsMap.get(currentSha).push(trimmedLine);
389
+ }
390
+ return commitsMap;
391
+ } catch {
392
+ return null;
393
+ }
394
+ }
395
+
396
+ //#endregion
397
+ //#region src/core/changelog.ts
398
+ const globalAuthorCache = /* @__PURE__ */ new Map();
399
+ const DEFAULT_CHANGELOG_TEMPLATE = dedent`
400
+ <% if (it.previousVersion) { -%>
401
+ ## [<%= it.version %>](<%= it.compareUrl %>) (<%= it.date %>)
402
+ <% } else { -%>
403
+ ## <%= it.version %> (<%= it.date %>)
404
+ <% } %>
405
+
406
+ <% it.groups.forEach((group) => { %>
407
+ <% if (group.commits.length > 0) { %>
408
+
409
+ ### <%= group.title %>
410
+ <% group.commits.forEach((commit) => { %>
411
+
412
+ * <%= commit.line %>
413
+ <% }); %>
414
+
415
+ <% } %>
416
+ <% }); %>
417
+ `;
418
+ async function generateChangelogEntry(options) {
419
+ const { packageName, version, previousVersion, date, commits, owner, repo, groups, template, githubClient } = options;
420
+ const compareUrl = previousVersion ? `https://github.com/${owner}/${repo}/compare/${packageName}@${previousVersion}...${packageName}@${version}` : void 0;
421
+ const grouped = groupByType(commits, {
422
+ includeNonConventional: false,
423
+ mergeKeys: Object.fromEntries(groups.map((g) => [g.name, g.types]))
424
+ });
425
+ const commitAuthors = await resolveCommitAuthors(commits, githubClient);
426
+ const templateData = {
427
+ packageName,
428
+ version,
429
+ previousVersion,
430
+ date,
431
+ compareUrl,
432
+ owner,
433
+ repo,
434
+ groups: groups.map((group) => {
435
+ const commitsInGroup = grouped.get(group.name) ?? [];
436
+ if (commitsInGroup.length > 0) logger.verbose(`Found ${commitsInGroup.length} commits for group "${group.name}".`);
437
+ const formattedCommits = commitsInGroup.map((commit) => ({ line: formatCommitLine({
438
+ commit,
439
+ owner,
440
+ repo,
441
+ authors: commitAuthors.get(commit.hash) ?? []
442
+ }) }));
443
+ return {
444
+ name: group.name,
445
+ title: group.title,
446
+ commits: formattedCommits
447
+ };
448
+ })
449
+ };
450
+ const eta = new Eta();
451
+ const templateToUse = template || DEFAULT_CHANGELOG_TEMPLATE;
452
+ return eta.renderString(templateToUse, templateData).trim();
453
+ }
454
+ async function updateChangelog(options) {
455
+ const { version, previousVersion, commits, date, normalizedOptions, workspacePackage, githubClient } = options;
456
+ const changelogPath = join(workspacePackage.path, "CHANGELOG.md");
457
+ const changelogRelativePath = relative(normalizedOptions.workspaceRoot, join(workspacePackage.path, "CHANGELOG.md"));
458
+ const existingContent = await readFileFromGit(normalizedOptions.workspaceRoot, normalizedOptions.branch.default, changelogRelativePath);
459
+ logger.verbose("Existing content found: ", Boolean(existingContent));
460
+ const newEntry = await generateChangelogEntry({
461
+ packageName: workspacePackage.name,
462
+ version,
463
+ previousVersion,
464
+ date,
465
+ commits,
466
+ owner: normalizedOptions.owner,
467
+ repo: normalizedOptions.repo,
468
+ groups: normalizedOptions.groups,
469
+ template: normalizedOptions.changelog?.template,
470
+ githubClient
471
+ });
472
+ let updatedContent;
473
+ if (!existingContent) {
474
+ updatedContent = `# ${workspacePackage.name}\n\n${newEntry}\n`;
475
+ await writeFile(changelogPath, updatedContent, "utf-8");
476
+ return;
477
+ }
478
+ const parsed = parseChangelog(existingContent);
479
+ const lines = existingContent.split("\n");
480
+ const existingVersionIndex = parsed.versions.findIndex((v) => v.version === version);
481
+ if (existingVersionIndex !== -1) {
482
+ const existingVersion = parsed.versions[existingVersionIndex];
483
+ const before = lines.slice(0, existingVersion.lineStart);
484
+ const after = lines.slice(existingVersion.lineEnd + 1);
485
+ updatedContent = [
486
+ ...before,
487
+ newEntry,
488
+ ...after
489
+ ].join("\n");
490
+ } else {
491
+ const insertAt = parsed.headerLineEnd + 1;
492
+ const before = lines.slice(0, insertAt);
493
+ const after = lines.slice(insertAt);
494
+ if (before.length > 0 && before[before.length - 1] !== "") before.push("");
495
+ updatedContent = [
496
+ ...before,
497
+ newEntry,
498
+ "",
499
+ ...after
500
+ ].join("\n");
501
+ }
502
+ await writeFile(changelogPath, updatedContent, "utf-8");
503
+ }
504
+ async function resolveCommitAuthors(commits, githubClient) {
505
+ const authorsToResolve = /* @__PURE__ */ new Set();
506
+ const commitAuthors = /* @__PURE__ */ new Map();
507
+ for (const commit of commits) {
508
+ const authorsForCommit = [];
509
+ commit.authors.forEach((author, idx) => {
510
+ if (!author.email || !author.name) return;
511
+ let info = globalAuthorCache.get(author.email);
512
+ if (!info) {
513
+ info = {
514
+ commits: [],
515
+ name: author.name,
516
+ email: author.email
517
+ };
518
+ globalAuthorCache.set(author.email, info);
519
+ }
520
+ if (idx === 0) info.commits.push(commit.shortHash);
521
+ authorsForCommit.push(info);
522
+ if (!info.login) authorsToResolve.add(info);
523
+ });
524
+ commitAuthors.set(commit.hash, authorsForCommit);
525
+ }
526
+ await Promise.all(Array.from(authorsToResolve).map((info) => githubClient.resolveAuthorInfo(info)));
527
+ return commitAuthors;
528
+ }
529
+ function formatCommitLine({ commit, owner, repo, authors }) {
530
+ const commitUrl = `https://github.com/${owner}/${repo}/commit/${commit.hash}`;
531
+ let line = `${commit.description}`;
532
+ const references = commit.references ?? [];
533
+ if (references.length > 0) logger.verbose("Located references in commit", references.length);
534
+ for (const ref of references) {
535
+ if (!ref.value) continue;
536
+ const number = Number.parseInt(ref.value.replace(/^#/, ""), 10);
537
+ if (Number.isNaN(number)) continue;
538
+ if (ref.type === "issue") {
539
+ line += ` ([Issue ${ref.value}](https://github.com/${owner}/${repo}/issues/${number}))`;
540
+ continue;
541
+ }
542
+ line += ` ([PR ${ref.value}](https://github.com/${owner}/${repo}/pull/${number}))`;
543
+ }
544
+ line += ` ([${commit.shortHash}](${commitUrl}))`;
545
+ if (authors.length > 0) {
546
+ const authorList = authors.map((author) => {
547
+ if (author.login) return `[@${author.login}](https://github.com/${author.login})`;
548
+ return author.name;
549
+ }).join(", ");
550
+ line += ` (by ${authorList})`;
551
+ }
552
+ return line;
553
+ }
554
+ function parseChangelog(content) {
555
+ const lines = content.split("\n");
556
+ let packageName = null;
557
+ let headerLineEnd = -1;
558
+ const versions = [];
559
+ for (let i = 0; i < lines.length; i++) {
560
+ const line = lines[i].trim();
561
+ if (line.startsWith("# ")) {
562
+ packageName = line.slice(2).trim();
563
+ headerLineEnd = i;
564
+ break;
565
+ }
566
+ }
567
+ for (let i = headerLineEnd + 1; i < lines.length; i++) {
568
+ const line = lines[i].trim();
569
+ if (line.startsWith("## ")) {
570
+ const versionMatch = line.match(/##\s+(?:<small>)?\[?([^\](\s<]+)/);
571
+ if (versionMatch) {
572
+ const version = versionMatch[1];
573
+ const lineStart = i;
574
+ let lineEnd = lines.length - 1;
575
+ for (let j = i + 1; j < lines.length; j++) if (lines[j].trim().startsWith("## ")) {
576
+ lineEnd = j - 1;
577
+ break;
578
+ }
579
+ const versionContent = lines.slice(lineStart, lineEnd + 1).join("\n");
580
+ versions.push({
581
+ version,
582
+ lineStart,
583
+ lineEnd,
584
+ content: versionContent
585
+ });
586
+ }
587
+ }
588
+ }
589
+ return {
590
+ packageName,
591
+ versions,
592
+ headerLineEnd
593
+ };
594
+ }
595
+
596
+ //#endregion
597
+ //#region src/core/github.ts
598
+ var GitHubClient = class {
599
+ owner;
600
+ repo;
601
+ githubToken;
602
+ apiBase = "https://api.github.com";
603
+ constructor({ owner, repo, githubToken }) {
604
+ this.owner = owner;
605
+ this.repo = repo;
606
+ this.githubToken = githubToken;
607
+ }
608
+ async request(path, init = {}) {
609
+ const url = path.startsWith("http") ? path : `${this.apiBase}${path}`;
610
+ const res = await fetch(url, {
611
+ ...init,
612
+ headers: {
613
+ ...init.headers,
614
+ "Accept": "application/vnd.github.v3+json",
615
+ "Authorization": `token ${this.githubToken}`,
616
+ "User-Agent": "ucdjs-release-scripts (+https://github.com/ucdjs/ucdjs-release-scripts)"
617
+ }
618
+ });
619
+ if (!res.ok) {
620
+ const errorText = await res.text();
621
+ throw new Error(`GitHub API request failed with status ${res.status}: ${errorText || "No response body"}`);
622
+ }
623
+ if (res.status === 204) return;
624
+ return res.json();
625
+ }
626
+ async getExistingPullRequest(branch) {
627
+ const head = branch.includes(":") ? branch : `${this.owner}:${branch}`;
628
+ const endpoint = `/repos/${this.owner}/${this.repo}/pulls?state=open&head=${encodeURIComponent(head)}`;
629
+ logger.verbose(`Requesting pull request for branch: ${branch} (url: ${this.apiBase}${endpoint})`);
630
+ const pulls = await this.request(endpoint);
631
+ if (!Array.isArray(pulls) || pulls.length === 0) return null;
632
+ const firstPullRequest = pulls[0];
633
+ if (typeof firstPullRequest !== "object" || firstPullRequest === null || !("number" in firstPullRequest) || typeof firstPullRequest.number !== "number" || !("title" in firstPullRequest) || typeof firstPullRequest.title !== "string" || !("body" in firstPullRequest) || typeof firstPullRequest.body !== "string" || !("draft" in firstPullRequest) || typeof firstPullRequest.draft !== "boolean" || !("html_url" in firstPullRequest) || typeof firstPullRequest.html_url !== "string") throw new TypeError("Pull request data validation failed");
634
+ const pullRequest = {
635
+ number: firstPullRequest.number,
636
+ title: firstPullRequest.title,
637
+ body: firstPullRequest.body,
638
+ draft: firstPullRequest.draft,
639
+ html_url: firstPullRequest.html_url,
640
+ head: "head" in firstPullRequest && typeof firstPullRequest.head === "object" && firstPullRequest.head !== null && "sha" in firstPullRequest.head && typeof firstPullRequest.head.sha === "string" ? { sha: firstPullRequest.head.sha } : void 0
641
+ };
642
+ logger.info(`Found existing pull request: ${farver.yellow(`#${pullRequest.number}`)}`);
643
+ return pullRequest;
644
+ }
645
+ async upsertPullRequest({ title, body, head, base, pullNumber }) {
646
+ const isUpdate = typeof pullNumber === "number";
647
+ const endpoint = isUpdate ? `/repos/${this.owner}/${this.repo}/pulls/${pullNumber}` : `/repos/${this.owner}/${this.repo}/pulls`;
648
+ const requestBody = isUpdate ? {
472
649
  title,
473
650
  body
474
651
  } : {
475
652
  title,
476
653
  body,
477
654
  head,
478
- base
655
+ base,
656
+ draft: true
479
657
  };
480
- const res = await fetch(url, {
481
- method,
482
- headers: {
483
- Accept: "application/vnd.github.v3+json",
484
- Authorization: `token ${githubToken}`
485
- },
658
+ logger.verbose(`${isUpdate ? "Updating" : "Creating"} pull request (url: ${this.apiBase}${endpoint})`);
659
+ const pr = await this.request(endpoint, {
660
+ method: isUpdate ? "PATCH" : "POST",
486
661
  body: JSON.stringify(requestBody)
487
662
  });
488
- if (!res.ok) throw new Error(`GitHub API request failed with status ${res.status}`);
489
- const pr = await res.json();
490
663
  if (typeof pr !== "object" || pr === null || !("number" in pr) || typeof pr.number !== "number" || !("title" in pr) || typeof pr.title !== "string" || !("body" in pr) || typeof pr.body !== "string" || !("draft" in pr) || typeof pr.draft !== "boolean" || !("html_url" in pr) || typeof pr.html_url !== "string") throw new TypeError("Pull request data validation failed");
491
664
  const action = isUpdate ? "Updated" : "Created";
492
- console.info(`${action} pull request: ${farver.yellow(`#${pr.number}`)}`);
665
+ logger.info(`${action} pull request: ${farver.yellow(`#${pr.number}`)}`);
493
666
  return {
494
667
  number: pr.number,
495
668
  title: pr.title,
@@ -497,129 +670,266 @@ async function upsertPullRequest({ owner, repo, title, body, head, base, pullNum
497
670
  draft: pr.draft,
498
671
  html_url: pr.html_url
499
672
  };
500
- } catch (err) {
501
- console.error(`Error upserting pull request:`, err);
502
- throw err;
503
673
  }
674
+ async setCommitStatus({ sha, state, targetUrl, description, context }) {
675
+ const endpoint = `/repos/${this.owner}/${this.repo}/statuses/${sha}`;
676
+ logger.verbose(`Setting commit status on ${sha} to ${state} (url: ${this.apiBase}${endpoint})`);
677
+ await this.request(endpoint, {
678
+ method: "POST",
679
+ body: JSON.stringify({
680
+ state,
681
+ target_url: targetUrl,
682
+ description: description || "",
683
+ context
684
+ })
685
+ });
686
+ logger.info(`Commit status set to ${farver.cyan(state)} for ${farver.gray(sha.substring(0, 7))}`);
687
+ }
688
+ async resolveAuthorInfo(info) {
689
+ if (info.login) return info;
690
+ try {
691
+ const q = encodeURIComponent(`${info.email} type:user in:email`);
692
+ const data = await this.request(`/search/users?q=${q}`);
693
+ if (!data.items || data.items.length === 0) return info;
694
+ info.login = data.items[0].login;
695
+ } catch (err) {
696
+ logger.warn(`Failed to resolve author info for email ${info.email}: ${err.message}`);
697
+ }
698
+ if (info.login) return info;
699
+ if (info.commits.length > 0) try {
700
+ const data = await this.request(`/repos/${this.owner}/${this.repo}/commits/${info.commits[0]}`);
701
+ if (data.author && data.author.login) info.login = data.author.login;
702
+ } catch (err) {
703
+ logger.warn(`Failed to resolve author info from commits for email ${info.email}: ${err.message}`);
704
+ }
705
+ return info;
706
+ }
707
+ };
708
+ function createGitHubClient(options) {
709
+ return new GitHubClient(options);
710
+ }
711
+ const DEFAULT_PR_BODY_TEMPLATE = dedent`
712
+ This PR was automatically generated by the release script.
713
+
714
+ The following packages have been prepared for release:
715
+
716
+ <% it.packages.forEach((pkg) => { %>
717
+ - **<%= pkg.name %>**: <%= pkg.currentVersion %> → <%= pkg.newVersion %> (<%= pkg.bumpType %>)
718
+ <% }) %>
719
+
720
+ Please review the changes and merge when ready.
721
+
722
+ For a more in-depth look at the changes, please refer to the individual package changelogs.
723
+
724
+ > [!NOTE]
725
+ > When this PR is merged, the release process will be triggered automatically, publishing the new package versions to the registry.
726
+ `;
727
+ function dedentString(str) {
728
+ const lines = str.split("\n");
729
+ const minIndent = lines.filter((line) => line.trim().length > 0).reduce((min, line) => Math.min(min, line.search(/\S/)), Infinity);
730
+ return lines.map((line) => minIndent === Infinity ? line : line.slice(minIndent)).join("\n").trim();
731
+ }
732
+ function generatePullRequestBody(updates, body) {
733
+ const eta = new Eta();
734
+ const bodyTemplate = body ? dedentString(body) : DEFAULT_PR_BODY_TEMPLATE;
735
+ return eta.renderString(bodyTemplate, { packages: updates.map((u) => ({
736
+ name: u.package.name,
737
+ currentVersion: u.currentVersion,
738
+ newVersion: u.newVersion,
739
+ bumpType: u.bumpType,
740
+ hasDirectChanges: u.hasDirectChanges
741
+ })) });
504
742
  }
505
743
 
506
744
  //#endregion
507
- //#region src/prompts.ts
508
- async function promptPackageSelection(packages) {
509
- const response = await prompts({
510
- type: "multiselect",
511
- name: "selectedPackages",
512
- message: "Select packages to release",
513
- choices: packages.map((pkg) => ({
514
- title: `${pkg.name} (${pkg.version})`,
515
- value: pkg.name,
516
- selected: true
517
- })),
518
- min: 1,
519
- hint: "Space to select/deselect. Return to submit."
520
- });
521
- if (!response.selectedPackages || response.selectedPackages.length === 0) throw new Error("No packages selected");
522
- return response.selectedPackages;
745
+ //#region src/versioning/commits.ts
746
+ function determineHighestBump(commits) {
747
+ if (commits.length === 0) return "none";
748
+ let highestBump = "none";
749
+ for (const commit of commits) {
750
+ const bump = determineBumpType(commit);
751
+ if (bump === "major") return "major";
752
+ if (bump === "minor") highestBump = "minor";
753
+ else if (bump === "patch" && highestBump === "none") highestBump = "patch";
754
+ }
755
+ return highestBump;
523
756
  }
524
- async function promptVersionOverride(packageName, currentVersion, suggestedVersion, suggestedBumpType) {
525
- const choices = [{
526
- title: `Use suggested: ${suggestedVersion} (${suggestedBumpType})`,
527
- value: "suggested"
528
- }];
529
- for (const bumpType of [
530
- "patch",
531
- "minor",
532
- "major"
533
- ]) if (bumpType !== suggestedBumpType) {
534
- const version = calculateNewVersion(currentVersion, bumpType);
535
- choices.push({
536
- title: `${bumpType}: ${version}`,
537
- value: bumpType
757
+ /**
758
+ * Get commits grouped by workspace package.
759
+ * For each package, retrieves all commits since its last release tag that affect that package.
760
+ *
761
+ * @param {string} workspaceRoot - The root directory of the workspace
762
+ * @param {WorkspacePackage[]} packages - Array of workspace packages to analyze
763
+ * @returns {Promise<Map<string, GitCommit[]>>} A map of package names to their commits since their last release
764
+ */
765
+ async function getWorkspacePackageGroupedCommits(workspaceRoot, packages) {
766
+ const changedPackages = /* @__PURE__ */ new Map();
767
+ const promises = packages.map(async (pkg) => {
768
+ const lastTag = await getMostRecentPackageTag(workspaceRoot, pkg.name);
769
+ const allCommits = await getCommits({
770
+ from: lastTag,
771
+ to: "HEAD",
772
+ cwd: workspaceRoot,
773
+ folder: pkg.path
538
774
  });
539
- }
540
- choices.push({
541
- title: "Custom version",
542
- value: "custom"
775
+ logger.verbose(`Found ${farver.cyan(allCommits.length)} commits for package ${farver.bold(pkg.name)} since tag ${farver.cyan(lastTag ?? "N/A")}`);
776
+ return {
777
+ pkgName: pkg.name,
778
+ commits: allCommits
779
+ };
543
780
  });
544
- const response = await prompts([{
545
- type: "select",
546
- name: "choice",
547
- message: `${packageName} (${currentVersion}):`,
548
- choices,
549
- initial: 0
550
- }, {
551
- type: (prev) => prev === "custom" ? "text" : null,
552
- name: "customVersion",
553
- message: "Enter custom version:",
554
- initial: suggestedVersion,
555
- validate: (value) => {
556
- return /^\d+\.\d+\.\d+(?:[-+].+)?$/.test(value) || "Invalid semver version (e.g., 1.0.0)";
557
- }
558
- }]);
559
- if (response.choice === "suggested") return suggestedVersion;
560
- else if (response.choice === "custom") return response.customVersion;
561
- else return calculateNewVersion(currentVersion, response.choice);
781
+ const results = await Promise.all(promises);
782
+ for (const { pkgName, commits } of results) changedPackages.set(pkgName, commits);
783
+ return changedPackages;
562
784
  }
563
- async function promptVersionOverrides(packages) {
564
- const overrides = /* @__PURE__ */ new Map();
565
- for (const pkg of packages) {
566
- const newVersion = await promptVersionOverride(pkg.name, pkg.currentVersion, pkg.suggestedVersion, pkg.bumpType);
567
- overrides.set(pkg.name, newVersion);
785
+ /**
786
+ * Check if a file path touches any package folder.
787
+ * @param file - The file path to check
788
+ * @param packagePaths - Set of normalized package paths
789
+ * @param workspaceRoot - The workspace root for path normalization
790
+ * @returns true if the file is inside a package folder
791
+ */
792
+ function fileMatchesPackageFolder(file, packagePaths, workspaceRoot) {
793
+ const normalizedFile = file.startsWith("./") ? file.slice(2) : file;
794
+ for (const pkgPath of packagePaths) {
795
+ const normalizedPkgPath = pkgPath.startsWith(workspaceRoot) ? pkgPath.slice(workspaceRoot.length + 1) : pkgPath;
796
+ if (normalizedFile.startsWith(`${normalizedPkgPath}/`) || normalizedFile === normalizedPkgPath) return true;
568
797
  }
569
- return overrides;
798
+ return false;
570
799
  }
571
-
572
- //#endregion
573
- //#region src/workspace.ts
574
- const debug = createDebugger("ucdjs:release-scripts:workspace");
575
- function shouldIncludePackage(pkg, options) {
576
- if (!options) return true;
577
- if (options.excludePrivate && pkg.private) return false;
578
- if (options.included && options.included.length > 0) {
579
- if (!options.included.includes(pkg.name)) return false;
800
+ /**
801
+ * Check if a commit is a "global" commit (doesn't touch any package folder).
802
+ * @param workspaceRoot - The workspace root
803
+ * @param files - Array of files changed in the commit
804
+ * @param packagePaths - Set of normalized package paths
805
+ * @returns true if this is a global commit
806
+ */
807
+ function isGlobalCommit(workspaceRoot, files, packagePaths) {
808
+ if (!files || files.length === 0) return false;
809
+ return !files.some((file) => fileMatchesPackageFolder(file, packagePaths, workspaceRoot));
810
+ }
811
+ const DEPENDENCY_FILES = [
812
+ "package.json",
813
+ "pnpm-lock.yaml",
814
+ "pnpm-workspace.yaml",
815
+ "yarn.lock",
816
+ "package-lock.json"
817
+ ];
818
+ /**
819
+ * Find the oldest and newest commits across all packages.
820
+ * @param packageCommits - Map of package commits
821
+ * @returns Object with oldest and newest commit SHAs, or null if no commits
822
+ */
823
+ function findCommitRange(packageCommits) {
824
+ let oldestCommit = null;
825
+ let newestCommit = null;
826
+ for (const commits of packageCommits.values()) {
827
+ if (commits.length === 0) continue;
828
+ const firstCommit = commits[0].shortHash;
829
+ const lastCommit = commits[commits.length - 1].shortHash;
830
+ if (!newestCommit) newestCommit = firstCommit;
831
+ oldestCommit = lastCommit;
580
832
  }
581
- if (options.excluded?.includes(pkg.name)) return false;
582
- return true;
833
+ if (!oldestCommit || !newestCommit) return null;
834
+ return {
835
+ oldest: oldestCommit,
836
+ newest: newestCommit
837
+ };
583
838
  }
584
- async function findWorkspacePackages(workspaceRoot, options) {
585
- const result = await run("pnpm", [
586
- "-r",
587
- "ls",
588
- "--json"
589
- ], { nodeOptions: {
590
- cwd: workspaceRoot,
591
- stdio: "pipe"
592
- } });
593
- const rawProjects = JSON.parse(result.stdout);
594
- const packages = [];
595
- const allPackageNames = new Set(rawProjects.map((p) => p.name));
596
- for (const rawProject of rawProjects) {
597
- const content = await readFile(join(rawProject.path, "package.json"), "utf-8");
598
- const packageJson = JSON.parse(content);
599
- if (!shouldIncludePackage(packageJson, options)) {
600
- debug?.(`Excluding package ${rawProject.name}`);
839
+ /**
840
+ * Get global commits for each package based on their individual commit timelines.
841
+ * This solves the problem where packages with different release histories need different global commits.
842
+ *
843
+ * A "global commit" is a commit that doesn't touch any package folder but may affect all packages
844
+ * (e.g., root package.json, CI config, README).
845
+ *
846
+ * Performance: Makes ONE batched git call to get files for all commits across all packages.
847
+ *
848
+ * @param workspaceRoot - The root directory of the workspace
849
+ * @param packageCommits - Map of package name to their commits (from getWorkspacePackageCommits)
850
+ * @param allPackages - All workspace packages (used to identify package folders)
851
+ * @param mode - Filter mode: false (disabled), "all" (all global commits), or "dependencies" (only dependency-related)
852
+ * @returns Map of package name to their global commits
853
+ */
854
+ async function getGlobalCommitsPerPackage(workspaceRoot, packageCommits, allPackages, mode) {
855
+ const result = /* @__PURE__ */ new Map();
856
+ if (!mode) {
857
+ logger.verbose("Global commits mode disabled");
858
+ return result;
859
+ }
860
+ logger.verbose(`Computing global commits per-package (mode: ${farver.cyan(mode)})`);
861
+ const commitRange = findCommitRange(packageCommits);
862
+ if (!commitRange) {
863
+ logger.verbose("No commits found across packages");
864
+ return result;
865
+ }
866
+ logger.verbose("Fetching files for commits range", `${farver.cyan(commitRange.oldest)}..${farver.cyan(commitRange.newest)}`);
867
+ const commitFilesMap = await getGroupedFilesByCommitSha(workspaceRoot, commitRange.oldest, commitRange.newest);
868
+ if (!commitFilesMap) {
869
+ logger.warn("Failed to get commit file list, returning empty global commits");
870
+ return result;
871
+ }
872
+ logger.verbose("Got file lists for commits", `${farver.cyan(commitFilesMap.size)} commits in ONE git call`);
873
+ const packagePaths = new Set(allPackages.map((p) => p.path));
874
+ for (const [pkgName, commits] of packageCommits) {
875
+ const globalCommitsAffectingPackage = [];
876
+ logger.verbose("Filtering global commits for package", `${farver.bold(pkgName)} from ${farver.cyan(commits.length)} commits`);
877
+ for (const commit of commits) {
878
+ const files = commitFilesMap.get(commit.shortHash);
879
+ if (!files) continue;
880
+ if (isGlobalCommit(workspaceRoot, files, packagePaths)) globalCommitsAffectingPackage.push(commit);
881
+ }
882
+ logger.verbose("Package global commits found", `${farver.bold(pkgName)}: ${farver.cyan(globalCommitsAffectingPackage.length)} global commits`);
883
+ if (mode === "all") {
884
+ result.set(pkgName, globalCommitsAffectingPackage);
601
885
  continue;
602
886
  }
603
- const workspaceDeps = extractWorkspaceDependencies(rawProject.dependencies, allPackageNames);
604
- const workspaceDevDeps = extractWorkspaceDependencies(rawProject.devDependencies, allPackageNames);
605
- packages.push({
606
- name: rawProject.name,
607
- version: rawProject.version,
608
- path: rawProject.path,
609
- packageJson,
610
- workspaceDependencies: workspaceDeps,
611
- workspaceDevDependencies: workspaceDevDeps
612
- });
887
+ const dependencyCommits = [];
888
+ for (const commit of globalCommitsAffectingPackage) {
889
+ const files = commitFilesMap.get(commit.shortHash);
890
+ if (!files) continue;
891
+ if (files.some((file) => DEPENDENCY_FILES.includes(file.startsWith("./") ? file.slice(2) : file))) {
892
+ logger.verbose("Global commit affects dependencies", `${farver.bold(pkgName)}: commit ${farver.cyan(commit.shortHash)} affects dependencies`);
893
+ dependencyCommits.push(commit);
894
+ }
895
+ }
896
+ logger.verbose("Global commits affect dependencies", `${farver.bold(pkgName)}: ${farver.cyan(dependencyCommits.length)} global commits affect dependencies`);
897
+ result.set(pkgName, dependencyCommits);
613
898
  }
614
- return packages;
899
+ return result;
615
900
  }
616
- function extractWorkspaceDependencies(dependencies, workspacePackages) {
617
- if (!dependencies) return [];
618
- return Object.keys(dependencies).filter((dep) => {
619
- return workspacePackages.has(dep);
620
- });
901
+ function determineBumpType(commit) {
902
+ if (commit.isBreaking) return "major";
903
+ if (!commit.isConventional || !commit.type) return "none";
904
+ switch (commit.type) {
905
+ case "feat": return "minor";
906
+ case "fix":
907
+ case "perf": return "patch";
908
+ case "docs":
909
+ case "style":
910
+ case "refactor":
911
+ case "test":
912
+ case "build":
913
+ case "ci":
914
+ case "chore":
915
+ case "revert": return "none";
916
+ default: return "none";
917
+ }
621
918
  }
622
- function buildDependencyGraph(packages) {
919
+
920
+ //#endregion
921
+ //#region src/versioning/package.ts
922
+ /**
923
+ * Build a dependency graph from workspace packages
924
+ *
925
+ * Creates a bidirectional graph that maps:
926
+ * - packages: Map of package name → WorkspacePackage
927
+ * - dependents: Map of package name → Set of packages that depend on it
928
+ *
929
+ * @param packages - All workspace packages
930
+ * @returns Dependency graph with packages and dependents maps
931
+ */
932
+ function buildPackageDependencyGraph(packages) {
623
933
  const packagesMap = /* @__PURE__ */ new Map();
624
934
  const dependents = /* @__PURE__ */ new Map();
625
935
  for (const pkg of packages) {
@@ -638,203 +948,809 @@ function buildDependencyGraph(packages) {
638
948
  dependents
639
949
  };
640
950
  }
641
- function getPackageUpdateOrder(graph, changedPackages) {
642
- const result = [];
643
- const visited = /* @__PURE__ */ new Set();
644
- const toUpdate = new Set(changedPackages);
645
- const packagesToProcess = new Set(changedPackages);
646
- for (const pkg of changedPackages) {
647
- const deps = graph.dependents.get(pkg);
648
- if (deps) for (const dep of deps) {
649
- packagesToProcess.add(dep);
650
- toUpdate.add(dep);
951
+ /**
952
+ * Get all packages affected by changes (including transitive dependents)
953
+ *
954
+ * Uses graph traversal to find all packages that need updates:
955
+ * - Packages with direct changes
956
+ * - All packages that depend on changed packages (transitively)
957
+ *
958
+ * @param graph - Dependency graph
959
+ * @param changedPackages - Set of package names with direct changes
960
+ * @returns Set of all package names that need updates
961
+ */
962
+ function getAllAffectedPackages(graph, changedPackages) {
963
+ const affected = /* @__PURE__ */ new Set();
964
+ function visitDependents(pkgName) {
965
+ if (affected.has(pkgName)) return;
966
+ affected.add(pkgName);
967
+ const dependents = graph.dependents.get(pkgName);
968
+ if (dependents) for (const dependent of dependents) visitDependents(dependent);
969
+ }
970
+ for (const pkg of changedPackages) visitDependents(pkg);
971
+ return affected;
972
+ }
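A minimal sketch of the traversal above, using a hand-built graph in the { packages, dependents } shape produced by buildPackageDependencyGraph; the package names are hypothetical. A change to the core package ripples to every transitive dependent:

    // dependents: package name → Set of package names that depend on it.
    const graph = {
      packages: new Map(), // unused by this traversal
      dependents: new Map([
        ["@scope/core", new Set(["@scope/plugin"])],
        ["@scope/plugin", new Set(["@scope/cli"])],
        ["@scope/cli", new Set()],
      ]),
    };
    const affected = getAllAffectedPackages(graph, new Set(["@scope/core"]));
    console.log([...affected]); // ["@scope/core", "@scope/plugin", "@scope/cli"]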
973
+ /**
974
+ * Create version updates for all packages affected by dependency changes
975
+ *
976
+ * When a package is updated, all packages that depend on it should also be updated.
977
+ * This function calculates which additional packages need patch bumps due to dependency changes.
978
+ *
979
+ * @param graph - Dependency graph
980
+ * @param workspacePackages - All workspace packages
981
+ * @param directUpdates - Packages with direct code changes
982
+ * @returns All updates including dependent packages that need patch bumps
983
+ */
984
+ function createDependentUpdates(graph, workspacePackages, directUpdates) {
985
+ const allUpdates = [...directUpdates];
986
+ const directUpdateMap = new Map(directUpdates.map((u) => [u.package.name, u]));
987
+ const affectedPackages = getAllAffectedPackages(graph, new Set(directUpdates.map((u) => u.package.name)));
988
+ for (const pkgName of affectedPackages) {
989
+ logger.verbose(`Processing affected package: ${pkgName}`);
990
+ if (directUpdateMap.has(pkgName)) {
991
+ logger.verbose(`Skipping ${pkgName}, already has a direct update`);
992
+ continue;
651
993
  }
994
+ const pkg = workspacePackages.find((p) => p.name === pkgName);
995
+ if (!pkg) continue;
996
+ allUpdates.push(createVersionUpdate(pkg, "patch", false));
652
997
  }
653
- function visit(pkgName, level) {
654
- if (visited.has(pkgName)) return;
655
- visited.add(pkgName);
656
- const pkg = graph.packages.get(pkgName);
657
- if (!pkg) return;
658
- const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
659
- let maxDepLevel = level;
660
- for (const dep of allDeps) if (toUpdate.has(dep)) {
661
- visit(dep, level);
662
- const depResult = result.find((r) => r.package.name === dep);
663
- if (depResult && depResult.level >= maxDepLevel) maxDepLevel = depResult.level + 1;
998
+ return allUpdates;
999
+ }
1000
+
1001
+ //#endregion
1002
+ //#region src/versioning/version.ts
1003
+ function isValidSemver(version) {
1004
+ return /^\d+\.\d+\.\d+(?:[-+].+)?$/.test(version);
1005
+ }
1006
+ function getNextVersion(currentVersion, bump) {
1007
+ if (bump === "none") {
1008
+ logger.verbose(`No version bump needed, keeping version ${currentVersion}`);
1009
+ return currentVersion;
1010
+ }
1011
+ if (!isValidSemver(currentVersion)) throw new Error(`Cannot bump version for invalid semver: ${currentVersion}`);
1012
+ const match = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)(.*)$/);
1013
+ if (!match) throw new Error(`Invalid semver version: ${currentVersion}`);
1014
+ const [, major, minor, patch] = match;
1015
+ let newMajor = Number.parseInt(major, 10);
1016
+ let newMinor = Number.parseInt(minor, 10);
1017
+ let newPatch = Number.parseInt(patch, 10);
1018
+ switch (bump) {
1019
+ case "major":
1020
+ newMajor += 1;
1021
+ newMinor = 0;
1022
+ newPatch = 0;
1023
+ break;
1024
+ case "minor":
1025
+ newMinor += 1;
1026
+ newPatch = 0;
1027
+ break;
1028
+ case "patch":
1029
+ newPatch += 1;
1030
+ break;
1031
+ }
1032
+ return `${newMajor}.${newMinor}.${newPatch}`;
1033
+ }
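getNextVersion is a plain numeric bump: the regex splits out major/minor/patch, any pre-release or build suffix passes isValidSemver but is dropped from the result, and "none" returns the version unchanged. Expected outputs for a few hypothetical inputs, assuming the function above is in scope:

    console.log(getNextVersion("1.2.3", "patch"));        // "1.2.4"
    console.log(getNextVersion("1.2.3", "minor"));        // "1.3.0"
    console.log(getNextVersion("1.2.3", "major"));        // "2.0.0"
    console.log(getNextVersion("1.2.3", "none"));         // "1.2.3" (unchanged)
    console.log(getNextVersion("0.1.0-beta.2", "patch")); // "0.1.1" — the prerelease suffix is dropped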
1034
+ function createVersionUpdate(pkg, bump, hasDirectChanges) {
1035
+ const newVersion = getNextVersion(pkg.version, bump);
1036
+ return {
1037
+ package: pkg,
1038
+ currentVersion: pkg.version,
1039
+ newVersion,
1040
+ bumpType: bump,
1041
+ hasDirectChanges
1042
+ };
1043
+ }
1044
+ function _calculateBumpType(oldVersion, newVersion) {
1045
+ if (!isValidSemver(oldVersion) || !isValidSemver(newVersion)) throw new Error(`Cannot calculate bump type for invalid semver: ${oldVersion} or ${newVersion}`);
1046
+ const oldParts = oldVersion.split(".").map(Number);
1047
+ const newParts = newVersion.split(".").map(Number);
1048
+ if (newParts[0] > oldParts[0]) return "major";
1049
+ if (newParts[1] > oldParts[1]) return "minor";
1050
+ if (newParts[2] > oldParts[2]) return "patch";
1051
+ return "none";
1052
+ }
1053
+ const messageColorMap = {
1054
+ feat: farver.green,
1055
+ feature: farver.green,
1056
+ refactor: farver.cyan,
1057
+ style: farver.cyan,
1058
+ docs: farver.blue,
1059
+ doc: farver.blue,
1060
+ types: farver.blue,
1061
+ type: farver.blue,
1062
+ chore: farver.gray,
1063
+ ci: farver.gray,
1064
+ build: farver.gray,
1065
+ deps: farver.gray,
1066
+ dev: farver.gray,
1067
+ fix: farver.yellow,
1068
+ test: farver.yellow,
1069
+ perf: farver.magenta,
1070
+ revert: farver.red,
1071
+ breaking: farver.red
1072
+ };
1073
+ function formatCommitsForDisplay(commits) {
1074
+ if (commits.length === 0) return farver.dim("No commits found");
1075
+ const maxCommitsToShow = 10;
1076
+ const commitsToShow = commits.slice(0, maxCommitsToShow);
1077
+ const hasMore = commits.length > maxCommitsToShow;
1078
+ const typeLength = commits.map(({ type }) => type.length).reduce((a, b) => Math.max(a, b), 0);
1079
+ const scopeLength = commits.map(({ scope }) => scope?.length).reduce((a, b) => Math.max(a || 0, b || 0), 0) || 0;
1080
+ const formattedCommits = commitsToShow.map((commit) => {
1081
+ let color = messageColorMap[commit.type] || ((c) => c);
1082
+ if (commit.isBreaking) color = (s) => farver.inverse.red(s);
1083
+ const paddedType = commit.type.padStart(typeLength + 1, " ");
1084
+ const paddedScope = !commit.scope ? " ".repeat(scopeLength ? scopeLength + 2 : 0) : farver.dim("(") + commit.scope + farver.dim(")") + " ".repeat(scopeLength - commit.scope.length);
1085
+ return [
1086
+ farver.dim(commit.shortHash),
1087
+ " ",
1088
+ color === farver.gray ? color(paddedType) : farver.bold(color(paddedType)),
1089
+ " ",
1090
+ paddedScope,
1091
+ farver.dim(":"),
1092
+ " ",
1093
+ color === farver.gray ? color(commit.description) : commit.description
1094
+ ].join("");
1095
+ }).join("\n");
1096
+ if (hasMore) return `${formattedCommits}\n ${farver.dim(`... and ${commits.length - maxCommitsToShow} more commits`)}`;
1097
+ return formattedCommits;
1098
+ }
1099
+ async function calculateVersionUpdates({ workspacePackages, packageCommits, workspaceRoot, showPrompt, globalCommitsPerPackage, overrides: initialOverrides = {} }) {
1100
+ const versionUpdates = [];
1101
+ const processedPackages = /* @__PURE__ */ new Set();
1102
+ const newOverrides = { ...initialOverrides };
1103
+ const bumpRanks = {
1104
+ major: 3,
1105
+ minor: 2,
1106
+ patch: 1,
1107
+ none: 0
1108
+ };
1109
+ logger.verbose(`Starting version inference for ${packageCommits.size} packages with commits`);
1110
+ for (const [pkgName, pkgCommits] of packageCommits) {
1111
+ const pkg = workspacePackages.find((p) => p.name === pkgName);
1112
+ if (!pkg) {
1113
+ logger.error(`Package ${pkgName} not found in workspace packages, skipping`);
1114
+ continue;
1115
+ }
1116
+ processedPackages.add(pkgName);
1117
+ const globalCommits = globalCommitsPerPackage.get(pkgName) || [];
1118
+ const allCommitsForPackage = [...pkgCommits, ...globalCommits];
1119
+ const determinedBump = determineHighestBump(allCommitsForPackage);
1120
+ const override = newOverrides[pkgName];
1121
+ const effectiveBump = override?.type || determinedBump;
1122
+ if (effectiveBump === "none") continue;
1123
+ let newVersion = override?.version || getNextVersion(pkg.version, effectiveBump);
1124
+ let finalBumpType = effectiveBump;
1125
+ if (!isCI && showPrompt) {
1126
+ logger.clearScreen();
1127
+ logger.section(`📝 Commits for ${farver.cyan(pkg.name)}`);
1128
+ formatCommitsForDisplay(allCommitsForPackage).split("\n").forEach((line) => logger.item(line));
1129
+ logger.emptyLine();
1130
+ const selectedVersion = await selectVersionPrompt(workspaceRoot, pkg, pkg.version, newVersion);
1131
+ if (selectedVersion === null) continue;
1132
+ const userBump = _calculateBumpType(pkg.version, selectedVersion);
1133
+ finalBumpType = userBump;
1134
+ if (bumpRanks[userBump] < bumpRanks[determinedBump]) {
1135
+ newOverrides[pkgName] = {
1136
+ type: userBump,
1137
+ version: selectedVersion
1138
+ };
1139
+ logger.info(`Version override recorded for ${pkgName}: ${determinedBump} → ${userBump}`);
1140
+ } else if (newOverrides[pkgName] && bumpRanks[userBump] >= bumpRanks[determinedBump]) {
1141
+ delete newOverrides[pkgName];
1142
+ logger.info(`Version override removed for ${pkgName}.`);
1143
+ }
1144
+ newVersion = selectedVersion;
664
1145
  }
665
- result.push({
1146
+ versionUpdates.push({
666
1147
  package: pkg,
667
- level: maxDepLevel
1148
+ currentVersion: pkg.version,
1149
+ newVersion,
1150
+ bumpType: finalBumpType,
1151
+ hasDirectChanges: allCommitsForPackage.length > 0
668
1152
  });
669
1153
  }
670
- for (const pkg of toUpdate) visit(pkg, 0);
671
- result.sort((a, b) => a.level - b.level);
672
- return result;
1154
+ if (!isCI && showPrompt) for (const pkg of workspacePackages) {
1155
+ if (processedPackages.has(pkg.name)) continue;
1156
+ logger.clearScreen();
1157
+ logger.section(`📦 Package: ${pkg.name}`);
1158
+ logger.item("No direct commits found");
1159
+ const newVersion = await selectVersionPrompt(workspaceRoot, pkg, pkg.version, pkg.version);
1160
+ if (newVersion === null) break;
1161
+ if (newVersion !== pkg.version) {
1162
+ const bumpType = _calculateBumpType(pkg.version, newVersion);
1163
+ versionUpdates.push({
1164
+ package: pkg,
1165
+ currentVersion: pkg.version,
1166
+ newVersion,
1167
+ bumpType,
1168
+ hasDirectChanges: false
1169
+ });
1170
+ }
1171
+ }
1172
+ return {
1173
+ updates: versionUpdates,
1174
+ overrides: newOverrides
1175
+ };
1176
+ }
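The override bookkeeping above only persists a prompt answer when it is a lower bump than the one inferred from commits (so later non-interactive runs do not silently re-escalate it), and clears a stored override once the user picks an equal or higher bump again; the stored file lives at .github/ucdjs-release.overrides.json. A condensed sketch of that comparison, with a hypothetical package name and version:

    const bumpRanks = { major: 3, minor: 2, patch: 1, none: 0 };
    const determinedBump = "minor"; // inferred from the package's commits
    const userBump = "patch";       // chosen at the version prompt
    const newOverrides = {};
    if (bumpRanks[userBump] < bumpRanks[determinedBump]) {
      // Persisted so CI re-runs keep the downgraded bump.
      newOverrides["@scope/core"] = { type: userBump, version: "1.3.1" }; // hypothetical
    }
    console.log(newOverrides); // { "@scope/core": { type: "patch", version: "1.3.1" } }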
1177
+ /**
1178
+ * Calculate version updates and prepare dependent updates
1179
+ * Returns both the updates and a function to apply them
1180
+ */
1181
+ async function calculateAndPrepareVersionUpdates({ workspacePackages, packageCommits, workspaceRoot, showPrompt, globalCommitsPerPackage, overrides }) {
1182
+ const { updates: directUpdates, overrides: newOverrides } = await calculateVersionUpdates({
1183
+ workspacePackages,
1184
+ packageCommits,
1185
+ workspaceRoot,
1186
+ showPrompt,
1187
+ globalCommitsPerPackage,
1188
+ overrides
1189
+ });
1190
+ const allUpdates = createDependentUpdates(buildPackageDependencyGraph(workspacePackages), workspacePackages, directUpdates);
1191
+ const applyUpdates = async () => {
1192
+ await Promise.all(allUpdates.map(async (update) => {
1193
+ const depUpdates = getDependencyUpdates(update.package, allUpdates);
1194
+ await updatePackageJson(update.package, update.newVersion, depUpdates);
1195
+ }));
1196
+ };
1197
+ return {
1198
+ allUpdates,
1199
+ applyUpdates,
1200
+ overrides: newOverrides
1201
+ };
1202
+ }
1203
+ async function updatePackageJson(pkg, newVersion, dependencyUpdates) {
1204
+ const packageJsonPath = join(pkg.path, "package.json");
1205
+ const content = await readFile(packageJsonPath, "utf-8");
1206
+ const packageJson = JSON.parse(content);
1207
+ packageJson.version = newVersion;
1208
+ function updateDependency(deps, depName, depVersion, isPeerDependency = false) {
1209
+ if (!deps) return;
1210
+ const oldVersion = deps[depName];
1211
+ if (!oldVersion) return;
1212
+ if (oldVersion === "workspace:*") {
1213
+ logger.verbose(` - Skipping workspace:* dependency: ${depName}`);
1214
+ return;
1215
+ }
1216
+ if (isPeerDependency) {
1217
+ const majorVersion = depVersion.split(".")[0];
1218
+ deps[depName] = `>=${depVersion} <${Number(majorVersion) + 1}.0.0`;
1219
+ } else deps[depName] = `^${depVersion}`;
1220
+ logger.verbose(` - Updated dependency ${depName}: ${oldVersion} → ${deps[depName]}`);
1221
+ }
1222
+ for (const [depName, depVersion] of dependencyUpdates) {
1223
+ updateDependency(packageJson.dependencies, depName, depVersion);
1224
+ updateDependency(packageJson.devDependencies, depName, depVersion);
1225
+ updateDependency(packageJson.peerDependencies, depName, depVersion, true);
1226
+ }
1227
+ await writeFile(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf-8");
1228
+ logger.verbose(` - Successfully wrote updated package.json`);
1229
+ }
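The updateDependency helper above rewrites workspace dependency ranges after a bump: regular and dev dependencies get a caret range on the new version, peer dependencies get a ">= new version, < next major" range, and "workspace:*" entries are left untouched. A standalone sketch of the two range computations, using a hypothetical version:

    const depVersion = "1.4.0"; // hypothetical new version of a workspace dependency
    const caretRange = `^${depVersion}`;                                  // dependencies / devDependencies
    const majorVersion = depVersion.split(".")[0];
    const peerRange = `>=${depVersion} <${Number(majorVersion) + 1}.0.0`; // peerDependencies
    console.log(caretRange); // "^1.4.0"
    console.log(peerRange);  // ">=1.4.0 <2.0.0"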
1230
+ /**
1231
+ * Get all dependency updates needed for a package
1232
+ */
1233
+ function getDependencyUpdates(pkg, allUpdates) {
1234
+ const updates = /* @__PURE__ */ new Map();
1235
+ const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
1236
+ for (const dep of allDeps) {
1237
+ const update = allUpdates.find((u) => u.package.name === dep);
1238
+ if (update) {
1239
+ logger.verbose(` - Dependency ${dep} will be updated: ${update.currentVersion} → ${update.newVersion} (${update.bumpType})`);
1240
+ updates.set(dep, update.newVersion);
1241
+ }
1242
+ }
1243
+ if (updates.size === 0) logger.verbose(` - No dependency updates needed`);
1244
+ return updates;
673
1245
  }
674
1246
 
675
1247
  //#endregion
676
- //#region src/release.ts
677
- const isCI = process.env.CI === "true";
678
- async function release(options) {
679
- const { dryRun: dryRun$1 = false, safeguards = true, workspaceRoot = process.cwd(), releaseBranch = "release/next", githubToken } = options;
680
- globalOptions.dryRun = dryRun$1;
681
- if (githubToken.trim() === "" || githubToken == null) throw new Error("GitHub token is required");
682
- const [owner, repo] = options.repo.split("/");
683
- if (!owner || !repo) throw new Error(`Invalid repo format: ${options.repo}. Expected "owner/repo".`);
684
- if (safeguards && !isWorkingDirectoryClean(workspaceRoot)) {
685
- console.error("Working directory is not clean. Please commit or stash your changes before proceeding.");
686
- return null;
1248
+ //#region src/core/prompts.ts
1249
+ async function selectPackagePrompt(packages) {
1250
+ const response = await prompts({
1251
+ type: "multiselect",
1252
+ name: "selectedPackages",
1253
+ message: "Select packages to release",
1254
+ choices: packages.map((pkg) => ({
1255
+ title: `${pkg.name} (${farver.bold(pkg.version)})`,
1256
+ value: pkg.name,
1257
+ selected: true
1258
+ })),
1259
+ min: 1,
1260
+ hint: "Space to select/deselect. Return to submit.",
1261
+ instructions: false
1262
+ });
1263
+ if (!response.selectedPackages || response.selectedPackages.length === 0) return [];
1264
+ return response.selectedPackages;
1265
+ }
1266
+ async function selectVersionPrompt(workspaceRoot, pkg, currentVersion, suggestedVersion) {
1267
+ const answers = await prompts([{
1268
+ type: "autocomplete",
1269
+ name: "version",
1270
+ message: `${pkg.name}: ${farver.green(pkg.version)}`,
1271
+ choices: [
1272
+ {
1273
+ value: "skip",
1274
+ title: `skip ${farver.dim("(no change)")}`
1275
+ },
1276
+ {
1277
+ value: "major",
1278
+ title: `major ${farver.bold(getNextVersion(pkg.version, "major"))}`
1279
+ },
1280
+ {
1281
+ value: "minor",
1282
+ title: `minor ${farver.bold(getNextVersion(pkg.version, "minor"))}`
1283
+ },
1284
+ {
1285
+ value: "patch",
1286
+ title: `patch ${farver.bold(getNextVersion(pkg.version, "patch"))}`
1287
+ },
1288
+ {
1289
+ value: "suggested",
1290
+ title: `suggested ${farver.bold(suggestedVersion)}`
1291
+ },
1292
+ {
1293
+ value: "custom",
1294
+ title: "custom"
1295
+ }
1296
+ ],
1297
+ initial: suggestedVersion === currentVersion ? 0 : 4
1298
+ }, {
1299
+ type: (prev) => prev === "custom" ? "text" : null,
1300
+ name: "custom",
1301
+ message: "Enter the new version number:",
1302
+ initial: suggestedVersion,
1303
+ validate: (custom) => {
1304
+ if (isValidSemver(custom)) return true;
1305
+ return "That's not a valid version number";
1306
+ }
1307
+ }]);
1308
+ if (!answers.version) return null;
1309
+ if (answers.version === "skip") return null;
1310
+ else if (answers.version === "suggested") return suggestedVersion;
1311
+ else if (answers.version === "custom") {
1312
+ if (!answers.custom) return null;
1313
+ return answers.custom;
1314
+ } else return getNextVersion(pkg.version, answers.version);
1315
+ }
1316
+
1317
+ //#endregion
1318
+ //#region src/core/workspace.ts
1319
+ async function discoverWorkspacePackages(workspaceRoot, options) {
1320
+ let workspaceOptions;
1321
+ let explicitPackages;
1322
+ if (options.packages == null || options.packages === true) workspaceOptions = { excludePrivate: false };
1323
+ else if (Array.isArray(options.packages)) {
1324
+ workspaceOptions = {
1325
+ excludePrivate: false,
1326
+ include: options.packages
1327
+ };
1328
+ explicitPackages = options.packages;
1329
+ } else {
1330
+ workspaceOptions = options.packages;
1331
+ if (options.packages.include) explicitPackages = options.packages.include;
1332
+ }
1333
+ let workspacePackages = await findWorkspacePackages(workspaceRoot, workspaceOptions);
1334
+ if (explicitPackages) {
1335
+ const foundNames = new Set(workspacePackages.map((p) => p.name));
1336
+ const missing = explicitPackages.filter((p) => !foundNames.has(p));
1337
+ if (missing.length > 0) exitWithError(`Package${missing.length > 1 ? "s" : ""} not found in workspace: ${missing.join(", ")}`, "Check your package names or run 'pnpm ls' to see available packages");
687
1338
  }
688
- const { workspacePackages, packagesToAnalyze: initialPackages } = await discoverPackages(workspaceRoot, options);
689
- if (initialPackages.length === 0) return null;
690
1339
  const isPackagePromptEnabled = options.prompts?.packages !== false;
691
- const isPackagesPreConfigured = Array.isArray(options.packages) || typeof options.packages === "object" && options.packages.included != null;
692
- let packagesToAnalyze = initialPackages;
693
- if (!isCI && isPackagePromptEnabled && !isPackagesPreConfigured) {
694
- const selectedNames = await promptPackageSelection(initialPackages);
695
- packagesToAnalyze = initialPackages.filter((pkg) => selectedNames.includes(pkg.name));
696
- }
697
- const changedPackages = await analyzeCommits(packagesToAnalyze, workspaceRoot);
698
- if (changedPackages.size === 0) throw new Error("No packages have changes requiring a release");
699
- let versionUpdates = calculateVersions(workspacePackages, changedPackages);
700
- const isVersionPromptEnabled = options.prompts?.versions !== false;
701
- if (!isCI && isVersionPromptEnabled) {
702
- const versionOverrides = await promptVersionOverrides(versionUpdates.map((u) => ({
703
- name: u.package.name,
704
- currentVersion: u.currentVersion,
705
- suggestedVersion: u.newVersion,
706
- bumpType: u.bumpType
707
- })));
708
- versionUpdates = versionUpdates.map((update) => {
709
- const overriddenVersion = versionOverrides.get(update.package.name);
710
- if (overriddenVersion && overriddenVersion !== update.newVersion) return {
711
- ...update,
712
- newVersion: overriddenVersion
713
- };
714
- return update;
715
- });
1340
+ if (!isCI && isPackagePromptEnabled && !explicitPackages) {
1341
+ const selectedNames = await selectPackagePrompt(workspacePackages);
1342
+ workspacePackages = workspacePackages.filter((pkg) => selectedNames.includes(pkg.name));
716
1343
  }
717
- const allUpdates = createDependentUpdates(getPackageUpdateOrder(buildDependencyGraph(workspacePackages), new Set(versionUpdates.map((u) => u.package.name))), versionUpdates);
718
- const currentBranch = await getCurrentBranch(workspaceRoot);
719
- const existingPullRequest = await getExistingPullRequest({
720
- owner,
721
- repo,
722
- branch: releaseBranch,
723
- githubToken
724
- });
725
- const prExists = !!existingPullRequest;
726
- if (prExists) console.log("Existing pull request found:", existingPullRequest.html_url);
727
- else console.log("No existing pull request found, will create new one");
728
- const branchExists = await doesBranchExist(releaseBranch, workspaceRoot);
729
- if (!branchExists) {
730
- console.log("Creating release branch:", releaseBranch);
731
- await createBranch(releaseBranch, currentBranch, workspaceRoot);
732
- }
733
- if (!await checkoutBranch(releaseBranch, workspaceRoot)) throw new Error(`Failed to checkout branch: ${releaseBranch}`);
734
- if (branchExists) {
735
- console.log("Pulling latest changes from remote");
736
- if (!await pullLatestChanges(releaseBranch, workspaceRoot)) console.log("Warning: Failed to pull latest changes, continuing anyway");
737
- }
738
- console.log("Rebasing release branch onto", currentBranch);
739
- await rebaseBranch(currentBranch, workspaceRoot);
740
- await updatePackageJsonFiles(allUpdates);
741
- const hasCommitted = await commitChanges("chore: update release versions", workspaceRoot);
742
- const isBranchAhead = await isBranchAheadOfRemote(releaseBranch, workspaceRoot);
743
- if (!hasCommitted && !isBranchAhead) {
744
- console.log("No changes to commit and branch is in sync with remote");
745
- await checkoutBranch(currentBranch, workspaceRoot);
746
- if (prExists) {
747
- console.log("No updates needed, PR is already up to date");
1344
+ return workspacePackages;
1345
+ }
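Per the normalization above, discoverWorkspacePackages accepts three shapes for options.packages: true (or omitted) releases everything including private packages, an array is an explicit allow-list whose names must all exist in the workspace (missing names abort with an error), and an object passes include/exclude/excludePrivate through. A hedged usage sketch; the root path and package names are placeholders:

    // Everything in the workspace, private packages included:
    await discoverWorkspacePackages("/repo", { packages: true });
    // Explicit allow-list; unknown names exit with an error:
    await discoverWorkspacePackages("/repo", { packages: ["@scope/core", "@scope/cli"] });
    // Fine-grained filtering:
    await discoverWorkspacePackages("/repo", {
      packages: { excludePrivate: true, exclude: ["@scope/examples"] },
    });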
1346
+ async function findWorkspacePackages(workspaceRoot, options) {
1347
+ try {
1348
+ const result = await run("pnpm", [
1349
+ "-r",
1350
+ "ls",
1351
+ "--json"
1352
+ ], { nodeOptions: {
1353
+ cwd: workspaceRoot,
1354
+ stdio: "pipe"
1355
+ } });
1356
+ const rawProjects = JSON.parse(result.stdout);
1357
+ const allPackageNames = new Set(rawProjects.map((p) => p.name));
1358
+ const excludedPackages = /* @__PURE__ */ new Set();
1359
+ const promises = rawProjects.map(async (rawProject) => {
1360
+ const content = await readFile(join(rawProject.path, "package.json"), "utf-8");
1361
+ const packageJson = JSON.parse(content);
1362
+ if (!shouldIncludePackage(packageJson, options)) {
1363
+ excludedPackages.add(rawProject.name);
1364
+ return null;
1365
+ }
748
1366
  return {
749
- updates: allUpdates,
750
- prUrl: existingPullRequest.html_url,
751
- created: false
1367
+ name: rawProject.name,
1368
+ version: rawProject.version,
1369
+ path: rawProject.path,
1370
+ packageJson,
1371
+ workspaceDependencies: Object.keys(rawProject.dependencies || []).filter((dep) => {
1372
+ return allPackageNames.has(dep);
1373
+ }),
1374
+ workspaceDevDependencies: Object.keys(rawProject.devDependencies || []).filter((dep) => {
1375
+ return allPackageNames.has(dep);
1376
+ })
752
1377
  };
753
- } else {
754
- console.error("No changes to commit, and no existing PR. Nothing to do.");
755
- return null;
756
- }
1378
+ });
1379
+ const packages = await Promise.all(promises);
1380
+ if (excludedPackages.size > 0) logger.info(`Excluded packages: ${farver.green(Array.from(excludedPackages).join(", "))}`);
1381
+ return packages.filter((pkg) => pkg !== null);
1382
+ } catch (err) {
1383
+ logger.error("Error discovering workspace packages:", err);
1384
+ throw err;
1385
+ }
1386
+ }
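findWorkspacePackages shells out to `pnpm -r ls --json` and only relies on each project's name, version, path, and the keys of dependencies/devDependencies; a dependency counts as a workspace dependency when its name is also a workspace package. A trimmed sketch of that filtering over hypothetical output (the shape is assumed from the usage above; real output carries more fields):

    const rawProjects = [
      { name: "@scope/core", version: "1.3.0", path: "/repo/packages/core",
        dependencies: {}, devDependencies: {} },
      { name: "@scope/cli", version: "0.5.0", path: "/repo/packages/cli",
        dependencies: { "@scope/core": {}, "mri": {} }, devDependencies: {} },
    ];
    const allPackageNames = new Set(rawProjects.map((p) => p.name));
    const workspaceDependencies = Object.keys(rawProjects[1].dependencies || {})
      .filter((dep) => allPackageNames.has(dep));
    console.log(workspaceDependencies); // ["@scope/core"] — "mri" is external and ignored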
1387
+ function shouldIncludePackage(pkg, options) {
1388
+ if (!options) return true;
1389
+ if (options.excludePrivate && pkg.private) return false;
1390
+ if (options.include && options.include.length > 0) {
1391
+ if (!options.include.includes(pkg.name)) return false;
757
1392
  }
758
- console.log("Pushing changes to remote");
759
- await pushBranch(releaseBranch, workspaceRoot, { forceWithLease: true });
760
- const prTitle = existingPullRequest?.title || "Release: Update package versions";
761
- const prBody = generatePRBody(allUpdates);
762
- const pullRequest = await upsertPullRequest({
1393
+ if (options.exclude?.includes(pkg.name)) return false;
1394
+ return true;
1395
+ }
1396
+
1397
+ //#endregion
1398
+ //#region src/shared/options.ts
1399
+ const DEFAULT_COMMIT_GROUPS = [
1400
+ {
1401
+ name: "features",
1402
+ title: "Features",
1403
+ types: ["feat"]
1404
+ },
1405
+ {
1406
+ name: "fixes",
1407
+ title: "Bug Fixes",
1408
+ types: ["fix", "perf"]
1409
+ },
1410
+ {
1411
+ name: "refactor",
1412
+ title: "Refactoring",
1413
+ types: ["refactor"]
1414
+ },
1415
+ {
1416
+ name: "docs",
1417
+ title: "Documentation",
1418
+ types: ["docs"]
1419
+ }
1420
+ ];
1421
+ function normalizeSharedOptions(options) {
1422
+ const { workspaceRoot = process.cwd(), githubToken = "", repo: fullRepo, packages = true, prompts: prompts$1 = {
1423
+ packages: true,
1424
+ versions: true
1425
+ }, groups = DEFAULT_COMMIT_GROUPS } = options;
1426
+ if (!githubToken.trim()) exitWithError("GitHub token is required", "Set GITHUB_TOKEN environment variable or pass it in options");
1427
+ if (!fullRepo || !fullRepo.trim() || !fullRepo.includes("/")) exitWithError("Repository (repo) is required", "Specify the repository in 'owner/repo' format (e.g., 'octocat/hello-world')");
1428
+ const [owner, repo] = fullRepo.split("/");
1429
+ if (!owner || !repo) exitWithError(`Invalid repo format: "${fullRepo}"`, "Expected format: \"owner/repo\" (e.g., \"octocat/hello-world\")");
1430
+ return {
1431
+ packages: typeof packages === "object" && !Array.isArray(packages) ? {
1432
+ exclude: packages.exclude ?? [],
1433
+ include: packages.include ?? [],
1434
+ excludePrivate: packages.excludePrivate ?? false
1435
+ } : packages,
1436
+ prompts: {
1437
+ packages: prompts$1?.packages ?? true,
1438
+ versions: prompts$1?.versions ?? true
1439
+ },
1440
+ workspaceRoot,
1441
+ githubToken,
763
1442
  owner,
764
1443
  repo,
765
- pullNumber: existingPullRequest?.number,
766
- title: prTitle,
767
- body: prBody,
768
- head: releaseBranch,
769
- base: currentBranch,
770
- githubToken
771
- });
772
- console.log(prExists ? "Updated pull request:" : "Created pull request:", pullRequest?.html_url);
773
- await checkoutBranch(currentBranch, workspaceRoot);
1444
+ groups
1445
+ };
1446
+ }
1447
+ async function normalizeReleaseOptions(options) {
1448
+ const normalized = normalizeSharedOptions(options);
1449
+ let defaultBranch = options.branch?.default?.trim();
1450
+ const releaseBranch = options.branch?.release?.trim() ?? "release/next";
1451
+ if (defaultBranch == null || defaultBranch === "") {
1452
+ defaultBranch = await getDefaultBranch(normalized.workspaceRoot);
1453
+ if (!defaultBranch) exitWithError("Could not determine default branch", "Please specify the default branch in options");
1454
+ }
1455
+ if (defaultBranch === releaseBranch) exitWithError(`Default branch and release branch cannot be the same: "${defaultBranch}"`, "Specify different branches for default and release");
1456
+ const availableBranches = await getAvailableBranches(normalized.workspaceRoot);
1457
+ if (!availableBranches.includes(defaultBranch)) exitWithError(`Default branch "${defaultBranch}" does not exist in the repository`, `Available branches: ${availableBranches.join(", ")}`);
1458
+ logger.verbose(`Using default branch: ${farver.green(defaultBranch)}`);
774
1459
  return {
775
- updates: allUpdates,
776
- prUrl: pullRequest?.html_url,
777
- created: !prExists
1460
+ ...normalized,
1461
+ branch: {
1462
+ release: releaseBranch,
1463
+ default: defaultBranch
1464
+ },
1465
+ safeguards: options.safeguards ?? true,
1466
+ globalCommitMode: options.globalCommitMode ?? "dependencies",
1467
+ pullRequest: {
1468
+ title: options.pullRequest?.title ?? "chore: release new version",
1469
+ body: options.pullRequest?.body ?? DEFAULT_PR_BODY_TEMPLATE
1470
+ },
1471
+ changelog: {
1472
+ enabled: options.changelog?.enabled ?? true,
1473
+ template: options.changelog?.template ?? DEFAULT_CHANGELOG_TEMPLATE
1474
+ }
778
1475
  };
779
1476
  }
780
- async function discoverPackages(workspaceRoot, options) {
781
- let workspacePackages;
782
- let packagesToAnalyze;
783
- if (typeof options.packages === "boolean" && options.packages === true) {
784
- workspacePackages = await findWorkspacePackages(workspaceRoot, { excludePrivate: false });
785
- packagesToAnalyze = workspacePackages;
786
- return {
787
- workspacePackages,
788
- packagesToAnalyze
789
- };
1477
+
1478
+ //#endregion
1479
+ //#region src/release.ts
1480
+ async function release(options) {
1481
+ const { workspaceRoot,...normalizedOptions } = await normalizeReleaseOptions(options);
1482
+ if (normalizedOptions.safeguards && !await isWorkingDirectoryClean(workspaceRoot)) exitWithError("Working directory is not clean. Please commit or stash your changes before proceeding.");
1483
+ const workspacePackages = await discoverWorkspacePackages(workspaceRoot, options);
1484
+ if (workspacePackages.length === 0) {
1485
+ logger.warn("No packages found to release");
1486
+ return null;
790
1487
  }
791
- if (Array.isArray(options.packages)) {
792
- const packageNames = options.packages;
793
- workspacePackages = await findWorkspacePackages(workspaceRoot, {
794
- excludePrivate: false,
795
- included: packageNames
796
- });
797
- packagesToAnalyze = workspacePackages.filter((pkg) => packageNames.includes(pkg.name));
798
- if (packagesToAnalyze.length !== packageNames.length) {
799
- const found = new Set(packagesToAnalyze.map((p) => p.name));
800
- const missing = packageNames.filter((p) => !found.has(p));
801
- throw new Error(`Packages not found in workspace: ${missing.join(", ")}`);
1488
+ logger.section("📦 Workspace Packages");
1489
+ logger.item(`Found ${workspacePackages.length} packages`);
1490
+ for (const pkg of workspacePackages) {
1491
+ logger.item(`${farver.cyan(pkg.name)} (${farver.bold(pkg.version)})`);
1492
+ logger.item(` ${farver.gray("→")} ${farver.gray(pkg.path)}`);
1493
+ }
1494
+ logger.emptyLine();
1495
+ const groupedPackageCommits = await getWorkspacePackageGroupedCommits(workspaceRoot, workspacePackages);
1496
+ const globalCommitsPerPackage = await getGlobalCommitsPerPackage(workspaceRoot, groupedPackageCommits, workspacePackages, normalizedOptions.globalCommitMode);
1497
+ const githubClient = createGitHubClient({
1498
+ owner: normalizedOptions.owner,
1499
+ repo: normalizedOptions.repo,
1500
+ githubToken: normalizedOptions.githubToken
1501
+ });
1502
+ const prOps = await orchestrateReleasePullRequest({
1503
+ workspaceRoot,
1504
+ githubClient,
1505
+ releaseBranch: normalizedOptions.branch.release,
1506
+ defaultBranch: normalizedOptions.branch.default,
1507
+ pullRequestTitle: options.pullRequest?.title,
1508
+ pullRequestBody: options.pullRequest?.body
1509
+ });
1510
+ await prOps.prepareBranch();
1511
+ const overridesPath = join(workspaceRoot, ucdjsReleaseOverridesPath);
1512
+ let existingOverrides = {};
1513
+ try {
1514
+ const overridesContent = await readFile(overridesPath, "utf-8");
1515
+ existingOverrides = JSON.parse(overridesContent);
1516
+ logger.info("Found existing version overrides file.");
1517
+ } catch {
1518
+ logger.info("No existing version overrides file found. Continuing...");
1519
+ }
1520
+ const { allUpdates, applyUpdates, overrides: newOverrides } = await calculateAndPrepareVersionUpdates({
1521
+ workspacePackages,
1522
+ packageCommits: groupedPackageCommits,
1523
+ workspaceRoot,
1524
+ showPrompt: options.prompts?.versions !== false,
1525
+ globalCommitsPerPackage,
1526
+ overrides: existingOverrides
1527
+ });
1528
+ if (Object.keys(newOverrides).length > 0) {
1529
+ logger.info("Writing version overrides file...");
1530
+ try {
1531
+ await mkdir(join(workspaceRoot, ".github"), { recursive: true });
1532
+ await writeFile(overridesPath, JSON.stringify(newOverrides, null, 2), "utf-8");
1533
+ logger.success("Successfully wrote version overrides file.");
1534
+ } catch (e) {
1535
+ logger.error("Failed to write version overrides file:", e);
1536
+ }
1537
+ }
1538
+ if (Object.keys(newOverrides).length === 0 && Object.keys(existingOverrides).length > 0) {
1539
+ let shouldRemoveOverrides = false;
1540
+ for (const update of allUpdates) {
1541
+ const overriddenVersion = existingOverrides[update.package.name];
1542
+ if (overriddenVersion) {
1543
+ if (compare(update.newVersion, overriddenVersion.version) > 0) {
1544
+ shouldRemoveOverrides = true;
1545
+ break;
1546
+ }
1547
+ }
802
1548
  }
1549
+ if (shouldRemoveOverrides) {
1550
+ logger.info("Removing obsolete version overrides file...");
1551
+ try {
1552
+ await rm(overridesPath);
1553
+ logger.success("Successfully removed obsolete version overrides file.");
1554
+ } catch (e) {
1555
+ logger.error("Failed to remove obsolete version overrides file:", e);
1556
+ }
1557
+ }
1558
+ }
1559
+ if (allUpdates.filter((u) => u.hasDirectChanges).length === 0) logger.warn("No packages have changes requiring a release");
1560
+ logger.section("🔄 Version Updates");
1561
+ logger.item(`Updating ${allUpdates.length} packages (including dependents)`);
1562
+ for (const update of allUpdates) logger.item(`${update.package.name}: ${update.currentVersion} → ${update.newVersion}`);
1563
+ await applyUpdates();
1564
+ if (normalizedOptions.changelog.enabled) {
1565
+ logger.step("Updating changelogs");
1566
+ const changelogPromises = allUpdates.map((update) => {
1567
+ const pkgCommits = groupedPackageCommits.get(update.package.name) || [];
1568
+ const globalCommits = globalCommitsPerPackage.get(update.package.name) || [];
1569
+ const allCommits = [...pkgCommits, ...globalCommits];
1570
+ if (allCommits.length === 0) {
1571
+ logger.verbose(`No commits for ${update.package.name}, skipping changelog`);
1572
+ return Promise.resolve();
1573
+ }
1574
+ logger.verbose(`Updating changelog for ${farver.cyan(update.package.name)}`);
1575
+ return updateChangelog({
1576
+ normalizedOptions: {
1577
+ ...normalizedOptions,
1578
+ workspaceRoot
1579
+ },
1580
+ githubClient,
1581
+ workspacePackage: update.package,
1582
+ version: update.newVersion,
1583
+ previousVersion: update.currentVersion !== "0.0.0" ? update.currentVersion : void 0,
1584
+ commits: allCommits,
1585
+ date: (/* @__PURE__ */ new Date()).toISOString().split("T")[0]
1586
+ });
1587
+ }).filter((p) => p != null);
1588
+ const updates = await Promise.all(changelogPromises);
1589
+ logger.success(`Updated ${updates.length} changelog(s)`);
1590
+ }
1591
+ if (!await prOps.syncChanges(true)) if (prOps.doesReleasePRExist && prOps.existingPullRequest) {
1592
+ logger.item("No updates needed, PR is already up to date");
1593
+ const { pullRequest: pullRequest$1, created: created$1 } = await prOps.syncPullRequest(allUpdates);
1594
+ await prOps.cleanup();
803
1595
  return {
804
- workspacePackages,
805
- packagesToAnalyze
1596
+ updates: allUpdates,
1597
+ prUrl: pullRequest$1?.html_url,
1598
+ created: created$1
806
1599
  };
1600
+ } else {
1601
+ logger.error("No changes to commit, and no existing PR. Nothing to do.");
1602
+ return null;
1603
+ }
1604
+ const { pullRequest, created } = await prOps.syncPullRequest(allUpdates);
1605
+ await prOps.cleanup();
1606
+ if (pullRequest?.html_url) {
1607
+ logger.section("🚀 Pull Request");
1608
+ logger.success(`Pull request ${created ? "created" : "updated"}: ${pullRequest.html_url}`);
807
1609
  }
808
- workspacePackages = await findWorkspacePackages(workspaceRoot, options.packages);
809
- packagesToAnalyze = workspacePackages;
810
1610
  return {
811
- workspacePackages,
812
- packagesToAnalyze
1611
+ updates: allUpdates,
1612
+ prUrl: pullRequest?.html_url,
1613
+ created
813
1614
  };
814
1615
  }
815
- async function analyzeCommits(packages, workspaceRoot) {
816
- const changedPackages = /* @__PURE__ */ new Map();
817
- for (const pkg of packages) {
818
- const bump = await analyzePackageCommits(pkg, workspaceRoot);
819
- if (bump !== "none") changedPackages.set(pkg.name, bump);
820
- }
821
- return changedPackages;
1616
+ async function orchestrateReleasePullRequest({ workspaceRoot, githubClient, releaseBranch, defaultBranch, pullRequestTitle, pullRequestBody }) {
1617
+ const currentBranch = await getCurrentBranch(workspaceRoot);
1618
+ if (currentBranch !== defaultBranch) exitWithError(`Current branch is '${currentBranch}'. Please switch to the default branch '${defaultBranch}' before proceeding.`, `git checkout ${defaultBranch}`);
1619
+ const existingPullRequest = await githubClient.getExistingPullRequest(releaseBranch);
1620
+ const doesReleasePRExist = !!existingPullRequest;
1621
+ if (doesReleasePRExist) logger.item("Found existing release pull request");
1622
+ else logger.item("Will create new pull request");
1623
+ const branchExists = await doesBranchExist(releaseBranch, workspaceRoot);
1624
+ return {
1625
+ existingPullRequest,
1626
+ doesReleasePRExist,
1627
+ async prepareBranch() {
1628
+ if (!branchExists) await createBranch(releaseBranch, defaultBranch, workspaceRoot);
1629
+ logger.step(`Checking out release branch: ${releaseBranch}`);
1630
+ if (!await checkoutBranch(releaseBranch, workspaceRoot)) throw new Error(`Failed to checkout branch: ${releaseBranch}`);
1631
+ if (branchExists) {
1632
+ logger.step("Pulling latest changes from remote");
1633
+ if (!await pullLatestChanges(releaseBranch, workspaceRoot)) logger.warn("Failed to pull latest changes, continuing anyway");
1634
+ }
1635
+ logger.step(`Rebasing onto ${defaultBranch}`);
1636
+ if (!await rebaseBranch(defaultBranch, workspaceRoot)) throw new Error(`Failed to rebase onto ${defaultBranch}. Please resolve conflicts manually.`);
1637
+ },
1638
+ async syncChanges(hasChanges) {
1639
+ const hasCommitted = hasChanges ? await commitChanges("chore: update release versions", workspaceRoot) : false;
1640
+ const isBranchAhead = await isBranchAheadOfRemote(releaseBranch, workspaceRoot);
1641
+ if (!hasCommitted && !isBranchAhead) {
1642
+ logger.item("No changes to commit and branch is in sync with remote");
1643
+ return false;
1644
+ }
1645
+ logger.step("Pushing changes to remote");
1646
+ if (!await pushBranch(releaseBranch, workspaceRoot, { forceWithLease: true })) throw new Error(`Failed to push changes to ${releaseBranch}. Remote may have been updated.`);
1647
+ return true;
1648
+ },
1649
+ async syncPullRequest(updates) {
1650
+ const prTitle = existingPullRequest?.title || pullRequestTitle || "chore: update package versions";
1651
+ const prBody = generatePullRequestBody(updates, pullRequestBody);
1652
+ const pullRequest = await githubClient.upsertPullRequest({
1653
+ pullNumber: existingPullRequest?.number,
1654
+ title: prTitle,
1655
+ body: prBody,
1656
+ head: releaseBranch,
1657
+ base: defaultBranch
1658
+ });
1659
+ logger.success(`${doesReleasePRExist ? "Updated" : "Created"} pull request: ${pullRequest?.html_url}`);
1660
+ return {
1661
+ pullRequest,
1662
+ created: !doesReleasePRExist
1663
+ };
1664
+ },
1665
+ async cleanup() {
1666
+ await checkoutBranch(defaultBranch, workspaceRoot);
1667
+ }
1668
+ };
822
1669
  }
823
- function calculateVersions(allPackages, changedPackages) {
824
- const updates = [];
825
- for (const [pkgName, bump] of changedPackages) {
826
- const pkg = allPackages.find((p) => p.name === pkgName);
827
- if (!pkg) continue;
828
- updates.push(createVersionUpdate(pkg, bump, true));
1670
+
1671
+ //#endregion
1672
+ //#region src/verify.ts
1673
+ async function verify(options) {
1674
+ const { workspaceRoot,...normalizedOptions } = await normalizeReleaseOptions(options);
1675
+ if (normalizedOptions.safeguards && !await isWorkingDirectoryClean(workspaceRoot)) exitWithError("Working directory is not clean. Please commit or stash your changes before proceeding.");
1676
+ const githubClient = createGitHubClient({
1677
+ owner: normalizedOptions.owner,
1678
+ repo: normalizedOptions.repo,
1679
+ githubToken: normalizedOptions.githubToken
1680
+ });
1681
+ const releaseBranch = normalizedOptions.branch.release;
1682
+ const defaultBranch = normalizedOptions.branch.default;
1683
+ const releasePr = await githubClient.getExistingPullRequest(releaseBranch);
1684
+ if (!releasePr || !releasePr.head) {
1685
+ logger.warn(`No open release pull request found for branch "${releaseBranch}". Nothing to verify.`);
1686
+ return;
1687
+ }
1688
+ logger.info(`Found release PR #${releasePr.number}. Verifying against default branch "${defaultBranch}"...`);
1689
+ const originalBranch = await getCurrentBranch(workspaceRoot);
1690
+ if (originalBranch !== defaultBranch) await checkoutBranch(defaultBranch, workspaceRoot);
1691
+ const overridesPath = join(workspaceRoot, ucdjsReleaseOverridesPath);
1692
+ let existingOverrides = {};
1693
+ try {
1694
+ const overridesContent = await readFileFromGit(workspaceRoot, releasePr.head.sha, overridesPath);
1695
+ if (overridesContent) {
1696
+ existingOverrides = JSON.parse(overridesContent);
1697
+ logger.info("Found existing version overrides file on release branch.");
1698
+ }
1699
+ } catch {
1700
+ logger.info("No version overrides file found on release branch. Continuing...");
1701
+ }
1702
+ const mainPackages = await discoverWorkspacePackages(workspaceRoot, options);
1703
+ const mainCommits = await getWorkspacePackageGroupedCommits(workspaceRoot, mainPackages);
1704
+ const { allUpdates: expectedUpdates } = await calculateAndPrepareVersionUpdates({
1705
+ workspacePackages: mainPackages,
1706
+ packageCommits: mainCommits,
1707
+ workspaceRoot,
1708
+ showPrompt: false,
1709
+ globalCommitsPerPackage: await getGlobalCommitsPerPackage(workspaceRoot, mainCommits, mainPackages, normalizedOptions.globalCommitMode),
1710
+ overrides: existingOverrides
1711
+ });
1712
+ const expectedVersionMap = new Map(expectedUpdates.map((u) => [u.package.name, u.newVersion]));
1713
+ const prVersionMap = /* @__PURE__ */ new Map();
1714
+ for (const pkg of mainPackages) {
1715
+ const pkgJsonPath = join(pkg.path.replace(workspaceRoot, ""), "package.json").substring(1);
1716
+ const pkgJsonContent = await readFileFromGit(workspaceRoot, releasePr.head.sha, pkgJsonPath);
1717
+ if (pkgJsonContent) {
1718
+ const pkgJson = JSON.parse(pkgJsonContent);
1719
+ prVersionMap.set(pkg.name, pkgJson.version);
1720
+ }
1721
+ }
1722
+ if (originalBranch !== defaultBranch) await checkoutBranch(originalBranch, workspaceRoot);
1723
+ let isOutOfSync = false;
1724
+ for (const [pkgName, expectedVersion] of expectedVersionMap.entries()) {
1725
+ const prVersion = prVersionMap.get(pkgName);
1726
+ if (!prVersion) {
1727
+ logger.warn(`Package "${pkgName}" found in default branch but not in release branch. Skipping.`);
1728
+ continue;
1729
+ }
1730
+ if (gt(expectedVersion, prVersion)) {
1731
+ logger.error(`Package "${pkgName}" is out of sync. Expected version >= ${expectedVersion}, but PR has ${prVersion}.`);
1732
+ isOutOfSync = true;
1733
+ } else logger.success(`Package "${pkgName}" is up to date (PR version: ${prVersion}, Expected: ${expectedVersion})`);
1734
+ }
1735
+ const statusContext = "ucdjs/release-verify";
1736
+ if (isOutOfSync) {
1737
+ await githubClient.setCommitStatus({
1738
+ sha: releasePr.head.sha,
1739
+ state: "failure",
1740
+ context: statusContext,
1741
+ description: "Release PR is out of sync with the default branch. Please re-run the release process."
1742
+ });
1743
+ logger.error("Verification failed. Commit status set to 'failure'.");
1744
+ } else {
1745
+ await githubClient.setCommitStatus({
1746
+ sha: releasePr.head.sha,
1747
+ state: "success",
1748
+ context: statusContext,
1749
+ description: "Release PR is up to date."
1750
+ });
1751
+ logger.success("Verification successful. Commit status set to 'success'.");
829
1752
  }
830
- return updates;
831
- }
832
- async function updatePackageJsonFiles(updates) {
833
- await Promise.all(updates.map(async (update) => {
834
- const depUpdates = getDependencyUpdates(update.package, updates);
835
- await updatePackageJson(update.package, update.newVersion, depUpdates);
836
- }));
837
1753
  }
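The out-of-sync decision in verify() above leans on semver's gt: a package fails verification only when the version expected from the default branch is strictly greater than the one found in the release PR; equal or older expectations pass. A quick sketch with hypothetical versions:

    import { gt } from "semver";

    console.log(gt("1.4.0", "1.3.0")); // true  → PR is behind the expected version, mark failure
    console.log(gt("1.4.0", "1.4.0")); // false → PR is up to date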
838
1754
 
839
1755
  //#endregion
840
- export { release };
1756
+ export { publish, release, verify };