@ucdjs/release-scripts 0.0.0 → 0.1.0-beta.10

This diff shows the published contents of two package versions as they appear in their public registry. It is provided for informational purposes only and reflects the changes between the released versions.
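For orientation before the line-by-line diff: 0.1.0-beta.10 exports a `release` function (and a still-stubbed `publish`). The sketch below is hypothetical, assembled only from the option names the bundled code reads (`repo`, `githubToken`, `packages`, `prompts`, `dryRun`, `releaseBranch`, `verbose`, `pullRequest`); it is not the package's documented API.

import { release } from "@ucdjs/release-scripts";

// Hypothetical invocation, inferred from normalizeSharedOptions() and release() below.
const result = await release({
  repo: "octocat/hello-world",            // must be "owner/repo"; anything else exits with an error
  githubToken: process.env.GITHUB_TOKEN,  // an empty token also exits with an error
  packages: true,                         // true = all workspace packages; an array or { include, exclude, excludePrivate } also appears to be accepted
  prompts: { packages: true, versions: true }, // interactive prompts are skipped when CI is set
  dryRun: false,
  releaseBranch: "release/next",          // default applied inside release()
  verbose: true,
});

if (result) {
  // release() resolves with { updates, prUrl, created } or null when there is nothing to do.
  console.log(result.prUrl, result.created);
}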
package/dist/index.mjs CHANGED
@@ -1,21 +1,41 @@
- import process from "node:process";
+ import { t as Eta } from "./eta-Boh7yPZi.mjs";
  import { getCommits } from "commit-parser";
- import createDebug from "debug";
+ import process from "node:process";
  import farver from "farver";
  import { exec } from "tinyexec";
- import { readFile, writeFile } from "node:fs/promises";
+ import { dedent } from "@luxass/utils";
  import { join } from "node:path";
+ import { readFile, writeFile } from "node:fs/promises";
  import prompts from "prompts";

- //#region src/logger.ts
- function createDebugger(namespace) {
- const debug$2 = createDebug(namespace);
- if (debug$2.enabled) return debug$2;
- }
+ //#region src/publish.ts
+ function publish(_options) {}

  //#endregion
  //#region src/utils.ts
- const globalOptions = { dryRun: false };
+ const globalOptions = {
+ dryRun: false,
+ verbose: false
+ };
+ const isCI = typeof process.env.CI === "string" && process.env.CI !== "" && process.env.CI.toLowerCase() !== "false";
+ const logger = {
+ info: (...args) => {
+ console.info(farver.cyan("[info]:"), ...args);
+ },
+ debug: (...args) => {
+ console.debug(farver.gray("[debug]:"), ...args);
+ },
+ warn: (...args) => {
+ console.warn(farver.yellow("[warn]:"), ...args);
+ },
+ error: (...args) => {
+ console.error(farver.red("[error]:"), ...args);
+ },
+ log: (...args) => {
+ if (!globalOptions.verbose) return;
+ console.log(...args);
+ }
+ };
  async function run(bin, args, opts = {}) {
  return exec(bin, args, {
  throwOnError: true,
@@ -27,19 +47,49 @@ async function run(bin, args, opts = {}) {
  });
  }
  async function dryRun(bin, args, opts) {
- return console.log(farver.blue(`[dryrun] ${bin} ${args.join(" ")}`), opts || "");
+ return logger.log(farver.blue(`[dryrun] ${bin} ${args.join(" ")}`), opts || "");
  }
  const runIfNotDry = globalOptions.dryRun ? dryRun : run;
+ function exitWithError(message, hint) {
+ logger.error(farver.bold(message));
+ if (hint) console.error(farver.gray(` ${hint}`));
+ process.exit(1);
+ }
+ function normalizeSharedOptions(options) {
+ const { workspaceRoot = process.cwd(), githubToken = "", verbose = false, repo: fullRepo, packages = true, prompts: prompts$1 = {
+ packages: true,
+ versions: true
+ },...rest } = options;
+ globalOptions.verbose = verbose;
+ if (!githubToken.trim()) exitWithError("GitHub token is required", "Set GITHUB_TOKEN environment variable or pass it in options");
+ if (!fullRepo || !fullRepo.trim() || !fullRepo.includes("/")) exitWithError("Repository (repo) is required", "Specify the repository in 'owner/repo' format (e.g., 'octocat/hello-world')");
+ const [owner, repo] = fullRepo.split("/");
+ if (!owner || !repo) exitWithError(`Invalid repo format: "${fullRepo}"`, "Expected format: \"owner/repo\" (e.g., \"octocat/hello-world\")");
+ return {
+ ...rest,
+ packages,
+ prompts: prompts$1,
+ workspaceRoot,
+ githubToken,
+ owner,
+ repo,
+ verbose
+ };
+ }

  //#endregion
  //#region src/commits.ts
- const debug$1 = createDebugger("ucdjs:release-scripts:commits");
  async function getLastPackageTag(packageName, workspaceRoot) {
- const { stdout } = await run("git", ["tag", "--list"], { nodeOptions: {
- cwd: workspaceRoot,
- stdio: "pipe"
- } });
- return stdout.split("\n").map((tag) => tag.trim()).filter(Boolean).reverse().find((tag) => tag.startsWith(`${packageName}@`));
+ try {
+ const { stdout } = await run("git", ["tag", "--list"], { nodeOptions: {
+ cwd: workspaceRoot,
+ stdio: "pipe"
+ } });
+ return stdout.split("\n").map((tag) => tag.trim()).filter(Boolean).reverse().find((tag) => tag.startsWith(`${packageName}@`));
+ } catch (err) {
+ logger.warn(`Failed to get tags for package ${packageName}: ${err.message}`);
+ return;
+ }
  }
  function determineHighestBump(commits) {
  if (commits.length === 0) return "none";
@@ -52,21 +102,43 @@ function determineHighestBump(commits) {
  }
  return highestBump;
  }
- async function getPackageCommits(pkg, workspaceRoot) {
+ /**
+ * Retrieves commits that affect a specific workspace package since its last tag.
+ *
+ * @param {string} workspaceRoot - The root directory of the workspace.
+ * @param {WorkspacePackage} pkg - The workspace package to analyze.
+ * @returns {Promise<GitCommit[]>} A promise that resolves to an array of GitCommit objects affecting the package.
+ */
+ async function getCommitsForWorkspacePackage(workspaceRoot, pkg) {
  const lastTag = await getLastPackageTag(pkg.name, workspaceRoot);
  const allCommits = getCommits({
  from: lastTag,
- to: "HEAD"
+ to: "HEAD",
+ cwd: workspaceRoot
+ });
+ logger.log(`Found ${allCommits.length} commits for ${pkg.name} since ${lastTag || "beginning"}`);
+ const touchedCommitHashes = getCommits({
+ from: lastTag,
+ to: "HEAD",
+ cwd: workspaceRoot,
+ folder: pkg.path
  });
- debug$1?.(`Found ${allCommits.length} commits for ${pkg.name} since ${lastTag || "beginning"}`);
- const touchedCommitHashes = await getCommitsTouchingPackage(lastTag || "HEAD", "HEAD", pkg.path, workspaceRoot);
  const touchedSet = new Set(touchedCommitHashes);
- const packageCommits = allCommits.filter((commit) => touchedSet.has(commit.shortHash));
- debug$1?.(`${packageCommits.length} commits affect ${pkg.name}`);
+ const packageCommits = allCommits.filter((commit) => touchedSet.has(commit));
+ logger.log(`${packageCommits.length} commits affect ${pkg.name}`);
  return packageCommits;
  }
- async function analyzePackageCommits(pkg, workspaceRoot) {
- return determineHighestBump(await getPackageCommits(pkg, workspaceRoot));
+ async function getWorkspacePackageCommits(workspaceRoot, packages) {
+ const changedPackages = /* @__PURE__ */ new Map();
+ const promises = packages.map(async (pkg) => {
+ return {
+ pkgName: pkg.name,
+ commits: await getCommitsForWorkspacePackage(workspaceRoot, pkg)
+ };
+ });
+ const results = await Promise.all(promises);
+ for (const { pkgName, commits } of results) changedPackages.set(pkgName, commits);
+ return changedPackages;
  }
  function determineBumpType(commit) {
  if (commit.isBreaking) return "major";
@@ -86,148 +158,6 @@ function determineBumpType(commit) {
  default: return "none";
  }
  }
- async function getCommitsTouchingPackage(from, to, packagePath, workspaceRoot) {
- try {
- const { stdout } = await run("git", [
- "log",
- "--pretty=format:%h",
- from === "HEAD" ? "HEAD" : `${from}...${to}`,
- "--",
- packagePath
- ], { nodeOptions: {
- cwd: workspaceRoot,
- stdio: "pipe"
- } });
- return stdout.split("\n").map((line) => line.trim()).filter(Boolean);
- } catch (error) {
- debug$1?.(`Error getting commits touching package: ${error}`);
- return [];
- }
- }
-
- //#endregion
- //#region src/validation.ts
- /**
- * Validation utilities for release scripts
- */
- function isValidSemver(version) {
- return /^\d+\.\d+\.\d+(?:[-+].+)?$/.test(version);
- }
- function validateSemver(version) {
- if (!isValidSemver(version)) throw new Error(`Invalid semver version: ${version}`);
- }
-
- //#endregion
- //#region src/version.ts
- /**
- * Calculate the new version based on current version and bump type
- * Pure function - no side effects, easily testable
- */
- function calculateNewVersion(currentVersion, bump) {
- if (bump === "none") return currentVersion;
- validateSemver(currentVersion);
- const match = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)(.*)$/);
- if (!match) throw new Error(`Invalid semver version: ${currentVersion}`);
- const [, major, minor, patch, suffix] = match;
- let newMajor = Number.parseInt(major, 10);
- let newMinor = Number.parseInt(minor, 10);
- let newPatch = Number.parseInt(patch, 10);
- switch (bump) {
- case "major":
- newMajor += 1;
- newMinor = 0;
- newPatch = 0;
- break;
- case "minor":
- newMinor += 1;
- newPatch = 0;
- break;
- case "patch":
- newPatch += 1;
- break;
- }
- return `${newMajor}.${newMinor}.${newPatch}`;
- }
- /**
- * Create a version update object
- */
- function createVersionUpdate(pkg, bump, hasDirectChanges) {
- const newVersion = calculateNewVersion(pkg.version, bump);
- return {
- package: pkg,
- currentVersion: pkg.version,
- newVersion,
- bumpType: bump,
- hasDirectChanges
- };
- }
- /**
- * Update a package.json file with new version and dependency versions
- */
- async function updatePackageJson(pkg, newVersion, dependencyUpdates) {
- const packageJsonPath = join(pkg.path, "package.json");
- const content = await readFile(packageJsonPath, "utf-8");
- const packageJson = JSON.parse(content);
- packageJson.version = newVersion;
- for (const [depName, depVersion] of dependencyUpdates) {
- if (packageJson.dependencies?.[depName]) {
- if (packageJson.dependencies[depName] === "workspace:*") continue;
- packageJson.dependencies[depName] = `^${depVersion}`;
- }
- if (packageJson.devDependencies?.[depName]) {
- if (packageJson.devDependencies[depName] === "workspace:*") continue;
- packageJson.devDependencies[depName] = `^${depVersion}`;
- }
- if (packageJson.peerDependencies?.[depName]) {
- if (packageJson.peerDependencies[depName] === "workspace:*") continue;
- packageJson.peerDependencies[depName] = `^${depVersion}`;
- }
- }
- await writeFile(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf-8");
- }
- /**
- * Get all dependency updates needed for a package
- */
- function getDependencyUpdates(pkg, allUpdates) {
- const updates = /* @__PURE__ */ new Map();
- const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
- for (const dep of allDeps) {
- const update = allUpdates.find((u) => u.package.name === dep);
- if (update) updates.set(dep, update.newVersion);
- }
- return updates;
- }
-
- //#endregion
- //#region src/dependencies.ts
- /**
- * Pure function: Determine which packages need updates due to dependency changes
- *
- * When a package is updated, all packages that depend on it should also be updated.
- * This function calculates which additional packages need patch bumps.
- *
- * @param updateOrder - Packages in topological order with their dependency levels
- * @param directUpdates - Packages with direct code changes
- * @returns All updates including dependent packages
- */
- function createDependentUpdates(updateOrder, directUpdates) {
- const allUpdates = [...directUpdates];
- const updatedPackages = new Set(directUpdates.map((u) => u.package.name));
- for (const { package: pkg } of updateOrder) {
- if (updatedPackages.has(pkg.name)) continue;
- if (hasUpdatedDependencies(pkg, updatedPackages)) {
- allUpdates.push(createVersionUpdate(pkg, "patch", false));
- updatedPackages.add(pkg.name);
- }
- }
- return allUpdates;
- }
- /**
- * Pure function: Check if a package has any updated dependencies
- */
- function hasUpdatedDependencies(pkg, updatedPackages) {
- return [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies].some((dep) => updatedPackages.has(dep));
- }

  //#endregion
  //#region src/git.ts
@@ -244,7 +174,7 @@ async function isWorkingDirectoryClean(workspaceRoot) {
  } })).stdout.trim() !== "") return false;
  return true;
  } catch (err) {
- console.error("Error checking git status:", err);
+ logger.error("Error checking git status:", err);
  return false;
  }
  }
@@ -297,12 +227,20 @@ async function pullLatestChanges(branch, workspaceRoot) {
  * @param workspaceRoot - The root directory of the workspace
  */
  async function createBranch(branch, base, workspaceRoot) {
- await runIfNotDry("git", [
- "checkout",
- "-b",
- branch,
- base
- ], { nodeOptions: { cwd: workspaceRoot } });
+ try {
+ logger.info(`Creating branch: ${farver.green(branch)} from ${farver.cyan(base)}`);
+ await runIfNotDry("git", [
+ "checkout",
+ "-b",
+ branch,
+ base
+ ], { nodeOptions: {
+ cwd: workspaceRoot,
+ stdio: "pipe"
+ } });
+ } catch {
+ exitWithError(`Failed to create branch: ${branch}`, `Make sure the branch doesn't already exist and you have a clean working directory`);
+ }
  }
  /**
  * Checkout a git branch
@@ -312,7 +250,11 @@ async function createBranch(branch, base, workspaceRoot) {
  */
  async function checkoutBranch(branch, workspaceRoot) {
  try {
- await run("git", ["checkout", branch], { nodeOptions: { cwd: workspaceRoot } });
+ logger.info(`Switching to branch: ${farver.green(branch)}`);
+ await run("git", ["checkout", branch], { nodeOptions: {
+ cwd: workspaceRoot,
+ stdio: "pipe"
+ } });
  return true;
  } catch {
  return false;
@@ -339,7 +281,36 @@ async function getCurrentBranch(workspaceRoot) {
  * @param workspaceRoot - The root directory of the workspace
  */
  async function rebaseBranch(ontoBranch, workspaceRoot) {
- await run("git", ["rebase", ontoBranch], { nodeOptions: { cwd: workspaceRoot } });
+ try {
+ logger.info(`Rebasing onto: ${farver.cyan(ontoBranch)}`);
+ await run("git", ["rebase", ontoBranch], { nodeOptions: {
+ cwd: workspaceRoot,
+ stdio: "pipe"
+ } });
+ } catch {
+ exitWithError(`Failed to rebase onto: ${ontoBranch}`, `You may have merge conflicts. Run 'git rebase --abort' to undo the rebase`);
+ }
+ }
+ /**
+ * Check if local branch is ahead of remote (has commits to push)
+ * @param branch - The branch name to check
+ * @param workspaceRoot - The root directory of the workspace
+ * @returns Promise resolving to true if local is ahead, false otherwise
+ */
+ async function isBranchAheadOfRemote(branch, workspaceRoot) {
+ try {
+ const result = await run("git", [
+ "rev-list",
+ `origin/${branch}..${branch}`,
+ "--count"
+ ], { nodeOptions: {
+ cwd: workspaceRoot,
+ stdio: "pipe"
+ } });
+ return Number.parseInt(result.stdout.trim(), 10) > 0;
+ } catch {
+ return true;
+ }
  }
  /**
  * Check if there are any changes to commit (staged or unstaged)
@@ -359,14 +330,25 @@ async function hasChangesToCommit(workspaceRoot) {
  * @returns Promise resolving to true if commit was made, false if there were no changes
  */
  async function commitChanges(message, workspaceRoot) {
- await run("git", ["add", "."], { nodeOptions: { cwd: workspaceRoot } });
- if (!await hasChangesToCommit(workspaceRoot)) return false;
- await run("git", [
- "commit",
- "-m",
- message
- ], { nodeOptions: { cwd: workspaceRoot } });
- return true;
+ try {
+ await run("git", ["add", "."], { nodeOptions: {
+ cwd: workspaceRoot,
+ stdio: "pipe"
+ } });
+ if (!await hasChangesToCommit(workspaceRoot)) return false;
+ logger.info(`Committing changes: ${farver.dim(message)}`);
+ await run("git", [
+ "commit",
+ "-m",
+ message
+ ], { nodeOptions: {
+ cwd: workspaceRoot,
+ stdio: "pipe"
+ } });
+ return true;
+ } catch {
+ exitWithError(`Failed to commit changes`, `Make sure you have git configured properly with user.name and user.email`);
+ }
  }
  /**
  * Push branch to remote
@@ -377,48 +359,33 @@ async function commitChanges(message, workspaceRoot) {
  * @param options.forceWithLease - Force push with safety check (won't overwrite unexpected changes)
  */
  async function pushBranch(branch, workspaceRoot, options) {
- const args = [
- "push",
- "origin",
- branch
- ];
- if (options?.forceWithLease) args.push("--force-with-lease");
- else if (options?.force) args.push("--force");
- await run("git", args, { nodeOptions: { cwd: workspaceRoot } });
- }
- /**
- * Generate PR body from version updates
- * @param updates - Array of version updates to include in the PR body
- * @returns Formatted PR body as a string
- */
- function generatePRBody(updates) {
- const lines = [];
- lines.push("## Packages");
- lines.push("");
- const directChanges = updates.filter((u) => u.hasDirectChanges);
- const dependencyUpdates = updates.filter((u) => !u.hasDirectChanges);
- if (directChanges.length > 0) {
- lines.push("### Direct Changes");
- lines.push("");
- for (const update of directChanges) lines.push(`- **${update.package.name}**: ${update.currentVersion} → ${update.newVersion} (${update.bumpType})`);
- lines.push("");
- }
- if (dependencyUpdates.length > 0) {
- lines.push("### Dependency Updates");
- lines.push("");
- for (const update of dependencyUpdates) lines.push(`- **${update.package.name}**: ${update.currentVersion} → ${update.newVersion} (dependencies changed)`);
- lines.push("");
- }
- lines.push("---");
- lines.push("");
- lines.push("This release PR was automatically generated.");
- return lines.join("\n");
+ try {
+ const args = [
+ "push",
+ "origin",
+ branch
+ ];
+ if (options?.forceWithLease) {
+ args.push("--force-with-lease");
+ logger.info(`Pushing branch: ${farver.green(branch)} ${farver.dim("(with lease)")}`);
+ } else if (options?.force) {
+ args.push("--force");
+ logger.info(`Force pushing branch: ${farver.green(branch)}`);
+ } else logger.info(`Pushing branch: ${farver.green(branch)}`);
+ await run("git", args, { nodeOptions: {
+ cwd: workspaceRoot,
+ stdio: "pipe"
+ } });
+ } catch {
+ exitWithError(`Failed to push branch: ${branch}`, `Make sure you have permission to push to the remote repository`);
+ }
  }

  //#endregion
  //#region src/github.ts
  async function getExistingPullRequest({ owner, repo, branch, githubToken }) {
  try {
+ logger.debug(`Requesting pull request for branch: ${branch} (url: https://api.github.com/repos/${owner}/${repo}/pulls?state=open&head=${branch})`);
  const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/pulls?state=open&head=${branch}`, { headers: {
  Accept: "application/vnd.github.v3+json",
  Authorization: `token ${githubToken}`
@@ -435,10 +402,10 @@ async function getExistingPullRequest({ owner, repo, branch, githubToken }) {
  draft: firstPullRequest.draft,
  html_url: firstPullRequest.html_url
  };
- console.info(`Found existing pull request: ${farver.yellow(`#${pullRequest.number}`)}`);
+ logger.info(`Found existing pull request: ${farver.yellow(`#${pullRequest.number}`)}`);
  return pullRequest;
  } catch (err) {
- console.error("Error fetching pull request:", err);
+ logger.error("Error fetching pull request:", err);
  return null;
  }
  }
@@ -456,6 +423,7 @@ async function upsertPullRequest({ owner, repo, title, body, head, base, pullNum
  head,
  base
  };
+ logger.debug(`${isUpdate ? "Updating" : "Creating"} pull request (url: ${url})`);
  const res = await fetch(url, {
  method,
  headers: {
@@ -468,7 +436,7 @@ async function upsertPullRequest({ owner, repo, title, body, head, base, pullNum
  const pr = await res.json();
  if (typeof pr !== "object" || pr === null || !("number" in pr) || typeof pr.number !== "number" || !("title" in pr) || typeof pr.title !== "string" || !("body" in pr) || typeof pr.body !== "string" || !("draft" in pr) || typeof pr.draft !== "boolean" || !("html_url" in pr) || typeof pr.html_url !== "string") throw new TypeError("Pull request data validation failed");
  const action = isUpdate ? "Updated" : "Created";
- console.info(`${action} pull request: ${farver.yellow(`#${pr.number}`)}`);
+ logger.info(`${action} pull request: ${farver.yellow(`#${pr.number}`)}`);
  return {
  number: pr.number,
  title: pr.title,
@@ -477,30 +445,63 @@ async function upsertPullRequest({ owner, repo, title, body, head, base, pullNum
  html_url: pr.html_url
  };
  } catch (err) {
- console.error(`Error upserting pull request:`, err);
+ logger.error(`Error upserting pull request:`, err);
  throw err;
  }
  }
+ const defaultTemplate = dedent`
+ This PR was automatically generated by the release script.
+
+ The following packages have been prepared for release:
+
+ <% it.packages.forEach((pkg) => { %>
+ - **<%= pkg.name %>**: <%= pkg.currentVersion %> → <%= pkg.newVersion %> (<%= pkg.bumpType %>)
+ <% }) %>
+
+ Please review the changes and merge when ready.
+
+ For a more in-depth look at the changes, please refer to the individual package changelogs.
+
+ > [!NOTE]
+ > When this PR is merged, the release process will be triggered automatically, publishing the new package versions to the registry.
+ `;
+ function dedentString(str) {
+ const lines = str.split("\n");
+ const minIndent = lines.filter((line) => line.trim().length > 0).reduce((min, line) => Math.min(min, line.search(/\S/)), Infinity);
+ return lines.map((line) => minIndent === Infinity ? line : line.slice(minIndent)).join("\n").trim();
+ }
+ function generatePullRequestBody(updates, body) {
+ const eta = new Eta();
+ const bodyTemplate = body ? dedentString(body) : defaultTemplate;
+ return eta.renderString(bodyTemplate, { packages: updates.map((u) => ({
+ name: u.package.name,
+ currentVersion: u.currentVersion,
+ newVersion: u.newVersion,
+ bumpType: u.bumpType,
+ hasDirectChanges: u.hasDirectChanges
+ })) });
+ }

  //#endregion
  //#region src/prompts.ts
- async function promptPackageSelection(packages) {
+ async function selectPackagePrompt(packages) {
  const response = await prompts({
  type: "multiselect",
  name: "selectedPackages",
  message: "Select packages to release",
  choices: packages.map((pkg) => ({
- title: `${pkg.name} (${pkg.version})`,
+ title: `${pkg.name} (${farver.bold(pkg.version)})`,
  value: pkg.name,
  selected: true
  })),
  min: 1,
- hint: "Space to select/deselect. Return to submit."
+ hint: "Space to select/deselect. Return to submit.",
+ instructions: false
  });
- if (!response.selectedPackages || response.selectedPackages.length === 0) throw new Error("No packages selected");
+ if (!response.selectedPackages || response.selectedPackages.length === 0) return [];
  return response.selectedPackages;
  }
- async function promptVersionOverride(packageName, currentVersion, suggestedVersion, suggestedBumpType) {
+ async function promptVersionOverride(pkg, workspaceRoot, currentVersion, suggestedVersion, suggestedBumpType) {
  const choices = [{
  title: `Use suggested: ${suggestedVersion} (${suggestedBumpType})`,
  value: "suggested"
@@ -510,7 +511,7 @@ async function promptVersionOverride(packageName, currentVersi
  "minor",
  "major"
  ]) if (bumpType !== suggestedBumpType) {
- const version = calculateNewVersion(currentVersion, bumpType);
+ const version = getNextVersion(currentVersion, bumpType);
  choices.push({
  title: `${bumpType}: ${version}`,
  value: bumpType
@@ -523,7 +524,7 @@ async function promptVersionOverride(packageName, currentVersi
  const response = await prompts([{
  type: "select",
  name: "choice",
- message: `${packageName} (${currentVersion}):`,
+ message: `${pkg.name} (${currentVersion}):`,
  choices,
  initial: 0
  }, {
@@ -537,68 +538,134 @@ async function promptVersionOverride(packageName, currentVersi
  }]);
  if (response.choice === "suggested") return suggestedVersion;
  else if (response.choice === "custom") return response.customVersion;
- else return calculateNewVersion(currentVersion, response.choice);
- }
- async function promptVersionOverrides(packages) {
- const overrides = /* @__PURE__ */ new Map();
- for (const pkg of packages) {
- const newVersion = await promptVersionOverride(pkg.name, pkg.currentVersion, pkg.suggestedVersion, pkg.bumpType);
- overrides.set(pkg.name, newVersion);
- }
- return overrides;
+ else return getNextVersion(currentVersion, response.choice);
  }

  //#endregion
- //#region src/workspace.ts
- const debug = createDebugger("ucdjs:release-scripts:workspace");
- function shouldIncludePackage(pkg, options) {
- if (!options) return true;
- if (options.excludePrivate && pkg.private) return false;
- if (options.included && options.included.length > 0) {
- if (!options.included.includes(pkg.name)) return false;
+ //#region src/version.ts
+ function isValidSemver(version) {
+ return /^\d+\.\d+\.\d+(?:[-+].+)?$/.test(version);
+ }
+ function validateSemver(version) {
+ if (!isValidSemver(version)) throw new Error(`Invalid semver version: ${version}`);
+ }
+ function getNextVersion(currentVersion, bump) {
+ if (bump === "none") return currentVersion;
+ validateSemver(currentVersion);
+ const match = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)(.*)$/);
+ if (!match) throw new Error(`Invalid semver version: ${currentVersion}`);
+ const [, major, minor, patch] = match;
+ let newMajor = Number.parseInt(major, 10);
+ let newMinor = Number.parseInt(minor, 10);
+ let newPatch = Number.parseInt(patch, 10);
+ switch (bump) {
+ case "major":
+ newMajor += 1;
+ newMinor = 0;
+ newPatch = 0;
+ break;
+ case "minor":
+ newMinor += 1;
+ newPatch = 0;
+ break;
+ case "patch":
+ newPatch += 1;
+ break;
  }
- if (options.excluded?.includes(pkg.name)) return false;
- return true;
+ return `${newMajor}.${newMinor}.${newPatch}`;
  }
- async function findWorkspacePackages(workspaceRoot, options) {
- const result = await run("pnpm", [
- "-r",
- "ls",
- "--json"
- ], { nodeOptions: {
- cwd: workspaceRoot,
- stdio: "pipe"
- } });
- const rawProjects = JSON.parse(result.stdout);
- const packages = [];
- const allPackageNames = new Set(rawProjects.map((p) => p.name));
- for (const rawProject of rawProjects) {
- const content = await readFile(join(rawProject.path, "package.json"), "utf-8");
- const packageJson = JSON.parse(content);
- if (!shouldIncludePackage(packageJson, options)) {
- debug?.(`Excluding package ${rawProject.name}`);
+ /**
+ * Create a version update object
+ */
+ function createVersionUpdate(pkg, bump, hasDirectChanges) {
+ const newVersion = getNextVersion(pkg.version, bump);
+ return {
+ package: pkg,
+ currentVersion: pkg.version,
+ newVersion,
+ bumpType: bump,
+ hasDirectChanges
+ };
+ }
+ /**
+ * Infer version updates from package commits with optional interactive overrides
+ *
+ * @param workspacePackages - All workspace packages
+ * @param packageCommits - Map of package names to their commits
+ * @param workspaceRoot - Root directory for prompts
+ * @param showPrompt - Whether to show prompts for version overrides
+ * @returns Version updates for packages with changes
+ */
+ async function inferVersionUpdates(workspacePackages, packageCommits, workspaceRoot, showPrompt) {
+ const versionUpdates = [];
+ for (const [pkgName, commits] of packageCommits) {
+ if (commits.length === 0) continue;
+ const pkg = workspacePackages.find((p) => p.name === pkgName);
+ if (!pkg) continue;
+ const bump = determineHighestBump(commits);
+ if (bump === "none") {
+ logger.info(`No version bump needed for package ${pkg.name}`);
  continue;
  }
- const workspaceDeps = extractWorkspaceDependencies(rawProject.dependencies, allPackageNames);
- const workspaceDevDeps = extractWorkspaceDependencies(rawProject.devDependencies, allPackageNames);
- packages.push({
- name: rawProject.name,
- version: rawProject.version,
- path: rawProject.path,
- packageJson,
- workspaceDependencies: workspaceDeps,
- workspaceDevDependencies: workspaceDevDeps
+ let newVersion = getNextVersion(pkg.version, bump);
+ if (!isCI && showPrompt) newVersion = await promptVersionOverride(pkg, workspaceRoot, pkg.version, newVersion, bump);
+ versionUpdates.push({
+ package: pkg,
+ currentVersion: pkg.version,
+ newVersion,
+ bumpType: bump,
+ hasDirectChanges: true
  });
  }
- return packages;
+ return versionUpdates;
  }
- function extractWorkspaceDependencies(dependencies, workspacePackages) {
- if (!dependencies) return [];
- return Object.keys(dependencies).filter((dep) => {
- return workspacePackages.has(dep);
- });
+ async function updatePackageJson(pkg, newVersion, dependencyUpdates) {
+ const packageJsonPath = join(pkg.path, "package.json");
+ const content = await readFile(packageJsonPath, "utf-8");
+ const packageJson = JSON.parse(content);
+ packageJson.version = newVersion;
+ for (const [depName, depVersion] of dependencyUpdates) {
+ if (packageJson.dependencies?.[depName]) {
+ if (packageJson.dependencies[depName] === "workspace:*") continue;
+ packageJson.dependencies[depName] = `^${depVersion}`;
+ }
+ if (packageJson.devDependencies?.[depName]) {
+ if (packageJson.devDependencies[depName] === "workspace:*") continue;
+ packageJson.devDependencies[depName] = `^${depVersion}`;
+ }
+ if (packageJson.peerDependencies?.[depName]) {
+ if (packageJson.peerDependencies[depName] === "workspace:*") continue;
+ packageJson.peerDependencies[depName] = `^${depVersion}`;
+ }
+ }
+ await writeFile(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf-8");
+ }
+ /**
+ * Get all dependency updates needed for a package
+ */
+ function getDependencyUpdates(pkg, allUpdates) {
+ const updates = /* @__PURE__ */ new Map();
+ const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
+ for (const dep of allDeps) {
+ const update = allUpdates.find((u) => u.package.name === dep);
+ if (update) updates.set(dep, update.newVersion);
+ }
+ return updates;
  }
- function buildDependencyGraph(packages) {
+
+ //#endregion
+ //#region src/package.ts
+ /**
+ * Build a dependency graph from workspace packages
+ *
+ * Creates a bidirectional graph that maps:
+ * - packages: Map of package name → WorkspacePackage
+ * - dependents: Map of package name → Set of packages that depend on it
+ *
+ * @param packages - All workspace packages
+ * @returns Dependency graph with packages and dependents maps
+ */
+ function buildPackageDependencyGraph(packages) {
  const packagesMap = /* @__PURE__ */ new Map();
  const dependents = /* @__PURE__ */ new Map();
  for (const pkg of packages) {
@@ -617,136 +684,218 @@ function buildDependencyGraph(packages) {
  dependents
  };
  }
- function getPackageUpdateOrder(graph, changedPackages) {
- const result = [];
- const visited = /* @__PURE__ */ new Set();
- const toUpdate = new Set(changedPackages);
- const packagesToProcess = new Set(changedPackages);
- for (const pkg of changedPackages) {
- const deps = graph.dependents.get(pkg);
- if (deps) for (const dep of deps) {
- packagesToProcess.add(dep);
- toUpdate.add(dep);
- }
+ /**
+ * Get all packages affected by changes (including transitive dependents)
+ *
+ * Uses graph traversal to find all packages that need updates:
+ * - Packages with direct changes
+ * - All packages that depend on changed packages (transitively)
+ *
+ * @param graph - Dependency graph
+ * @param changedPackages - Set of package names with direct changes
+ * @returns Set of all package names that need updates
+ */
+ function getAllAffectedPackages(graph, changedPackages) {
+ const affected = /* @__PURE__ */ new Set();
+ function visitDependents(pkgName) {
+ if (affected.has(pkgName)) return;
+ affected.add(pkgName);
+ const dependents = graph.dependents.get(pkgName);
+ if (dependents) for (const dependent of dependents) visitDependents(dependent);
  }
- function visit(pkgName, level) {
- if (visited.has(pkgName)) return;
- visited.add(pkgName);
- const pkg = graph.packages.get(pkgName);
- if (!pkg) return;
- const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
- let maxDepLevel = level;
- for (const dep of allDeps) if (toUpdate.has(dep)) {
- visit(dep, level);
- const depResult = result.find((r) => r.package.name === dep);
- if (depResult && depResult.level >= maxDepLevel) maxDepLevel = depResult.level + 1;
- }
- result.push({
- package: pkg,
- level: maxDepLevel
- });
+ for (const pkg of changedPackages) visitDependents(pkg);
+ return affected;
+ }
+ /**
+ * Create version updates for all packages affected by dependency changes
+ *
+ * When a package is updated, all packages that depend on it should also be updated.
+ * This function calculates which additional packages need patch bumps due to dependency changes.
+ *
+ * @param graph - Dependency graph
+ * @param workspacePackages - All workspace packages
+ * @param directUpdates - Packages with direct code changes
+ * @returns All updates including dependent packages that need patch bumps
+ */
+ function createDependentUpdates(graph, workspacePackages, directUpdates) {
+ const allUpdates = [...directUpdates];
+ const directUpdateMap = new Map(directUpdates.map((u) => [u.package.name, u]));
+ const affectedPackages = getAllAffectedPackages(graph, new Set(directUpdates.map((u) => u.package.name)));
+ for (const pkgName of affectedPackages) {
+ if (directUpdateMap.has(pkgName)) continue;
+ const pkg = workspacePackages.find((p) => p.name === pkgName);
+ if (!pkg) continue;
+ allUpdates.push(createVersionUpdate(pkg, "patch", false));
  }
- for (const pkg of toUpdate) visit(pkg, 0);
- result.sort((a, b) => a.level - b.level);
- return result;
+ return allUpdates;
+ }
+ /**
+ * Update all package.json files with new versions and dependency updates
+ *
+ * Updates are performed in parallel for better performance.
+ *
+ * @param updates - Version updates to apply
+ */
+ async function updateAllPackageJsonFiles(updates) {
+ await Promise.all(updates.map(async (update) => {
+ const depUpdates = getDependencyUpdates(update.package, updates);
+ await updatePackageJson(update.package, update.newVersion, depUpdates);
+ }));
  }

  //#endregion
- //#region src/release.ts
- const isCI = process.env.CI === "true";
- async function release(options) {
- const { dryRun: dryRun$1 = false, safeguards = true, workspaceRoot = process.cwd(), releaseBranch = "release/next", githubToken } = options;
- globalOptions.dryRun = dryRun$1;
- if (githubToken.trim() === "" || githubToken == null) throw new Error("GitHub token is required");
- const [owner, repo] = options.repo.split("/");
- if (!owner || !repo) throw new Error(`Invalid repo format: ${options.repo}. Expected "owner/repo".`);
- if (safeguards && !isWorkingDirectoryClean(workspaceRoot)) {
- console.error("Working directory is not clean. Please commit or stash your changes before proceeding.");
- return null;
+ //#region src/workspace.ts
+ async function discoverWorkspacePackages(workspaceRoot, options) {
+ let workspaceOptions;
+ let explicitPackages;
+ if (options.packages == null || options.packages === true) workspaceOptions = { excludePrivate: false };
+ else if (Array.isArray(options.packages)) {
+ workspaceOptions = {
+ excludePrivate: false,
+ include: options.packages
+ };
+ explicitPackages = options.packages;
+ } else {
+ workspaceOptions = options.packages;
+ if (options.packages.include) explicitPackages = options.packages.include;
+ }
+ let workspacePackages = await findWorkspacePackages(workspaceRoot, workspaceOptions);
+ if (explicitPackages) {
+ const foundNames = new Set(workspacePackages.map((p) => p.name));
+ const missing = explicitPackages.filter((p) => !foundNames.has(p));
+ if (missing.length > 0) exitWithError(`Package${missing.length > 1 ? "s" : ""} not found in workspace: ${missing.join(", ")}`, "Check your package names or run 'pnpm ls' to see available packages");
  }
- const { workspacePackages, packagesToAnalyze: initialPackages } = await discoverPackages(workspaceRoot, options);
- if (initialPackages.length === 0) return null;
  const isPackagePromptEnabled = options.prompts?.packages !== false;
- const isPackagesPreConfigured = Array.isArray(options.packages) || typeof options.packages === "object" && options.packages.included != null;
- let packagesToAnalyze = initialPackages;
- if (!isCI && isPackagePromptEnabled && !isPackagesPreConfigured) {
- const selectedNames = await promptPackageSelection(initialPackages);
- packagesToAnalyze = initialPackages.filter((pkg) => selectedNames.includes(pkg.name));
- }
- const changedPackages = await analyzeCommits(packagesToAnalyze, workspaceRoot);
- if (changedPackages.size === 0) throw new Error("No packages have changes requiring a release");
- let versionUpdates = calculateVersions(workspacePackages, changedPackages);
- const isVersionPromptEnabled = options.prompts?.versions !== false;
- if (!isCI && isVersionPromptEnabled) {
- const versionOverrides = await promptVersionOverrides(versionUpdates.map((u) => ({
- name: u.package.name,
- currentVersion: u.currentVersion,
- suggestedVersion: u.newVersion,
- bumpType: u.bumpType
- })));
- versionUpdates = versionUpdates.map((update) => {
- const overriddenVersion = versionOverrides.get(update.package.name);
- if (overriddenVersion && overriddenVersion !== update.newVersion) return {
- ...update,
- newVersion: overriddenVersion
+ if (!isCI && isPackagePromptEnabled && !explicitPackages) {
+ const selectedNames = await selectPackagePrompt(workspacePackages);
+ workspacePackages = workspacePackages.filter((pkg) => selectedNames.includes(pkg.name));
+ }
+ return workspacePackages;
+ }
+ async function findWorkspacePackages(workspaceRoot, options) {
+ try {
+ const result = await run("pnpm", [
+ "-r",
+ "ls",
+ "--json"
+ ], { nodeOptions: {
+ cwd: workspaceRoot,
+ stdio: "pipe"
+ } });
+ const rawProjects = JSON.parse(result.stdout);
+ const allPackageNames = new Set(rawProjects.map((p) => p.name));
+ const excludedPackages = /* @__PURE__ */ new Set();
+ const promises = rawProjects.map(async (rawProject) => {
+ const content = await readFile(join(rawProject.path, "package.json"), "utf-8");
+ const packageJson = JSON.parse(content);
+ if (!shouldIncludePackage(packageJson, options)) {
+ excludedPackages.add(rawProject.name);
+ return null;
+ }
+ return {
+ name: rawProject.name,
+ version: rawProject.version,
+ path: rawProject.path,
+ packageJson,
+ workspaceDependencies: Object.keys(rawProject.dependencies || []).filter((dep) => {
+ return allPackageNames.has(dep);
+ }),
+ workspaceDevDependencies: Object.keys(rawProject.devDependencies || []).filter((dep) => {
+ return allPackageNames.has(dep);
+ })
  };
- return update;
  });
+ const packages = await Promise.all(promises);
+ if (excludedPackages.size > 0) logger.info(`Excluded packages: ${farver.green(Array.from(excludedPackages).join(", "))}`);
+ return packages.filter((pkg) => pkg !== null);
+ } catch (err) {
+ logger.error("Error discovering workspace packages:", err);
+ throw err;
+ }
+ }
+ function shouldIncludePackage(pkg, options) {
+ if (!options) return true;
+ if (options.excludePrivate && pkg.private) return false;
+ if (options.include && options.include.length > 0) {
+ if (!options.include.includes(pkg.name)) return false;
+ }
+ if (options.exclude?.includes(pkg.name)) return false;
+ return true;
+ }
+
+ //#endregion
+ //#region src/release.ts
+ async function release(options) {
+ const normalizedOptions = normalizeSharedOptions(options);
+ normalizedOptions.dryRun ??= false;
+ normalizedOptions.releaseBranch ??= "release/next";
+ normalizedOptions.safeguards ??= true;
+ globalOptions.dryRun = normalizedOptions.dryRun;
+ const workspaceRoot = normalizedOptions.workspaceRoot;
+ if (normalizedOptions.safeguards && !await isWorkingDirectoryClean(workspaceRoot)) exitWithError("Working directory is not clean. Please commit or stash your changes before proceeding.");
+ const workspacePackages = await discoverWorkspacePackages(workspaceRoot, options);
+ if (workspacePackages.length === 0) {
+ logger.log("No packages found to release.");
+ return null;
  }
- const allUpdates = createDependentUpdates(getPackageUpdateOrder(buildDependencyGraph(workspacePackages), new Set(versionUpdates.map((u) => u.package.name))), versionUpdates);
+ const versionUpdates = await inferVersionUpdates(workspacePackages, await getWorkspacePackageCommits(workspaceRoot, workspacePackages), workspaceRoot, options.prompts?.versions !== false);
+ if (versionUpdates.length === 0) logger.warn("No packages have changes requiring a release");
+ const allUpdates = createDependentUpdates(buildPackageDependencyGraph(workspacePackages), workspacePackages, versionUpdates);
  const currentBranch = await getCurrentBranch(workspaceRoot);
  const existingPullRequest = await getExistingPullRequest({
- owner,
- repo,
- branch: releaseBranch,
- githubToken
+ owner: normalizedOptions.owner,
+ repo: normalizedOptions.repo,
+ branch: normalizedOptions.releaseBranch,
+ githubToken: normalizedOptions.githubToken
  });
  const prExists = !!existingPullRequest;
- if (prExists) console.log("Existing pull request found:", existingPullRequest.html_url);
- else console.log("No existing pull request found, will create new one");
- const branchExists = await doesBranchExist(releaseBranch, workspaceRoot);
+ if (prExists) logger.log("Existing pull request found:", existingPullRequest.html_url);
+ else logger.log("No existing pull request found, will create new one");
+ const branchExists = await doesBranchExist(normalizedOptions.releaseBranch, workspaceRoot);
  if (!branchExists) {
- console.log("Creating release branch:", releaseBranch);
- await createBranch(releaseBranch, currentBranch, workspaceRoot);
+ logger.log("Creating release branch:", normalizedOptions.releaseBranch);
+ await createBranch(normalizedOptions.releaseBranch, currentBranch, workspaceRoot);
  }
- if (!await checkoutBranch(releaseBranch, workspaceRoot)) throw new Error(`Failed to checkout branch: ${releaseBranch}`);
+ if (!await checkoutBranch(normalizedOptions.releaseBranch, workspaceRoot)) throw new Error(`Failed to checkout branch: ${normalizedOptions.releaseBranch}`);
  if (branchExists) {
- console.log("Pulling latest changes from remote");
- if (!await pullLatestChanges(releaseBranch, workspaceRoot)) console.log("Warning: Failed to pull latest changes, continuing anyway");
+ logger.log("Pulling latest changes from remote");
+ if (!await pullLatestChanges(normalizedOptions.releaseBranch, workspaceRoot)) logger.log("Warning: Failed to pull latest changes, continuing anyway");
  }
- console.log("Rebasing release branch onto", currentBranch);
+ logger.log("Rebasing release branch onto", currentBranch);
  await rebaseBranch(currentBranch, workspaceRoot);
- await updatePackageJsonFiles(allUpdates);
- if (!await commitChanges("chore: update release versions", workspaceRoot)) {
- console.log("No changes to commit");
+ await updateAllPackageJsonFiles(allUpdates);
+ const hasCommitted = await commitChanges("chore: update release versions", workspaceRoot);
+ const isBranchAhead = await isBranchAheadOfRemote(normalizedOptions.releaseBranch, workspaceRoot);
+ if (!hasCommitted && !isBranchAhead) {
+ logger.log("No changes to commit and branch is in sync with remote");
  await checkoutBranch(currentBranch, workspaceRoot);
  if (prExists) {
- console.log("No updates needed, PR is already up to date");
+ logger.log("No updates needed, PR is already up to date");
  return {
  updates: allUpdates,
  prUrl: existingPullRequest.html_url,
  created: false
  };
  } else {
- console.error("No changes to commit, and no existing PR. Nothing to do.");
+ logger.error("No changes to commit, and no existing PR. Nothing to do.");
  return null;
  }
  }
- console.log("Pushing changes to remote");
- await pushBranch(releaseBranch, workspaceRoot, { forceWithLease: true });
- const prTitle = existingPullRequest?.title || "Release: Update package versions";
- const prBody = generatePRBody(allUpdates);
+ logger.log("Pushing changes to remote");
+ await pushBranch(normalizedOptions.releaseBranch, workspaceRoot, { forceWithLease: true });
+ const prTitle = existingPullRequest?.title || options.pullRequest?.title || "chore: update package versions";
+ const prBody = generatePullRequestBody(allUpdates, options.pullRequest?.body);
  const pullRequest = await upsertPullRequest({
- owner,
- repo,
+ owner: normalizedOptions.owner,
+ repo: normalizedOptions.repo,
  pullNumber: existingPullRequest?.number,
  title: prTitle,
  body: prBody,
- head: releaseBranch,
+ head: normalizedOptions.releaseBranch,
  base: currentBranch,
- githubToken
+ githubToken: normalizedOptions.githubToken
  });
- console.log(prExists ? "Updated pull request:" : "Created pull request:", pullRequest?.html_url);
+ logger.log(prExists ? "Updated pull request:" : "Created pull request:", pullRequest?.html_url);
  await checkoutBranch(currentBranch, workspaceRoot);
  return {
  updates: allUpdates,
@@ -754,64 +903,6 @@ async function release(options) {
  created: !prExists
  };
  }
- async function discoverPackages(workspaceRoot, options) {
- let workspacePackages;
- let packagesToAnalyze;
- if (typeof options.packages === "boolean" && options.packages === true) {
- workspacePackages = await findWorkspacePackages(workspaceRoot, { excludePrivate: false });
- packagesToAnalyze = workspacePackages;
- return {
- workspacePackages,
- packagesToAnalyze
- };
- }
- if (Array.isArray(options.packages)) {
- const packageNames = options.packages;
- workspacePackages = await findWorkspacePackages(workspaceRoot, {
- excludePrivate: false,
- included: packageNames
- });
- packagesToAnalyze = workspacePackages.filter((pkg) => packageNames.includes(pkg.name));
- if (packagesToAnalyze.length !== packageNames.length) {
- const found = new Set(packagesToAnalyze.map((p) => p.name));
- const missing = packageNames.filter((p) => !found.has(p));
- throw new Error(`Packages not found in workspace: ${missing.join(", ")}`);
- }
- return {
- workspacePackages,
- packagesToAnalyze
- };
- }
- workspacePackages = await findWorkspacePackages(workspaceRoot, options.packages);
- packagesToAnalyze = workspacePackages;
- return {
- workspacePackages,
- packagesToAnalyze
- };
- }
- async function analyzeCommits(packages, workspaceRoot) {
- const changedPackages = /* @__PURE__ */ new Map();
- for (const pkg of packages) {
- const bump = await analyzePackageCommits(pkg, workspaceRoot);
- if (bump !== "none") changedPackages.set(pkg.name, bump);
- }
- return changedPackages;
- }
- function calculateVersions(allPackages, changedPackages) {
- const updates = [];
- for (const [pkgName, bump] of changedPackages) {
- const pkg = allPackages.find((p) => p.name === pkgName);
- if (!pkg) continue;
- updates.push(createVersionUpdate(pkg, bump, true));
- }
- return updates;
- }
- async function updatePackageJsonFiles(updates) {
- await Promise.all(updates.map(async (update) => {
- const depUpdates = getDependencyUpdates(update.package, updates);
- await updatePackageJson(update.package, update.newVersion, depUpdates);
- }));
- }

  //#endregion
- export { release };
+ export { publish, release };
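Note on the new PR body generation: generatePullRequestBody renders either the built-in defaultTemplate or a caller-supplied pullRequest.body through Eta, exposing it.packages entries with name, currentVersion, newVersion, bumpType, and hasDirectChanges. A hypothetical custom template (same Eta syntax as the default shown in the diff) might look like this; the option names mirror the sketch at the top of this page.

// Hypothetical custom body; the available fields are taken from the renderString() call above.
const customBody = `
  ## Pending releases

  <% it.packages.forEach((pkg) => { %>
  - <%= pkg.name %>: <%= pkg.currentVersion %> -> <%= pkg.newVersion %> (<%= pkg.bumpType %><%= pkg.hasDirectChanges ? "" : ", dependency bump only" %>)
  <% }) %>
`;

await release({
  repo: "octocat/hello-world",
  githubToken: process.env.GITHUB_TOKEN,
  pullRequest: { title: "chore: update package versions", body: customBody },
});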