@ucdjs/release-scripts 0.1.0-beta.2 → 0.1.0-beta.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/eta-j5TFRbI4.mjs +477 -0
- package/dist/index.d.mts +122 -51
- package/dist/index.mjs +1568 -651
- package/package.json +17 -5
package/dist/index.mjs
CHANGED
|
@@ -1,23 +1,76 @@
|
|
|
1
|
+
import { t as Eta } from "./eta-j5TFRbI4.mjs";
|
|
2
|
+
import { mkdir, readFile, rm, writeFile } from "node:fs/promises";
|
|
3
|
+
import { join, relative } from "node:path";
|
|
1
4
|
import process from "node:process";
|
|
2
|
-
import
|
|
3
|
-
import createDebug from "debug";
|
|
5
|
+
import readline from "node:readline";
|
|
4
6
|
import farver from "farver";
|
|
7
|
+
import mri from "mri";
|
|
5
8
|
import { exec } from "tinyexec";
|
|
6
|
-
import {
|
|
7
|
-
import {
|
|
9
|
+
import { dedent } from "@luxass/utils";
|
|
10
|
+
import { getCommits, groupByType } from "commit-parser";
|
|
8
11
|
import prompts from "prompts";
|
|
12
|
+
import { compare, gt } from "semver";
|
|
9
13
|
|
|
10
|
-
//#region src/
|
|
11
|
-
function
|
|
12
|
-
const debug$2 = createDebug(namespace);
|
|
13
|
-
if (debug$2.enabled) return debug$2;
|
|
14
|
-
}
|
|
14
|
+
//#region src/publish.ts
|
|
15
|
+
function publish(_options) {}
|
|
15
16
|
|
|
16
17
|
//#endregion
|
|
17
|
-
//#region src/utils.ts
|
|
18
|
-
const
|
|
19
|
-
|
|
20
|
-
|
|
18
|
+
//#region src/shared/utils.ts
|
|
19
|
+
const args = mri(process.argv.slice(2));
|
|
20
|
+
const isDryRun = !!args.dry;
|
|
21
|
+
const isVerbose = !!args.verbose;
|
|
22
|
+
const isForce = !!args.force;
|
|
23
|
+
const ucdjsReleaseOverridesPath = ".github/ucdjs-release.overrides.json";
|
|
24
|
+
const isCI = typeof process.env.CI === "string" && process.env.CI !== "" && process.env.CI.toLowerCase() !== "false";
|
|
25
|
+
const logger = {
|
|
26
|
+
info: (...args$1) => {
|
|
27
|
+
console.info(...args$1);
|
|
28
|
+
},
|
|
29
|
+
warn: (...args$1) => {
|
|
30
|
+
console.warn(` ${farver.yellow("⚠")}`, ...args$1);
|
|
31
|
+
},
|
|
32
|
+
error: (...args$1) => {
|
|
33
|
+
console.error(` ${farver.red("✖")}`, ...args$1);
|
|
34
|
+
},
|
|
35
|
+
verbose: (...args$1) => {
|
|
36
|
+
if (!isVerbose) return;
|
|
37
|
+
if (args$1.length === 0) {
|
|
38
|
+
console.log();
|
|
39
|
+
return;
|
|
40
|
+
}
|
|
41
|
+
if (args$1.length > 1 && typeof args$1[0] === "string") {
|
|
42
|
+
console.log(farver.dim(args$1[0]), ...args$1.slice(1));
|
|
43
|
+
return;
|
|
44
|
+
}
|
|
45
|
+
console.log(...args$1);
|
|
46
|
+
},
|
|
47
|
+
section: (title) => {
|
|
48
|
+
console.log();
|
|
49
|
+
console.log(` ${farver.bold(title)}`);
|
|
50
|
+
console.log(` ${farver.gray("─".repeat(title.length + 2))}`);
|
|
51
|
+
},
|
|
52
|
+
emptyLine: () => {
|
|
53
|
+
console.log();
|
|
54
|
+
},
|
|
55
|
+
item: (message, ...args$1) => {
|
|
56
|
+
console.log(` ${message}`, ...args$1);
|
|
57
|
+
},
|
|
58
|
+
step: (message) => {
|
|
59
|
+
console.log(` ${farver.blue("→")} ${message}`);
|
|
60
|
+
},
|
|
61
|
+
success: (message) => {
|
|
62
|
+
console.log(` ${farver.green("✓")} ${message}`);
|
|
63
|
+
},
|
|
64
|
+
clearScreen: () => {
|
|
65
|
+
const repeatCount = process.stdout.rows - 2;
|
|
66
|
+
const blank = repeatCount > 0 ? "\n".repeat(repeatCount) : "";
|
|
67
|
+
console.log(blank);
|
|
68
|
+
readline.cursorTo(process.stdout, 0, 0);
|
|
69
|
+
readline.clearScreenDown(process.stdout);
|
|
70
|
+
}
|
|
71
|
+
};
|
|
72
|
+
async function run(bin, args$1, opts = {}) {
|
|
73
|
+
return exec(bin, args$1, {
|
|
21
74
|
throwOnError: true,
|
|
22
75
|
...opts,
|
|
23
76
|
nodeOptions: {
|
|
@@ -26,211 +79,27 @@ async function run(bin, args, opts = {}) {
|
|
|
26
79
|
}
|
|
27
80
|
});
|
|
28
81
|
}
|
|
29
|
-
async function dryRun(bin, args, opts) {
|
|
30
|
-
return
|
|
82
|
+
async function dryRun(bin, args$1, opts) {
|
|
83
|
+
return logger.verbose(farver.blue(`[dryrun] ${bin} ${args$1.join(" ")}`), opts || "");
|
|
31
84
|
}
|
|
32
|
-
const runIfNotDry =
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
async function getLastPackageTag(packageName, workspaceRoot) {
|
|
38
|
-
const { stdout } = await run("git", ["tag", "--list"], { nodeOptions: {
|
|
39
|
-
cwd: workspaceRoot,
|
|
40
|
-
stdio: "pipe"
|
|
41
|
-
} });
|
|
42
|
-
return stdout.split("\n").map((tag) => tag.trim()).filter(Boolean).reverse().find((tag) => tag.startsWith(`${packageName}@`));
|
|
43
|
-
}
|
|
44
|
-
function determineHighestBump(commits) {
|
|
45
|
-
if (commits.length === 0) return "none";
|
|
46
|
-
let highestBump = "none";
|
|
47
|
-
for (const commit of commits) {
|
|
48
|
-
const bump = determineBumpType(commit);
|
|
49
|
-
if (bump === "major") return "major";
|
|
50
|
-
if (bump === "minor") highestBump = "minor";
|
|
51
|
-
else if (bump === "patch" && highestBump === "none") highestBump = "patch";
|
|
52
|
-
}
|
|
53
|
-
return highestBump;
|
|
85
|
+
const runIfNotDry = isDryRun ? dryRun : run;
|
|
86
|
+
function exitWithError(message, hint) {
|
|
87
|
+
logger.error(farver.bold(message));
|
|
88
|
+
if (hint) console.error(farver.gray(` ${hint}`));
|
|
89
|
+
process.exit(1);
|
|
54
90
|
}
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
91
|
+
if (isDryRun || isVerbose || isForce) {
|
|
92
|
+
logger.verbose(farver.inverse(farver.yellow(" Running with special flags ")));
|
|
93
|
+
logger.verbose({
|
|
94
|
+
isDryRun,
|
|
95
|
+
isVerbose,
|
|
96
|
+
isForce
|
|
60
97
|
});
|
|
61
|
-
|
|
62
|
-
const touchedCommitHashes = await getCommitsTouchingPackage(lastTag || "HEAD", "HEAD", pkg.path, workspaceRoot);
|
|
63
|
-
const touchedSet = new Set(touchedCommitHashes);
|
|
64
|
-
const packageCommits = allCommits.filter((commit) => touchedSet.has(commit.shortHash));
|
|
65
|
-
debug$1?.(`${packageCommits.length} commits affect ${pkg.name}`);
|
|
66
|
-
return packageCommits;
|
|
67
|
-
}
|
|
68
|
-
async function analyzePackageCommits(pkg, workspaceRoot) {
|
|
69
|
-
return determineHighestBump(await getPackageCommits(pkg, workspaceRoot));
|
|
70
|
-
}
|
|
71
|
-
function determineBumpType(commit) {
|
|
72
|
-
if (commit.isBreaking) return "major";
|
|
73
|
-
if (!commit.isConventional || !commit.type) return "none";
|
|
74
|
-
switch (commit.type) {
|
|
75
|
-
case "feat": return "minor";
|
|
76
|
-
case "fix":
|
|
77
|
-
case "perf": return "patch";
|
|
78
|
-
case "docs":
|
|
79
|
-
case "style":
|
|
80
|
-
case "refactor":
|
|
81
|
-
case "test":
|
|
82
|
-
case "build":
|
|
83
|
-
case "ci":
|
|
84
|
-
case "chore":
|
|
85
|
-
case "revert": return "none";
|
|
86
|
-
default: return "none";
|
|
87
|
-
}
|
|
88
|
-
}
|
|
89
|
-
async function getCommitsTouchingPackage(from, to, packagePath, workspaceRoot) {
|
|
90
|
-
try {
|
|
91
|
-
const { stdout } = await run("git", [
|
|
92
|
-
"log",
|
|
93
|
-
"--pretty=format:%h",
|
|
94
|
-
from === "HEAD" ? "HEAD" : `${from}...${to}`,
|
|
95
|
-
"--",
|
|
96
|
-
packagePath
|
|
97
|
-
], { nodeOptions: {
|
|
98
|
-
cwd: workspaceRoot,
|
|
99
|
-
stdio: "pipe"
|
|
100
|
-
} });
|
|
101
|
-
return stdout.split("\n").map((line) => line.trim()).filter(Boolean);
|
|
102
|
-
} catch (error) {
|
|
103
|
-
debug$1?.(`Error getting commits touching package: ${error}`);
|
|
104
|
-
return [];
|
|
105
|
-
}
|
|
106
|
-
}
|
|
107
|
-
|
|
108
|
-
//#endregion
|
|
109
|
-
//#region src/validation.ts
|
|
110
|
-
/**
|
|
111
|
-
* Validation utilities for release scripts
|
|
112
|
-
*/
|
|
113
|
-
function isValidSemver(version) {
|
|
114
|
-
return /^\d+\.\d+\.\d+(?:[-+].+)?$/.test(version);
|
|
115
|
-
}
|
|
116
|
-
function validateSemver(version) {
|
|
117
|
-
if (!isValidSemver(version)) throw new Error(`Invalid semver version: ${version}`);
|
|
118
|
-
}
|
|
119
|
-
|
|
120
|
-
//#endregion
|
|
121
|
-
//#region src/version.ts
|
|
122
|
-
/**
|
|
123
|
-
* Calculate the new version based on current version and bump type
|
|
124
|
-
* Pure function - no side effects, easily testable
|
|
125
|
-
*/
|
|
126
|
-
function calculateNewVersion(currentVersion, bump) {
|
|
127
|
-
if (bump === "none") return currentVersion;
|
|
128
|
-
validateSemver(currentVersion);
|
|
129
|
-
const match = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)(.*)$/);
|
|
130
|
-
if (!match) throw new Error(`Invalid semver version: ${currentVersion}`);
|
|
131
|
-
const [, major, minor, patch, suffix] = match;
|
|
132
|
-
let newMajor = Number.parseInt(major, 10);
|
|
133
|
-
let newMinor = Number.parseInt(minor, 10);
|
|
134
|
-
let newPatch = Number.parseInt(patch, 10);
|
|
135
|
-
switch (bump) {
|
|
136
|
-
case "major":
|
|
137
|
-
newMajor += 1;
|
|
138
|
-
newMinor = 0;
|
|
139
|
-
newPatch = 0;
|
|
140
|
-
break;
|
|
141
|
-
case "minor":
|
|
142
|
-
newMinor += 1;
|
|
143
|
-
newPatch = 0;
|
|
144
|
-
break;
|
|
145
|
-
case "patch":
|
|
146
|
-
newPatch += 1;
|
|
147
|
-
break;
|
|
148
|
-
}
|
|
149
|
-
return `${newMajor}.${newMinor}.${newPatch}`;
|
|
150
|
-
}
|
|
151
|
-
/**
|
|
152
|
-
* Create a version update object
|
|
153
|
-
*/
|
|
154
|
-
function createVersionUpdate(pkg, bump, hasDirectChanges) {
|
|
155
|
-
const newVersion = calculateNewVersion(pkg.version, bump);
|
|
156
|
-
return {
|
|
157
|
-
package: pkg,
|
|
158
|
-
currentVersion: pkg.version,
|
|
159
|
-
newVersion,
|
|
160
|
-
bumpType: bump,
|
|
161
|
-
hasDirectChanges
|
|
162
|
-
};
|
|
163
|
-
}
|
|
164
|
-
/**
|
|
165
|
-
* Update a package.json file with new version and dependency versions
|
|
166
|
-
*/
|
|
167
|
-
async function updatePackageJson(pkg, newVersion, dependencyUpdates) {
|
|
168
|
-
const packageJsonPath = join(pkg.path, "package.json");
|
|
169
|
-
const content = await readFile(packageJsonPath, "utf-8");
|
|
170
|
-
const packageJson = JSON.parse(content);
|
|
171
|
-
packageJson.version = newVersion;
|
|
172
|
-
for (const [depName, depVersion] of dependencyUpdates) {
|
|
173
|
-
if (packageJson.dependencies?.[depName]) {
|
|
174
|
-
if (packageJson.dependencies[depName] === "workspace:*") continue;
|
|
175
|
-
packageJson.dependencies[depName] = `^${depVersion}`;
|
|
176
|
-
}
|
|
177
|
-
if (packageJson.devDependencies?.[depName]) {
|
|
178
|
-
if (packageJson.devDependencies[depName] === "workspace:*") continue;
|
|
179
|
-
packageJson.devDependencies[depName] = `^${depVersion}`;
|
|
180
|
-
}
|
|
181
|
-
if (packageJson.peerDependencies?.[depName]) {
|
|
182
|
-
if (packageJson.peerDependencies[depName] === "workspace:*") continue;
|
|
183
|
-
packageJson.peerDependencies[depName] = `^${depVersion}`;
|
|
184
|
-
}
|
|
185
|
-
}
|
|
186
|
-
await writeFile(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf-8");
|
|
187
|
-
}
|
|
188
|
-
/**
|
|
189
|
-
* Get all dependency updates needed for a package
|
|
190
|
-
*/
|
|
191
|
-
function getDependencyUpdates(pkg, allUpdates) {
|
|
192
|
-
const updates = /* @__PURE__ */ new Map();
|
|
193
|
-
const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
|
|
194
|
-
for (const dep of allDeps) {
|
|
195
|
-
const update = allUpdates.find((u) => u.package.name === dep);
|
|
196
|
-
if (update) updates.set(dep, update.newVersion);
|
|
197
|
-
}
|
|
198
|
-
return updates;
|
|
199
|
-
}
|
|
200
|
-
|
|
201
|
-
//#endregion
|
|
202
|
-
//#region src/dependencies.ts
|
|
203
|
-
/**
|
|
204
|
-
* Pure function: Determine which packages need updates due to dependency changes
|
|
205
|
-
*
|
|
206
|
-
* When a package is updated, all packages that depend on it should also be updated.
|
|
207
|
-
* This function calculates which additional packages need patch bumps.
|
|
208
|
-
*
|
|
209
|
-
* @param updateOrder - Packages in topological order with their dependency levels
|
|
210
|
-
* @param directUpdates - Packages with direct code changes
|
|
211
|
-
* @returns All updates including dependent packages
|
|
212
|
-
*/
|
|
213
|
-
function createDependentUpdates(updateOrder, directUpdates) {
|
|
214
|
-
const allUpdates = [...directUpdates];
|
|
215
|
-
const updatedPackages = new Set(directUpdates.map((u) => u.package.name));
|
|
216
|
-
for (const { package: pkg } of updateOrder) {
|
|
217
|
-
if (updatedPackages.has(pkg.name)) continue;
|
|
218
|
-
if (hasUpdatedDependencies(pkg, updatedPackages)) {
|
|
219
|
-
allUpdates.push(createVersionUpdate(pkg, "patch", false));
|
|
220
|
-
updatedPackages.add(pkg.name);
|
|
221
|
-
}
|
|
222
|
-
}
|
|
223
|
-
return allUpdates;
|
|
224
|
-
}
|
|
225
|
-
/**
|
|
226
|
-
* Pure function: Check if a package has any updated dependencies
|
|
227
|
-
*/
|
|
228
|
-
function hasUpdatedDependencies(pkg, updatedPackages) {
|
|
229
|
-
return [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies].some((dep) => updatedPackages.has(dep));
|
|
98
|
+
logger.verbose();
|
|
230
99
|
}
|
|
231
100
|
|
|
232
101
|
//#endregion
|
|
233
|
-
//#region src/git.ts
|
|
102
|
+
//#region src/core/git.ts
|
|
234
103
|
/**
|
|
235
104
|
* Check if the working directory is clean (no uncommitted changes)
|
|
236
105
|
* @param {string} workspaceRoot - The root directory of the workspace
|
|
@@ -244,7 +113,7 @@ async function isWorkingDirectoryClean(workspaceRoot) {
|
|
|
244
113
|
} })).stdout.trim() !== "") return false;
|
|
245
114
|
return true;
|
|
246
115
|
} catch (err) {
|
|
247
|
-
|
|
116
|
+
logger.error("Error checking git status:", err);
|
|
248
117
|
return false;
|
|
249
118
|
}
|
|
250
119
|
}
|
|
@@ -270,83 +139,124 @@ async function doesBranchExist(branch, workspaceRoot) {
|
|
|
270
139
|
}
|
|
271
140
|
}
|
|
272
141
|
/**
|
|
273
|
-
*
|
|
274
|
-
*
|
|
275
|
-
* @
|
|
276
|
-
* @returns Promise resolving to true if pull succeeded, false otherwise
|
|
142
|
+
* Retrieves the default branch name from the remote repository.
|
|
143
|
+
* Falls back to "main" if the default branch cannot be determined.
|
|
144
|
+
* @returns {Promise<string>} A Promise resolving to the default branch name as a string.
|
|
277
145
|
*/
|
|
278
|
-
async function
|
|
146
|
+
async function getDefaultBranch(workspaceRoot) {
|
|
279
147
|
try {
|
|
280
|
-
await run("git", [
|
|
281
|
-
"pull",
|
|
282
|
-
"origin",
|
|
283
|
-
branch
|
|
284
|
-
], { nodeOptions: {
|
|
148
|
+
const match = (await run("git", ["symbolic-ref", "refs/remotes/origin/HEAD"], { nodeOptions: {
|
|
285
149
|
cwd: workspaceRoot,
|
|
286
150
|
stdio: "pipe"
|
|
287
|
-
} });
|
|
288
|
-
return
|
|
151
|
+
} })).stdout.trim().match(/^refs\/remotes\/origin\/(.+)$/);
|
|
152
|
+
if (match && match[1]) return match[1];
|
|
153
|
+
return "main";
|
|
289
154
|
} catch {
|
|
290
|
-
return
|
|
155
|
+
return "main";
|
|
291
156
|
}
|
|
292
157
|
}
|
|
293
158
|
/**
|
|
294
|
-
*
|
|
295
|
-
* @param
|
|
296
|
-
* @
|
|
297
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
159
|
+
* Retrieves the name of the current branch in the repository.
|
|
160
|
+
* @param {string} workspaceRoot - The root directory of the workspace
|
|
161
|
+
* @returns {Promise<string>} A Promise resolving to the current branch name as a string
|
|
298
162
|
*/
|
|
299
|
-
async function
|
|
300
|
-
|
|
301
|
-
"
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
163
|
+
async function getCurrentBranch(workspaceRoot) {
|
|
164
|
+
try {
|
|
165
|
+
return (await run("git", [
|
|
166
|
+
"rev-parse",
|
|
167
|
+
"--abbrev-ref",
|
|
168
|
+
"HEAD"
|
|
169
|
+
], { nodeOptions: {
|
|
170
|
+
cwd: workspaceRoot,
|
|
171
|
+
stdio: "pipe"
|
|
172
|
+
} })).stdout.trim();
|
|
173
|
+
} catch (err) {
|
|
174
|
+
logger.error("Error getting current branch:", err);
|
|
175
|
+
throw err;
|
|
176
|
+
}
|
|
306
177
|
}
|
|
307
178
|
/**
|
|
308
|
-
*
|
|
309
|
-
* @param
|
|
310
|
-
* @
|
|
311
|
-
|
|
179
|
+
* Retrieves the list of available branches in the repository.
|
|
180
|
+
* @param {string} workspaceRoot - The root directory of the workspace
|
|
181
|
+
* @returns {Promise<string[]>} A Promise resolving to an array of branch names
|
|
182
|
+
*/
|
|
183
|
+
async function getAvailableBranches(workspaceRoot) {
|
|
184
|
+
try {
|
|
185
|
+
return (await run("git", ["branch", "--list"], { nodeOptions: {
|
|
186
|
+
cwd: workspaceRoot,
|
|
187
|
+
stdio: "pipe"
|
|
188
|
+
} })).stdout.split("\n").map((line) => line.replace("*", "").trim()).filter((line) => line.length > 0);
|
|
189
|
+
} catch (err) {
|
|
190
|
+
logger.error("Error getting available branches:", err);
|
|
191
|
+
throw err;
|
|
192
|
+
}
|
|
193
|
+
}
|
|
194
|
+
/**
|
|
195
|
+
* Creates a new branch from the specified base branch.
|
|
196
|
+
* @param {string} branch - The name of the new branch to create
|
|
197
|
+
* @param {string} base - The base branch to create the new branch from
|
|
198
|
+
* @param {string} workspaceRoot - The root directory of the workspace
|
|
199
|
+
* @returns {Promise<void>} A Promise that resolves when the branch is created
|
|
312
200
|
*/
|
|
201
|
+
async function createBranch(branch, base, workspaceRoot) {
|
|
202
|
+
try {
|
|
203
|
+
logger.info(`Creating branch: ${farver.green(branch)} from ${farver.cyan(base)}`);
|
|
204
|
+
await runIfNotDry("git", [
|
|
205
|
+
"checkout",
|
|
206
|
+
"-b",
|
|
207
|
+
branch,
|
|
208
|
+
base
|
|
209
|
+
], { nodeOptions: {
|
|
210
|
+
cwd: workspaceRoot,
|
|
211
|
+
stdio: "pipe"
|
|
212
|
+
} });
|
|
213
|
+
} catch {
|
|
214
|
+
exitWithError(`Failed to create branch: ${branch}`, `Make sure the branch doesn't already exist and you have a clean working directory`);
|
|
215
|
+
}
|
|
216
|
+
}
|
|
313
217
|
async function checkoutBranch(branch, workspaceRoot) {
|
|
314
218
|
try {
|
|
315
|
-
|
|
316
|
-
|
|
219
|
+
logger.info(`Switching to branch: ${farver.green(branch)}`);
|
|
220
|
+
const match = (await run("git", ["checkout", branch], { nodeOptions: {
|
|
221
|
+
cwd: workspaceRoot,
|
|
222
|
+
stdio: "pipe"
|
|
223
|
+
} })).stderr.trim().match(/Switched to branch '(.+)'/);
|
|
224
|
+
if (match && match[1] === branch) {
|
|
225
|
+
logger.info(`Successfully switched to branch: ${farver.green(branch)}`);
|
|
226
|
+
return true;
|
|
227
|
+
}
|
|
228
|
+
return false;
|
|
317
229
|
} catch {
|
|
318
230
|
return false;
|
|
319
231
|
}
|
|
320
232
|
}
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
}
|
|
233
|
+
async function pullLatestChanges(branch, workspaceRoot) {
|
|
234
|
+
try {
|
|
235
|
+
await run("git", [
|
|
236
|
+
"pull",
|
|
237
|
+
"origin",
|
|
238
|
+
branch
|
|
239
|
+
], { nodeOptions: {
|
|
240
|
+
cwd: workspaceRoot,
|
|
241
|
+
stdio: "pipe"
|
|
242
|
+
} });
|
|
243
|
+
return true;
|
|
244
|
+
} catch {
|
|
245
|
+
return false;
|
|
246
|
+
}
|
|
335
247
|
}
|
|
336
|
-
/**
|
|
337
|
-
* Rebase current branch onto another branch
|
|
338
|
-
* @param ontoBranch - The target branch to rebase onto
|
|
339
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
340
|
-
*/
|
|
341
248
|
async function rebaseBranch(ontoBranch, workspaceRoot) {
|
|
342
|
-
|
|
249
|
+
try {
|
|
250
|
+
logger.info(`Rebasing onto: ${farver.cyan(ontoBranch)}`);
|
|
251
|
+
await runIfNotDry("git", ["rebase", ontoBranch], { nodeOptions: {
|
|
252
|
+
cwd: workspaceRoot,
|
|
253
|
+
stdio: "pipe"
|
|
254
|
+
} });
|
|
255
|
+
return true;
|
|
256
|
+
} catch {
|
|
257
|
+
exitWithError(`Failed to rebase onto: ${ontoBranch}`, `You may have merge conflicts. Run 'git rebase --abort' to undo the rebase`);
|
|
258
|
+
}
|
|
343
259
|
}
|
|
344
|
-
/**
|
|
345
|
-
* Check if local branch is ahead of remote (has commits to push)
|
|
346
|
-
* @param branch - The branch name to check
|
|
347
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
348
|
-
* @returns Promise resolving to true if local is ahead, false otherwise
|
|
349
|
-
*/
|
|
350
260
|
async function isBranchAheadOfRemote(branch, workspaceRoot) {
|
|
351
261
|
try {
|
|
352
262
|
const result = await run("git", [
|
|
@@ -362,134 +272,398 @@ async function isBranchAheadOfRemote(branch, workspaceRoot) {
|
|
|
362
272
|
return true;
|
|
363
273
|
}
|
|
364
274
|
}
|
|
365
|
-
/**
|
|
366
|
-
* Check if there are any changes to commit (staged or unstaged)
|
|
367
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
368
|
-
* @returns Promise resolving to true if there are changes, false otherwise
|
|
369
|
-
*/
|
|
370
|
-
async function hasChangesToCommit(workspaceRoot) {
|
|
371
|
-
return (await run("git", ["status", "--porcelain"], { nodeOptions: {
|
|
372
|
-
cwd: workspaceRoot,
|
|
373
|
-
stdio: "pipe"
|
|
374
|
-
} })).stdout.trim() !== "";
|
|
375
|
-
}
|
|
376
|
-
/**
|
|
377
|
-
* Commit changes with a message
|
|
378
|
-
* @param message - The commit message
|
|
379
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
380
|
-
* @returns Promise resolving to true if commit was made, false if there were no changes
|
|
381
|
-
*/
|
|
382
275
|
async function commitChanges(message, workspaceRoot) {
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
276
|
+
try {
|
|
277
|
+
await run("git", ["add", "."], { nodeOptions: {
|
|
278
|
+
cwd: workspaceRoot,
|
|
279
|
+
stdio: "pipe"
|
|
280
|
+
} });
|
|
281
|
+
if (await isWorkingDirectoryClean(workspaceRoot)) return false;
|
|
282
|
+
logger.info(`Committing changes: ${farver.dim(message)}`);
|
|
283
|
+
await runIfNotDry("git", [
|
|
284
|
+
"commit",
|
|
285
|
+
"-m",
|
|
286
|
+
message
|
|
287
|
+
], { nodeOptions: {
|
|
288
|
+
cwd: workspaceRoot,
|
|
289
|
+
stdio: "pipe"
|
|
290
|
+
} });
|
|
291
|
+
return true;
|
|
292
|
+
} catch {
|
|
293
|
+
exitWithError(`Failed to commit changes`, `Make sure you have git configured properly with user.name and user.email`);
|
|
294
|
+
}
|
|
391
295
|
}
|
|
392
|
-
/**
|
|
393
|
-
* Push branch to remote
|
|
394
|
-
* @param branch - The branch name to push
|
|
395
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
396
|
-
* @param options - Push options
|
|
397
|
-
* @param options.force - Force push (overwrite remote)
|
|
398
|
-
* @param options.forceWithLease - Force push with safety check (won't overwrite unexpected changes)
|
|
399
|
-
*/
|
|
400
296
|
async function pushBranch(branch, workspaceRoot, options) {
|
|
401
|
-
const args = [
|
|
402
|
-
"push",
|
|
403
|
-
"origin",
|
|
404
|
-
branch
|
|
405
|
-
];
|
|
406
|
-
if (options?.forceWithLease) args.push("--force-with-lease");
|
|
407
|
-
else if (options?.force) args.push("--force");
|
|
408
|
-
await run("git", args, { nodeOptions: { cwd: workspaceRoot } });
|
|
409
|
-
}
|
|
410
|
-
/**
|
|
411
|
-
* Generate PR body from version updates
|
|
412
|
-
* @param updates - Array of version updates to include in the PR body
|
|
413
|
-
* @returns Formatted PR body as a string
|
|
414
|
-
*/
|
|
415
|
-
function generatePRBody(updates) {
|
|
416
|
-
const lines = [];
|
|
417
|
-
lines.push("## Packages");
|
|
418
|
-
lines.push("");
|
|
419
|
-
const directChanges = updates.filter((u) => u.hasDirectChanges);
|
|
420
|
-
const dependencyUpdates = updates.filter((u) => !u.hasDirectChanges);
|
|
421
|
-
if (directChanges.length > 0) {
|
|
422
|
-
lines.push("### Direct Changes");
|
|
423
|
-
lines.push("");
|
|
424
|
-
for (const update of directChanges) lines.push(`- **${update.package.name}**: ${update.currentVersion} → ${update.newVersion} (${update.bumpType})`);
|
|
425
|
-
lines.push("");
|
|
426
|
-
}
|
|
427
|
-
if (dependencyUpdates.length > 0) {
|
|
428
|
-
lines.push("### Dependency Updates");
|
|
429
|
-
lines.push("");
|
|
430
|
-
for (const update of dependencyUpdates) lines.push(`- **${update.package.name}**: ${update.currentVersion} → ${update.newVersion} (dependencies changed)`);
|
|
431
|
-
lines.push("");
|
|
432
|
-
}
|
|
433
|
-
lines.push("---");
|
|
434
|
-
lines.push("");
|
|
435
|
-
lines.push("This release PR was automatically generated.");
|
|
436
|
-
return lines.join("\n");
|
|
437
|
-
}
|
|
438
|
-
|
|
439
|
-
//#endregion
|
|
440
|
-
//#region src/github.ts
|
|
441
|
-
async function getExistingPullRequest({ owner, repo, branch, githubToken }) {
|
|
442
297
|
try {
|
|
443
|
-
const
|
|
444
|
-
|
|
445
|
-
|
|
298
|
+
const args$1 = [
|
|
299
|
+
"push",
|
|
300
|
+
"origin",
|
|
301
|
+
branch
|
|
302
|
+
];
|
|
303
|
+
if (options?.forceWithLease) {
|
|
304
|
+
args$1.push("--force-with-lease");
|
|
305
|
+
logger.info(`Pushing branch: ${farver.green(branch)} ${farver.dim("(with lease)")}`);
|
|
306
|
+
} else if (options?.force) {
|
|
307
|
+
args$1.push("--force");
|
|
308
|
+
logger.info(`Force pushing branch: ${farver.green(branch)}`);
|
|
309
|
+
} else logger.info(`Pushing branch: ${farver.green(branch)}`);
|
|
310
|
+
await runIfNotDry("git", args$1, { nodeOptions: {
|
|
311
|
+
cwd: workspaceRoot,
|
|
312
|
+
stdio: "pipe"
|
|
446
313
|
} });
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
const firstPullRequest = pulls[0];
|
|
451
|
-
if (typeof firstPullRequest !== "object" || firstPullRequest === null || !("number" in firstPullRequest) || typeof firstPullRequest.number !== "number" || !("title" in firstPullRequest) || typeof firstPullRequest.title !== "string" || !("body" in firstPullRequest) || typeof firstPullRequest.body !== "string" || !("draft" in firstPullRequest) || typeof firstPullRequest.draft !== "boolean" || !("html_url" in firstPullRequest) || typeof firstPullRequest.html_url !== "string") throw new TypeError("Pull request data validation failed");
|
|
452
|
-
const pullRequest = {
|
|
453
|
-
number: firstPullRequest.number,
|
|
454
|
-
title: firstPullRequest.title,
|
|
455
|
-
body: firstPullRequest.body,
|
|
456
|
-
draft: firstPullRequest.draft,
|
|
457
|
-
html_url: firstPullRequest.html_url
|
|
458
|
-
};
|
|
459
|
-
console.info(`Found existing pull request: ${farver.yellow(`#${pullRequest.number}`)}`);
|
|
460
|
-
return pullRequest;
|
|
461
|
-
} catch (err) {
|
|
462
|
-
console.error("Error fetching pull request:", err);
|
|
463
|
-
return null;
|
|
314
|
+
return true;
|
|
315
|
+
} catch {
|
|
316
|
+
exitWithError(`Failed to push branch: ${branch}`, `Make sure you have permission to push to the remote repository`);
|
|
464
317
|
}
|
|
465
318
|
}
|
|
466
|
-
async function
|
|
319
|
+
async function readFileFromGit(workspaceRoot, ref, filePath) {
|
|
467
320
|
try {
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
321
|
+
return (await run("git", ["show", `${ref}:${filePath}`], { nodeOptions: {
|
|
322
|
+
cwd: workspaceRoot,
|
|
323
|
+
stdio: "pipe"
|
|
324
|
+
} })).stdout;
|
|
325
|
+
} catch {
|
|
326
|
+
return null;
|
|
327
|
+
}
|
|
328
|
+
}
|
|
329
|
+
async function getMostRecentPackageTag(workspaceRoot, packageName) {
|
|
330
|
+
try {
|
|
331
|
+
const { stdout } = await run("git", [
|
|
332
|
+
"tag",
|
|
333
|
+
"--list",
|
|
334
|
+
`${packageName}@*`
|
|
335
|
+
], { nodeOptions: {
|
|
336
|
+
cwd: workspaceRoot,
|
|
337
|
+
stdio: "pipe"
|
|
338
|
+
} });
|
|
339
|
+
const tags = stdout.split("\n").map((tag) => tag.trim()).filter(Boolean);
|
|
340
|
+
if (tags.length === 0) return;
|
|
341
|
+
return tags.reverse()[0];
|
|
342
|
+
} catch (err) {
|
|
343
|
+
logger.warn(`Failed to get tags for package ${packageName}: ${err.message}`);
|
|
344
|
+
return;
|
|
345
|
+
}
|
|
346
|
+
}
|
|
347
|
+
/**
|
|
348
|
+
* Builds a mapping of commit SHAs to the list of files changed in each commit
|
|
349
|
+
* within a given inclusive range.
|
|
350
|
+
*
|
|
351
|
+
* Internally runs:
|
|
352
|
+
* git log --name-only --format=%H <from>^..<to>
|
|
353
|
+
*
|
|
354
|
+
* Notes
|
|
355
|
+
* - This includes the commit identified by `from` (via `from^..to`).
|
|
356
|
+
* - Order of commits in the resulting Map follows `git log` output
|
|
357
|
+
* (reverse chronological, newest first).
|
|
358
|
+
* - On failure (e.g., invalid refs), the function returns null.
|
|
359
|
+
*
|
|
360
|
+
* @param {string} workspaceRoot Absolute path to the git repository root used as cwd.
|
|
361
|
+
* @param {string} from Starting commit/ref (inclusive).
|
|
362
|
+
* @param {string} to Ending commit/ref (inclusive).
|
|
363
|
+
* @returns {Promise<Map<string, string[]> | null>} Promise resolving to a Map where keys are commit SHAs and values are
|
|
364
|
+
* arrays of file paths changed by that commit, or null on error.
|
|
365
|
+
*/
|
|
366
|
+
async function getGroupedFilesByCommitSha(workspaceRoot, from, to) {
|
|
367
|
+
const commitsMap = /* @__PURE__ */ new Map();
|
|
368
|
+
try {
|
|
369
|
+
const { stdout } = await run("git", [
|
|
370
|
+
"log",
|
|
371
|
+
"--name-only",
|
|
372
|
+
"--format=%H",
|
|
373
|
+
`${from}^..${to}`
|
|
374
|
+
], { nodeOptions: {
|
|
375
|
+
cwd: workspaceRoot,
|
|
376
|
+
stdio: "pipe"
|
|
377
|
+
} });
|
|
378
|
+
const lines = stdout.trim().split("\n").filter((line) => line.trim() !== "");
|
|
379
|
+
let currentSha = null;
|
|
380
|
+
const HASH_REGEX = /^[0-9a-f]{40}$/i;
|
|
381
|
+
for (const line of lines) {
|
|
382
|
+
const trimmedLine = line.trim();
|
|
383
|
+
if (HASH_REGEX.test(trimmedLine)) {
|
|
384
|
+
currentSha = trimmedLine;
|
|
385
|
+
commitsMap.set(currentSha, []);
|
|
386
|
+
continue;
|
|
387
|
+
}
|
|
388
|
+
if (currentSha === null) continue;
|
|
389
|
+
commitsMap.get(currentSha).push(trimmedLine);
|
|
390
|
+
}
|
|
391
|
+
return commitsMap;
|
|
392
|
+
} catch {
|
|
393
|
+
return null;
|
|
394
|
+
}
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
//#endregion
|
|
398
|
+
//#region src/core/changelog.ts
|
|
399
|
+
const globalAuthorCache = /* @__PURE__ */ new Map();
|
|
400
|
+
const DEFAULT_CHANGELOG_TEMPLATE = dedent`
|
|
401
|
+
<% if (it.previousVersion) { -%>
|
|
402
|
+
## [<%= it.version %>](<%= it.compareUrl %>) (<%= it.date %>)
|
|
403
|
+
<% } else { -%>
|
|
404
|
+
## <%= it.version %> (<%= it.date %>)
|
|
405
|
+
<% } %>
|
|
406
|
+
|
|
407
|
+
<% it.groups.forEach((group) => { %>
|
|
408
|
+
<% if (group.commits.length > 0) { %>
|
|
409
|
+
|
|
410
|
+
### <%= group.title %>
|
|
411
|
+
<% group.commits.forEach((commit) => { %>
|
|
412
|
+
|
|
413
|
+
* <%= commit.line %>
|
|
414
|
+
<% }); %>
|
|
415
|
+
|
|
416
|
+
<% } %>
|
|
417
|
+
<% }); %>
|
|
418
|
+
`;
|
|
419
|
+
/**
 * Render a single changelog entry for one package release.
 * Groups the release commits per configured group, resolves commit authors
 * via the GitHub client, and feeds everything into an Eta template.
 * Returns the rendered, trimmed markdown entry.
 */
async function generateChangelogEntry(options) {
  const { packageName, version, previousVersion, date, commits, owner, repo, groups, template, githubClient } = options;

  // A compare link only exists when there is a previous release to diff against.
  let compareUrl;
  if (previousVersion) {
    compareUrl = `https://github.com/${owner}/${repo}/compare/${packageName}@${previousVersion}...${packageName}@${version}`;
  }

  const grouped = groupByType(commits, {
    includeNonConventional: false,
    mergeKeys: Object.fromEntries(groups.map((g) => [g.name, g.types]))
  });

  const commitAuthors = await resolveCommitAuthors(commits, githubClient);

  const renderedGroups = groups.map((group) => {
    const commitsInGroup = grouped.get(group.name) ?? [];
    if (commitsInGroup.length > 0) logger.verbose(`Found ${commitsInGroup.length} commits for group "${group.name}".`);
    const formattedCommits = commitsInGroup.map((commit) => ({
      line: formatCommitLine({
        commit,
        owner,
        repo,
        authors: commitAuthors.get(commit.hash) ?? []
      })
    }));
    return {
      name: group.name,
      title: group.title,
      commits: formattedCommits
    };
  });

  const templateData = {
    packageName,
    version,
    previousVersion,
    date,
    compareUrl,
    owner,
    repo,
    groups: renderedGroups
  };

  const templateToUse = template || DEFAULT_CHANGELOG_TEMPLATE;
  return new Eta().renderString(templateToUse, templateData).trim();
}
|
|
455
|
+
/**
 * Write (or update) a package's CHANGELOG.md with a freshly generated entry.
 * The "existing" content is read from git at the default branch so local,
 * uncommitted edits do not affect the merge.
 */
async function updateChangelog(options) {
  const { version, previousVersion, commits, date, normalizedOptions, workspacePackage, githubClient } = options;
  const changelogPath = join(workspacePackage.path, "CHANGELOG.md");
  const changelogRelativePath = relative(normalizedOptions.workspaceRoot, join(workspacePackage.path, "CHANGELOG.md"));
  const existingContent = await readFileFromGit(normalizedOptions.workspaceRoot, normalizedOptions.branch.default, changelogRelativePath);
  logger.verbose("Existing content found: ", Boolean(existingContent));

  const newEntry = await generateChangelogEntry({
    packageName: workspacePackage.name,
    version,
    previousVersion,
    date,
    commits,
    owner: normalizedOptions.owner,
    repo: normalizedOptions.repo,
    groups: normalizedOptions.groups,
    template: normalizedOptions.changelog?.template,
    githubClient
  });

  // No changelog yet: create one with the package heading and the new entry.
  if (!existingContent) {
    await writeFile(changelogPath, `# ${workspacePackage.name}\n\n${newEntry}\n`, "utf-8");
    return;
  }

  const parsed = parseChangelog(existingContent);
  const lines = existingContent.split("\n");
  const existingVersion = parsed.versions.find((v) => v.version === version);

  let updatedContent;
  if (existingVersion) {
    // This version already has a section: replace it in place.
    updatedContent = [
      ...lines.slice(0, existingVersion.lineStart),
      newEntry,
      ...lines.slice(existingVersion.lineEnd + 1)
    ].join("\n");
  } else {
    // Insert the new entry directly below the "# <package>" header,
    // keeping a blank line on both sides.
    const insertAt = parsed.headerLineEnd + 1;
    const before = lines.slice(0, insertAt);
    const after = lines.slice(insertAt);
    if (before.length > 0 && before[before.length - 1] !== "") before.push("");
    updatedContent = [
      ...before,
      newEntry,
      "",
      ...after
    ].join("\n");
  }
  await writeFile(changelogPath, updatedContent, "utf-8");
}
|
|
505
|
+
/**
 * Build a map of commit hash -> author info objects, reusing the module-level
 * globalAuthorCache (keyed by email) so each author is resolved via the
 * GitHub API at most once.
 */
async function resolveCommitAuthors(commits, githubClient) {
  const pendingLookups = /* @__PURE__ */ new Set();
  const commitAuthors = /* @__PURE__ */ new Map();
  for (const commit of commits) {
    const authorsForCommit = [];
    for (const [idx, author] of commit.authors.entries()) {
      // Skip authors we cannot identify at all.
      if (!author.email || !author.name) continue;
      let info = globalAuthorCache.get(author.email);
      if (info === undefined) {
        info = {
          commits: [],
          name: author.name,
          email: author.email
        };
        globalAuthorCache.set(author.email, info);
      }
      // Only the primary (first) author gets the commit hash recorded;
      // those hashes are later used as a fallback for login resolution.
      if (idx === 0) info.commits.push(commit.shortHash);
      authorsForCommit.push(info);
      if (!info.login) pendingLookups.add(info);
    }
    commitAuthors.set(commit.hash, authorsForCommit);
  }
  // Resolve all unknown logins in parallel.
  await Promise.all([...pendingLookups].map((info) => githubClient.resolveAuthorInfo(info)));
  return commitAuthors;
}
|
|
530
|
+
/**
 * Format one commit as a markdown changelog bullet line:
 * description, issue/PR reference links, short-hash link, and author credits.
 */
function formatCommitLine({ commit, owner, repo, authors }) {
  const repoBase = `https://github.com/${owner}/${repo}`;
  let line = `${commit.description}`;
  const references = commit.references ?? [];
  if (references.length > 0) logger.verbose("Located references in commit", references.length);
  for (const ref of references) {
    if (!ref.value) continue;
    // Reference values look like "#123"; skip anything non-numeric.
    const number = Number.parseInt(ref.value.replace(/^#/, ""), 10);
    if (Number.isNaN(number)) continue;
    line += ref.type === "issue"
      ? ` ([Issue ${ref.value}](${repoBase}/issues/${number}))`
      : ` ([PR ${ref.value}](${repoBase}/pull/${number}))`;
  }
  line += ` ([${commit.shortHash}](${repoBase}/commit/${commit.hash}))`;
  if (authors.length > 0) {
    const authorList = authors
      .map((author) => (author.login ? `[@${author.login}](https://github.com/${author.login})` : author.name))
      .join(", ");
    line += ` (by ${authorList})`;
  }
  return line;
}
|
|
555
|
+
/**
 * Parse a CHANGELOG.md into its package header and per-version sections.
 * Returns { packageName, versions, headerLineEnd } where each version entry
 * records its 0-based start/end line indices and raw section content.
 */
function parseChangelog(content) {
  const lines = content.split("\n");

  // Locate the top-level "# <package>" heading.
  let packageName = null;
  let headerLineEnd = -1;
  for (let i = 0; i < lines.length; i++) {
    const trimmed = lines[i].trim();
    if (trimmed.startsWith("# ")) {
      packageName = trimmed.slice(2).trim();
      headerLineEnd = i;
      break;
    }
  }

  // Each "## ..." heading after the header starts a version section that
  // runs until the next "## ..." heading (or end of file).
  const versions = [];
  for (let i = headerLineEnd + 1; i < lines.length; i++) {
    const trimmed = lines[i].trim();
    if (!trimmed.startsWith("## ")) continue;
    const versionMatch = trimmed.match(/##\s+(?:<small>)?\[?([^\](\s<]+)/);
    if (!versionMatch) continue;
    let lineEnd = lines.length - 1;
    for (let j = i + 1; j < lines.length; j++) {
      if (lines[j].trim().startsWith("## ")) {
        lineEnd = j - 1;
        break;
      }
    }
    versions.push({
      version: versionMatch[1],
      lineStart: i,
      lineEnd,
      content: lines.slice(i, lineEnd + 1).join("\n")
    });
  }

  return {
    packageName,
    versions,
    headerLineEnd
  };
}
|
|
596
|
+
|
|
597
|
+
//#endregion
|
|
598
|
+
//#region src/core/github.ts
|
|
599
|
+
/**
 * Minimal authenticated GitHub REST v3 client used by the release flow.
 * Wraps global fetch with standard headers and validates response shapes
 * before returning them to callers.
 */
var GitHubClient = class {
  owner;
  repo;
  githubToken;
  apiBase = "https://api.github.com";
  constructor({ owner, repo, githubToken }) {
    this.owner = owner;
    this.repo = repo;
    this.githubToken = githubToken;
  }
  /**
   * Perform an authenticated request. Absolute URLs are used verbatim,
   * anything else is resolved against apiBase. Throws on non-2xx responses;
   * returns undefined for 204 No Content, otherwise the parsed JSON body.
   */
  async request(path, init = {}) {
    const url = path.startsWith("http") ? path : `${this.apiBase}${path}`;
    const headers = {
      ...init.headers,
      "Accept": "application/vnd.github.v3+json",
      "Authorization": `token ${this.githubToken}`,
      "User-Agent": "ucdjs-release-scripts (+https://github.com/ucdjs/ucdjs-release-scripts)"
    };
    const res = await fetch(url, { ...init, headers });
    if (!res.ok) {
      const errorText = await res.text();
      throw new Error(`GitHub API request failed with status ${res.status}: ${errorText || "No response body"}`);
    }
    if (res.status === 204) return;
    return res.json();
  }
  /**
   * Look up the open pull request whose head matches the given branch.
   * Returns null when none exists; throws TypeError when the API payload
   * does not have the expected shape.
   */
  async getExistingPullRequest(branch) {
    const head = branch.includes(":") ? branch : `${this.owner}:${branch}`;
    const endpoint = `/repos/${this.owner}/${this.repo}/pulls?state=open&head=${encodeURIComponent(head)}`;
    logger.verbose(`Requesting pull request for branch: ${branch} (url: ${this.apiBase}${endpoint})`);
    const pulls = await this.request(endpoint);
    if (!Array.isArray(pulls) || pulls.length === 0) return null;
    const candidate = pulls[0];
    const shapeOk = typeof candidate === "object"
      && candidate !== null
      && "number" in candidate && typeof candidate.number === "number"
      && "title" in candidate && typeof candidate.title === "string"
      && "body" in candidate && typeof candidate.body === "string"
      && "draft" in candidate && typeof candidate.draft === "boolean"
      && "html_url" in candidate && typeof candidate.html_url === "string";
    if (!shapeOk) throw new TypeError("Pull request data validation failed");
    const headInfo = "head" in candidate
      && typeof candidate.head === "object"
      && candidate.head !== null
      && "sha" in candidate.head
      && typeof candidate.head.sha === "string"
      ? { sha: candidate.head.sha }
      : void 0;
    const pullRequest = {
      number: candidate.number,
      title: candidate.title,
      body: candidate.body,
      draft: candidate.draft,
      html_url: candidate.html_url,
      head: headInfo
    };
    logger.info(`Found existing pull request: ${farver.yellow(`#${pullRequest.number}`)}`);
    return pullRequest;
  }
  /**
   * Create a new pull request, or update an existing one when pullNumber
   * is given. Updates only touch title/body; creation also sets head/base
   * and opens the PR as a draft.
   */
  async upsertPullRequest({ title, body, head, base, pullNumber }) {
    const isUpdate = typeof pullNumber === "number";
    const endpoint = isUpdate
      ? `/repos/${this.owner}/${this.repo}/pulls/${pullNumber}`
      : `/repos/${this.owner}/${this.repo}/pulls`;
    const requestBody = isUpdate
      ? { title, body }
      : { title, body, head, base, draft: true };
    logger.verbose(`${isUpdate ? "Updating" : "Creating"} pull request (url: ${this.apiBase}${endpoint})`);
    const pr = await this.request(endpoint, {
      method: isUpdate ? "PATCH" : "POST",
      body: JSON.stringify(requestBody)
    });
    const shapeOk = typeof pr === "object"
      && pr !== null
      && "number" in pr && typeof pr.number === "number"
      && "title" in pr && typeof pr.title === "string"
      && "body" in pr && typeof pr.body === "string"
      && "draft" in pr && typeof pr.draft === "boolean"
      && "html_url" in pr && typeof pr.html_url === "string";
    if (!shapeOk) throw new TypeError("Pull request data validation failed");
    const action = isUpdate ? "Updated" : "Created";
    logger.info(`${action} pull request: ${farver.yellow(`#${pr.number}`)}`);
    return {
      number: pr.number,
      title: pr.title,
      body: pr.body,
      draft: pr.draft,
      html_url: pr.html_url
    };
  }
  /**
   * Set a commit status (pending/success/failure/error) on the given SHA.
   */
  async setCommitStatus({ sha, state, targetUrl, description, context }) {
    const endpoint = `/repos/${this.owner}/${this.repo}/statuses/${sha}`;
    logger.verbose(`Setting commit status on ${sha} to ${state} (url: ${this.apiBase}${endpoint})`);
    await this.request(endpoint, {
      method: "POST",
      body: JSON.stringify({
        state,
        target_url: targetUrl,
        description: description || "",
        context
      })
    });
    logger.info(`Commit status set to ${farver.cyan(state)} for ${farver.gray(sha.substring(0, 7))}`);
  }
  /**
   * Best-effort resolution of a GitHub login for an author record.
   * Tries the user-search API by email first, then falls back to the author
   * of one of the commits. Failures are logged and never thrown; the (possibly
   * unchanged) info object is always returned.
   */
  async resolveAuthorInfo(info) {
    if (info.login) return info;
    try {
      const q = encodeURIComponent(`${info.email} type:user in:email`);
      const data = await this.request(`/search/users?q=${q}`);
      if (!data.items || data.items.length === 0) return info;
      info.login = data.items[0].login;
    } catch (err) {
      logger.warn(`Failed to resolve author info for email ${info.email}: ${err.message}`);
    }
    if (info.login) return info;
    if (info.commits.length > 0) {
      try {
        const data = await this.request(`/repos/${this.owner}/${this.repo}/commits/${info.commits[0]}`);
        if (data.author && data.author.login) info.login = data.author.login;
      } catch (err) {
        logger.warn(`Failed to resolve author info from commits for email ${info.email}: ${err.message}`);
      }
    }
    return info;
  }
};
|
|
709
|
+
/**
 * Factory wrapper for GitHubClient, kept for a function-style public API.
 */
function createGitHubClient(options) {
  const client = new GitHubClient(options);
  return client;
}
|
712
|
+
const DEFAULT_PR_BODY_TEMPLATE = dedent`
|
|
713
|
+
This PR was automatically generated by the release script.
|
|
714
|
+
|
|
715
|
+
The following packages have been prepared for release:
|
|
716
|
+
|
|
717
|
+
<% it.packages.forEach((pkg) => { %>
|
|
718
|
+
- **<%= pkg.name %>**: <%= pkg.currentVersion %> → <%= pkg.newVersion %> (<%= pkg.bumpType %>)
|
|
719
|
+
<% }) %>
|
|
720
|
+
|
|
721
|
+
Please review the changes and merge when ready.
|
|
722
|
+
|
|
723
|
+
For a more in-depth look at the changes, please refer to the individual package changelogs.
|
|
724
|
+
|
|
725
|
+
> [!NOTE]
|
|
726
|
+
> When this PR is merged, the release process will be triggered automatically, publishing the new package versions to the registry.
|
|
727
|
+
`;
|
|
728
|
+
/**
 * Strip the common leading indentation from a multi-line string and trim it.
 * Blank lines are ignored when computing the minimum indent; an all-blank
 * input is returned trimmed as-is.
 */
function dedentString(str) {
  const lines = str.split("\n");
  let minIndent = Infinity;
  for (const line of lines) {
    if (line.trim().length === 0) continue;
    minIndent = Math.min(minIndent, line.search(/\S/));
  }
  const stripped = minIndent === Infinity
    ? lines
    : lines.map((line) => line.slice(minIndent));
  return stripped.join("\n").trim();
}
|
|
733
|
+
/**
 * Render the release PR body from the computed version updates.
 * A custom template (dedented first) may override the default one.
 */
function generatePullRequestBody(updates, body) {
  const bodyTemplate = body ? dedentString(body) : DEFAULT_PR_BODY_TEMPLATE;
  const packages = updates.map((u) => ({
    name: u.package.name,
    currentVersion: u.currentVersion,
    newVersion: u.newVersion,
    bumpType: u.bumpType,
    hasDirectChanges: u.hasDirectChanges
  }));
  return new Eta().renderString(bodyTemplate, { packages });
}
|
|
505
744
|
|
|
506
745
|
//#endregion
|
|
507
|
-
//#region src/
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
min: 1,
|
|
519
|
-
hint: "Space to select/deselect. Return to submit."
|
|
520
|
-
});
|
|
521
|
-
if (!response.selectedPackages || response.selectedPackages.length === 0) throw new Error("No packages selected");
|
|
522
|
-
return response.selectedPackages;
|
|
746
|
+
//#region src/versioning/commits.ts
|
|
747
|
+
/**
 * Reduce a list of commits to the single highest bump type:
 * major > minor > patch > none. Short-circuits on the first major.
 */
function determineHighestBump(commits) {
  let highest = "none";
  for (const commit of commits) {
    switch (determineBumpType(commit)) {
      case "major":
        // Nothing outranks a major bump.
        return "major";
      case "minor":
        highest = "minor";
        break;
      case "patch":
        if (highest === "none") highest = "patch";
        break;
    }
  }
  return highest;
}
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
758
|
+
/**
 * Get commits grouped by workspace package.
 * For each package, retrieves all commits since its last release tag that
 * affect that package's folder. All packages are queried in parallel.
 *
 * @param {string} workspaceRoot - The root directory of the workspace
 * @param {WorkspacePackage[]} packages - Array of workspace packages to analyze
 * @returns {Promise<Map<string, GitCommit[]>>} Map of package name to its commits since its last release
 */
async function getWorkspacePackageGroupedCommits(workspaceRoot, packages) {
  const perPackage = await Promise.all(packages.map(async (pkg) => {
    const lastTag = await getMostRecentPackageTag(workspaceRoot, pkg.name);
    const allCommits = await getCommits({
      from: lastTag,
      to: "HEAD",
      cwd: workspaceRoot,
      folder: pkg.path
    });
    logger.verbose(`Found ${farver.cyan(allCommits.length)} commits for package ${farver.bold(pkg.name)} since tag ${farver.cyan(lastTag ?? "N/A")}`);
    return {
      pkgName: pkg.name,
      commits: allCommits
    };
  }));
  const changedPackages = /* @__PURE__ */ new Map();
  for (const { pkgName, commits } of perPackage) changedPackages.set(pkgName, commits);
  return changedPackages;
}
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
786
|
+
/**
 * Check if a file path touches any package folder.
 * Package paths may be absolute (under workspaceRoot) or already relative;
 * both sides are normalized before comparison.
 *
 * @param file - The file path to check (may carry a "./" prefix)
 * @param packagePaths - Set of package paths
 * @param workspaceRoot - The workspace root used for path normalization
 * @returns true if the file is inside (or equal to) a package folder
 */
function fileMatchesPackageFolder(file, packagePaths, workspaceRoot) {
  const normalizedFile = file.startsWith("./") ? file.slice(2) : file;
  for (const pkgPath of packagePaths) {
    const normalizedPkgPath = pkgPath.startsWith(workspaceRoot)
      ? pkgPath.slice(workspaceRoot.length + 1)
      : pkgPath;
    const insideFolder = normalizedFile.startsWith(`${normalizedPkgPath}/`);
    if (insideFolder || normalizedFile === normalizedPkgPath) return true;
  }
  return false;
}
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
801
|
+
/**
 * Check if a commit is a "global" commit (doesn't touch any package folder).
 * Commits with no known file list are NOT considered global.
 *
 * @param workspaceRoot - The workspace root
 * @param files - Array of files changed in the commit
 * @param packagePaths - Set of package paths
 * @returns true if this is a global commit
 */
function isGlobalCommit(workspaceRoot, files, packagePaths) {
  if (!files || files.length === 0) return false;
  const touchesAnyPackage = files.some((file) => fileMatchesPackageFolder(file, packagePaths, workspaceRoot));
  return !touchesAnyPackage;
}
|
|
812
|
+
// Root-level manifest/lockfile names whose changes count as dependency-related
// when filtering global commits in "dependencies" mode (see getGlobalCommitsPerPackage).
const DEPENDENCY_FILES = [
	"package.json",
	"pnpm-lock.yaml",
	"pnpm-workspace.yaml",
	"yarn.lock",
	"package-lock.json"
];
|
|
819
|
+
/**
 * Find the oldest and newest commits across all packages.
 * The first entry of the first non-empty list is taken as "newest" and the
 * last entry of the last non-empty list as "oldest" (commit lists appear to
 * be ordered newest-first — confirm against getCommits ordering).
 *
 * @param packageCommits - Map of package commits
 * @returns Object with oldest and newest commit SHAs, or null if no commits
 */
function findCommitRange(packageCommits) {
  let oldest = null;
  let newest = null;
  for (const commits of packageCommits.values()) {
    if (commits.length === 0) continue;
    if (newest === null) newest = commits[0].shortHash;
    oldest = commits[commits.length - 1].shortHash;
  }
  if (oldest === null || newest === null) return null;
  return {
    oldest,
    newest
  };
}
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
840
|
+
/**
 * Get global commits for each package based on their individual commit timelines.
 *
 * A "global commit" is a commit that doesn't touch any package folder but may
 * affect all packages (e.g. root package.json, CI config, README).
 * Makes ONE batched git call to fetch the file lists for every commit in the
 * combined range, then filters per package.
 *
 * @param workspaceRoot - The root directory of the workspace
 * @param packageCommits - Map of package name to their commits
 * @param allPackages - All workspace packages (used to identify package folders)
 * @param mode - false (disabled), "all", or "dependencies" (only dependency-related)
 * @returns Map of package name to their global commits
 */
async function getGlobalCommitsPerPackage(workspaceRoot, packageCommits, allPackages, mode) {
  const result = /* @__PURE__ */ new Map();
  if (!mode) {
    logger.verbose("Global commits mode disabled");
    return result;
  }
  logger.verbose(`Computing global commits per-package (mode: ${farver.cyan(mode)})`);
  const commitRange = findCommitRange(packageCommits);
  if (!commitRange) {
    logger.verbose("No commits found across packages");
    return result;
  }
  logger.verbose("Fetching files for commits range", `${farver.cyan(commitRange.oldest)}..${farver.cyan(commitRange.newest)}`);
  const commitFilesMap = await getGroupedFilesByCommitSha(workspaceRoot, commitRange.oldest, commitRange.newest);
  if (!commitFilesMap) {
    logger.warn("Failed to get commit file list, returning empty global commits");
    return result;
  }
  logger.verbose("Got file lists for commits", `${farver.cyan(commitFilesMap.size)} commits in ONE git call`);
  const packagePaths = new Set(allPackages.map((p) => p.path));
  for (const [pkgName, commits] of packageCommits) {
    logger.verbose("Filtering global commits for package", `${farver.bold(pkgName)} from ${farver.cyan(commits.length)} commits`);
    // Keep only commits that have a known file list and touch no package folder.
    const globalCommitsAffectingPackage = commits.filter((commit) => {
      const files = commitFilesMap.get(commit.shortHash);
      return files ? isGlobalCommit(workspaceRoot, files, packagePaths) : false;
    });
    logger.verbose("Package global commits found", `${farver.bold(pkgName)}: ${farver.cyan(globalCommitsAffectingPackage.length)} global commits`);
    if (mode === "all") {
      result.set(pkgName, globalCommitsAffectingPackage);
      continue;
    }
    // "dependencies" mode: keep only global commits touching a manifest/lockfile.
    const dependencyCommits = globalCommitsAffectingPackage.filter((commit) => {
      const files = commitFilesMap.get(commit.shortHash);
      if (!files) return false;
      const touchesDeps = files.some((file) => DEPENDENCY_FILES.includes(file.startsWith("./") ? file.slice(2) : file));
      if (touchesDeps) logger.verbose("Global commit affects dependencies", `${farver.bold(pkgName)}: commit ${farver.cyan(commit.shortHash)} affects dependencies`);
      return touchesDeps;
    });
    logger.verbose("Global commits affect dependencies", `${farver.bold(pkgName)}: ${farver.cyan(dependencyCommits.length)} global commits affect dependencies`);
    result.set(pkgName, dependencyCommits);
  }
  return result;
}
|
|
616
|
-
function
|
|
617
|
-
if (
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
|
|
902
|
+
/**
 * Map a single conventional commit to its semver bump type.
 * Breaking changes are always major; feat -> minor; fix/perf -> patch;
 * every other (or non-conventional) commit triggers no bump.
 */
function determineBumpType(commit) {
  if (commit.isBreaking) return "major";
  if (!commit.isConventional || !commit.type) return "none";
  const bumpByType = new Map([
    ["feat", "minor"],
    ["fix", "patch"],
    ["perf", "patch"]
  ]);
  return bumpByType.get(commit.type) ?? "none";
}
|
|
622
|
-
|
|
920
|
+
|
|
921
|
+
//#endregion
|
|
922
|
+
//#region src/versioning/package.ts
|
|
923
|
+
/**
|
|
924
|
+
* Build a dependency graph from workspace packages
|
|
925
|
+
*
|
|
926
|
+
* Creates a bidirectional graph that maps:
|
|
927
|
+
* - packages: Map of package name → WorkspacePackage
|
|
928
|
+
* - dependents: Map of package name → Set of packages that depend on it
|
|
929
|
+
*
|
|
930
|
+
* @param packages - All workspace packages
|
|
931
|
+
* @returns Dependency graph with packages and dependents maps
|
|
932
|
+
*/
|
|
933
|
+
function buildPackageDependencyGraph(packages) {
|
|
623
934
|
const packagesMap = /* @__PURE__ */ new Map();
|
|
624
935
|
const dependents = /* @__PURE__ */ new Map();
|
|
625
936
|
for (const pkg of packages) {
|
|
@@ -638,203 +949,809 @@ function buildDependencyGraph(packages) {
|
|
|
638
949
|
dependents
|
|
639
950
|
};
|
|
640
951
|
}
|
|
641
|
-
|
|
642
|
-
|
|
643
|
-
|
|
644
|
-
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
|
|
650
|
-
|
|
952
|
+
/**
 * Get all packages affected by changes (including transitive dependents).
 * Depth-first traversal of the dependents graph starting from every
 * directly-changed package.
 *
 * @param graph - Dependency graph ({ dependents: Map<name, Set<name>> })
 * @param changedPackages - Set of package names with direct changes
 * @returns Set of all package names that need updates
 */
function getAllAffectedPackages(graph, changedPackages) {
  const affected = /* @__PURE__ */ new Set();
  const visit = (pkgName) => {
    if (affected.has(pkgName)) return;
    affected.add(pkgName);
    for (const dependent of graph.dependents.get(pkgName) ?? []) visit(dependent);
  };
  for (const pkg of changedPackages) visit(pkg);
  return affected;
}
|
|
974
|
+
/**
 * Create version updates for all packages affected by dependency changes.
 * Packages that depend (transitively) on a directly-updated package receive
 * a patch bump; packages with a direct update keep it unchanged.
 *
 * @param graph - Dependency graph
 * @param workspacePackages - All workspace packages
 * @param directUpdates - Packages with direct code changes
 * @returns All updates including dependent packages that need patch bumps
 */
function createDependentUpdates(graph, workspacePackages, directUpdates) {
  const allUpdates = [...directUpdates];
  const directlyUpdated = new Set(directUpdates.map((u) => u.package.name));
  const affectedPackages = getAllAffectedPackages(graph, directlyUpdated);
  for (const pkgName of affectedPackages) {
    logger.verbose(`Processing affected package: ${pkgName}`);
    if (directlyUpdated.has(pkgName)) {
      logger.verbose(`Skipping ${pkgName}, already has a direct update`);
      continue;
    }
    const pkg = workspacePackages.find((p) => p.name === pkgName);
    if (!pkg) continue;
    // Dependents without direct changes get a patch bump.
    allUpdates.push(createVersionUpdate(pkg, "patch", false));
  }
  return allUpdates;
}
|
|
1001
|
+
|
|
1002
|
+
//#endregion
|
|
1003
|
+
//#region src/versioning/version.ts
|
|
1004
|
+
/**
 * Loose semver check: MAJOR.MINOR.PATCH with an optional pre-release/build
 * suffix introduced by "-" or "+".
 */
function isValidSemver(version) {
  const SEMVER_PATTERN = /^\d+\.\d+\.\d+(?:[-+].+)?$/;
  return SEMVER_PATTERN.test(version);
}
|
|
1007
|
+
/**
 * Compute the next version for a given bump type.
 * "none" returns the current version unchanged; otherwise the corresponding
 * component is incremented and lower components reset.
 * NOTE: any pre-release/build suffix is dropped when bumping.
 * Throws when currentVersion is not valid semver.
 */
function getNextVersion(currentVersion, bump) {
  if (bump === "none") {
    logger.verbose(`No version bump needed, keeping version ${currentVersion}`);
    return currentVersion;
  }
  if (!isValidSemver(currentVersion)) throw new Error(`Cannot bump version for invalid semver: ${currentVersion}`);
  const match = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)(.*)$/);
  if (!match) throw new Error(`Invalid semver version: ${currentVersion}`);
  const major = Number.parseInt(match[1], 10);
  const minor = Number.parseInt(match[2], 10);
  const patch = Number.parseInt(match[3], 10);
  if (bump === "major") return `${major + 1}.0.0`;
  if (bump === "minor") return `${major}.${minor + 1}.0`;
  if (bump === "patch") return `${major}.${minor}.${patch + 1}`;
  // Unknown bump value: fall through with components unchanged (matches
  // the original switch without a default branch).
  return `${major}.${minor}.${patch}`;
}
|
|
1035
|
+
/**
 * Build a version-update record for a package: its current version, the
 * computed next version for the given bump, and whether the package had
 * direct code changes.
 */
function createVersionUpdate(pkg, bump, hasDirectChanges) {
  return {
    package: pkg,
    currentVersion: pkg.version,
    newVersion: getNextVersion(pkg.version, bump),
    bumpType: bump,
    hasDirectChanges
  };
}
|
|
1045
|
+
/**
 * Derive the bump type between two semver versions by comparing components
 * left to right.
 * NOTE(review): assumes newVersion >= oldVersion; a downgraded major with a
 * higher minor would be misreported as "minor" — confirm callers never pass
 * downgrades. Pre-release components are not compared numerically.
 * Throws on invalid semver input.
 */
function _calculateBumpType(oldVersion, newVersion) {
  if (!isValidSemver(oldVersion) || !isValidSemver(newVersion)) throw new Error(`Cannot calculate bump type for invalid semver: ${oldVersion} or ${newVersion}`);
  const [oldMajor, oldMinor, oldPatch] = oldVersion.split(".").map(Number);
  const [newMajor, newMinor, newPatch] = newVersion.split(".").map(Number);
  if (newMajor > oldMajor) return "major";
  if (newMinor > oldMinor) return "minor";
  if (newPatch > oldPatch) return "patch";
  return "none";
}
|
|
1054
|
+
/**
 * Conventional-commit type → farver color used when rendering commit lists.
 * Types that share a semantic family share a color; unknown types fall back
 * to an identity function at the call site.
 */
const messageColorMap = {
  // new functionality
  feat: farver.green,
  feature: farver.green,
  // code-shape changes
  refactor: farver.cyan,
  style: farver.cyan,
  // documentation / typing
  docs: farver.blue,
  doc: farver.blue,
  types: farver.blue,
  type: farver.blue,
  // housekeeping — gray entries are rendered un-bolded by the formatter
  chore: farver.gray,
  ci: farver.gray,
  build: farver.gray,
  deps: farver.gray,
  dev: farver.gray,
  // fixes and tests
  fix: farver.yellow,
  test: farver.yellow,
  // performance and dangerous changes
  perf: farver.magenta,
  revert: farver.red,
  breaking: farver.red
};
|
|
1074
|
+
/**
 * Render a list of parsed commits as aligned, colorized terminal lines.
 * Shows at most 10 commits, with a dimmed "... and N more commits" trailer.
 * Column widths (type/scope) are computed over ALL commits so alignment is
 * stable even when the list is truncated.
 *
 * @param commits - parsed conventional commits ({ type, scope, shortHash,
 *                  description, isBreaking })
 * @returns a newline-joined string ready for logger output
 */
function formatCommitsForDisplay(commits) {
  if (commits.length === 0) return farver.dim("No commits found");
  const maxCommitsToShow = 10;
  const visible = commits.slice(0, maxCommitsToShow);
  const truncated = commits.length > maxCommitsToShow;
  // widest type / scope across the whole list, for column alignment
  const typeLength = commits.reduce((w, { type }) => Math.max(w, type.length), 0);
  const scopeLength = commits
    .map(({ scope }) => scope?.length)
    .reduce((a, b) => Math.max(a || 0, b || 0), 0) || 0;
  const lines = visible.map((commit) => {
    let color = messageColorMap[commit.type] || ((c) => c);
    // breaking changes override the per-type color with inverse red
    if (commit.isBreaking) color = (s) => farver.inverse.red(s);
    const paddedType = commit.type.padStart(typeLength + 1, " ");
    const paddedScope = !commit.scope
      ? " ".repeat(scopeLength ? scopeLength + 2 : 0)
      : farver.dim("(") + commit.scope + farver.dim(")") + " ".repeat(scopeLength - commit.scope.length);
    // gray (housekeeping) entries are not bolded; everything else is
    const typeCell = color === farver.gray ? color(paddedType) : farver.bold(color(paddedType));
    const descCell = color === farver.gray ? color(commit.description) : commit.description;
    return [
      farver.dim(commit.shortHash),
      " ",
      typeCell,
      " ",
      paddedScope,
      farver.dim(":"),
      " ",
      descCell
    ].join("");
  }).join("\n");
  if (truncated) return `${lines}\n ${farver.dim(`... and ${commits.length - maxCommitsToShow} more commits`)}`;
  return lines;
}
|
|
1100
|
+
/**
 * Determine the version bump for every package that has commits, optionally
 * letting the user confirm/override each version interactively.
 *
 * Overrides semantics: a user-chosen bump LOWER than the inferred one is
 * persisted in `newOverrides` (so CI runs reproduce the choice); picking a
 * bump at-or-above the inferred one clears any stale override.
 *
 * When prompting is active, packages with NO commits are also offered for a
 * manual version bump in a second pass.
 *
 * @returns {{ updates: VersionUpdate[], overrides: Record<string, {type, version}> }}
 */
async function calculateVersionUpdates({ workspacePackages, packageCommits, workspaceRoot, showPrompt, globalCommitsPerPackage, overrides: initialOverrides = {} }) {
  const versionUpdates = [];
  const processedPackages = new Set();
  const newOverrides = { ...initialOverrides };
  // rank used to compare user choice vs inferred bump
  const bumpRanks = {
    major: 3,
    minor: 2,
    patch: 1,
    none: 0
  };
  logger.verbose(`Starting version inference for ${packageCommits.size} packages with commits`);
  for (const [pkgName, pkgCommits] of packageCommits) {
    const pkg = workspacePackages.find((p) => p.name === pkgName);
    if (!pkg) {
      logger.error(`Package ${pkgName} not found in workspace packages, skipping`);
      continue;
    }
    processedPackages.add(pkgName);
    // direct commits plus any global commits attributed to this package
    const globalCommits = globalCommitsPerPackage.get(pkgName) || [];
    const allCommitsForPackage = [...pkgCommits, ...globalCommits];
    const determinedBump = determineHighestBump(allCommitsForPackage);
    const override = newOverrides[pkgName];
    // a recorded override wins over the inferred bump
    const effectiveBump = override?.type || determinedBump;
    if (effectiveBump === "none") continue;
    let newVersion = override?.version || getNextVersion(pkg.version, effectiveBump);
    let finalBumpType = effectiveBump;
    if (!isCI && showPrompt) {
      logger.clearScreen();
      logger.section(`📝 Commits for ${farver.cyan(pkg.name)}`);
      for (const line of formatCommitsForDisplay(allCommitsForPackage).split("\n")) {
        logger.item(line);
      }
      logger.emptyLine();
      const selectedVersion = await selectVersionPrompt(workspaceRoot, pkg, pkg.version, newVersion);
      // null means the user skipped this package entirely
      if (selectedVersion === null) continue;
      const userBump = _calculateBumpType(pkg.version, selectedVersion);
      finalBumpType = userBump;
      if (bumpRanks[userBump] < bumpRanks[determinedBump]) {
        // user chose a smaller bump than inferred — remember it
        newOverrides[pkgName] = {
          type: userBump,
          version: selectedVersion
        };
        logger.info(`Version override recorded for ${pkgName}: ${determinedBump} → ${userBump}`);
      } else if (newOverrides[pkgName] && bumpRanks[userBump] >= bumpRanks[determinedBump]) {
        // user restored at least the inferred bump — drop the stale override
        delete newOverrides[pkgName];
        logger.info(`Version override removed for ${pkgName}.`);
      }
      newVersion = selectedVersion;
    }
    versionUpdates.push({
      package: pkg,
      currentVersion: pkg.version,
      newVersion,
      bumpType: finalBumpType,
      hasDirectChanges: allCommitsForPackage.length > 0
    });
  }
  // second pass: offer manual bumps for packages with no commits at all
  if (!isCI && showPrompt) {
    for (const pkg of workspacePackages) {
      if (processedPackages.has(pkg.name)) continue;
      logger.clearScreen();
      logger.section(`📦 Package: ${pkg.name}`);
      logger.item("No direct commits found");
      const newVersion = await selectVersionPrompt(workspaceRoot, pkg, pkg.version, pkg.version);
      // null aborts the whole second pass (matches original behavior)
      if (newVersion === null) break;
      if (newVersion !== pkg.version) {
        const bumpType = _calculateBumpType(pkg.version, newVersion);
        versionUpdates.push({
          package: pkg,
          currentVersion: pkg.version,
          newVersion,
          bumpType,
          hasDirectChanges: false
        });
      }
    }
  }
  return {
    updates: versionUpdates,
    overrides: newOverrides
  };
}
|
|
1178
|
+
/**
|
|
1179
|
+
* Calculate version updates and prepare dependent updates
|
|
1180
|
+
* Returns both the updates and a function to apply them
|
|
1181
|
+
*/
|
|
1182
|
+
/**
 * Calculate version updates (direct + dependents) and prepare dependent updates.
 * Returns the full update list, the (possibly updated) overrides map, and a
 * deferred `applyUpdates` function that writes all package.json files.
 */
async function calculateAndPrepareVersionUpdates({ workspacePackages, packageCommits, workspaceRoot, showPrompt, globalCommitsPerPackage, overrides }) {
  // 1. direct updates inferred from commits (possibly user-confirmed)
  const { updates: directUpdates, overrides: newOverrides } = await calculateVersionUpdates({
    workspacePackages,
    packageCommits,
    workspaceRoot,
    showPrompt,
    globalCommitsPerPackage,
    overrides
  });
  // 2. cascade bumps to workspace dependents via the dependency graph
  const dependencyGraph = buildPackageDependencyGraph(workspacePackages);
  const allUpdates = createDependentUpdates(dependencyGraph, workspacePackages, directUpdates);
  // 3. deferred writer — rewrites every affected package.json in parallel
  const applyUpdates = async () => {
    await Promise.all(allUpdates.map(async (update) => {
      const depUpdates = getDependencyUpdates(update.package, allUpdates);
      await updatePackageJson(update.package, update.newVersion, depUpdates);
    }));
  };
  return {
    allUpdates,
    applyUpdates,
    overrides: newOverrides
  };
}
|
|
1204
|
+
/**
 * Rewrite a package's package.json with its new version and refreshed
 * workspace-dependency ranges.
 *
 * Range rules: regular/dev dependencies get `^version`; peer dependencies get
 * a `>=version <nextMajor.0.0` window. `workspace:*` specifiers are left
 * untouched (pnpm resolves them at publish time).
 *
 * @param pkg - workspace package (provides `path`)
 * @param newVersion - version to write into the manifest
 * @param dependencyUpdates - Map of depName → new version
 */
async function updatePackageJson(pkg, newVersion, dependencyUpdates) {
  const packageJsonPath = join(pkg.path, "package.json");
  const packageJson = JSON.parse(await readFile(packageJsonPath, "utf-8"));
  packageJson.version = newVersion;
  // mutate one dependency entry in-place, if present
  const updateDependency = (deps, depName, depVersion, isPeerDependency = false) => {
    if (!deps) return;
    const oldVersion = deps[depName];
    if (!oldVersion) return;
    if (oldVersion === "workspace:*") {
      logger.verbose(` - Skipping workspace:* dependency: ${depName}`);
      return;
    }
    if (isPeerDependency) {
      const majorVersion = depVersion.split(".")[0];
      deps[depName] = `>=${depVersion} <${Number(majorVersion) + 1}.0.0`;
    } else {
      deps[depName] = `^${depVersion}`;
    }
    logger.verbose(` - Updated dependency ${depName}: ${oldVersion} → ${deps[depName]}`);
  };
  for (const [depName, depVersion] of dependencyUpdates) {
    updateDependency(packageJson.dependencies, depName, depVersion);
    updateDependency(packageJson.devDependencies, depName, depVersion);
    updateDependency(packageJson.peerDependencies, depName, depVersion, true);
  }
  // trailing newline keeps the file POSIX-friendly and diff-stable
  await writeFile(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf-8");
  logger.verbose(` - Successfully wrote updated package.json`);
}
|
|
1231
|
+
/**
|
|
1232
|
+
* Get all dependency updates needed for a package
|
|
1233
|
+
*/
|
|
1234
|
+
/**
 * Get all dependency updates needed for a package.
 *
 * Improvement: the original ran `allUpdates.find(...)` once per dependency
 * (O(deps × updates)); we now index `allUpdates` by package name once.
 * The first occurrence of a name wins, matching `Array.prototype.find`.
 *
 * @param pkg - workspace package with `workspaceDependencies` /
 *              `workspaceDevDependencies` name lists
 * @param allUpdates - every pending VersionUpdate
 * @returns Map of dependency name → new version
 */
function getDependencyUpdates(pkg, allUpdates) {
  const updates = new Map();
  // name → first matching update (mirrors find()'s first-match semantics)
  const updatesByName = new Map();
  for (const u of allUpdates) {
    if (!updatesByName.has(u.package.name)) updatesByName.set(u.package.name, u);
  }
  const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
  for (const dep of allDeps) {
    const update = updatesByName.get(dep);
    if (update) {
      logger.verbose(` - Dependency ${dep} will be updated: ${update.currentVersion} → ${update.newVersion} (${update.bumpType})`);
      updates.set(dep, update.newVersion);
    }
  }
  if (updates.size === 0) logger.verbose(` - No dependency updates needed`);
  return updates;
}
|
|
674
1247
|
|
|
675
1248
|
//#endregion
|
|
676
|
-
//#region src/
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
|
|
1249
|
+
//#region src/core/prompts.ts
|
|
1250
|
+
/**
 * Interactively choose which workspace packages to release.
 * All packages start selected; at least one must be chosen.
 *
 * @param packages - workspace packages ({ name, version })
 * @returns array of selected package names ([] if the prompt was cancelled)
 */
async function selectPackagePrompt(packages) {
  const choices = packages.map((pkg) => ({
    title: `${pkg.name} (${farver.bold(pkg.version)})`,
    value: pkg.name,
    selected: true
  }));
  const response = await prompts({
    type: "multiselect",
    name: "selectedPackages",
    message: "Select packages to release",
    choices,
    min: 1,
    hint: "Space to select/deselect. Return to submit.",
    instructions: false
  });
  // cancelled prompt yields undefined; treat both that and [] as "nothing"
  return response.selectedPackages?.length ? response.selectedPackages : [];
}
|
|
1267
|
+
/**
 * Prompt the user for a package's next version.
 *
 * Offers skip / major / minor / patch / suggested / custom. Returns the
 * resolved version string, or null when the user skips or cancels.
 * The `workspaceRoot` parameter is part of the call signature but unused
 * here — kept for interface compatibility.
 *
 * @returns {Promise<string|null>}
 */
async function selectVersionPrompt(workspaceRoot, pkg, currentVersion, suggestedVersion) {
  const versionQuestion = {
    type: "autocomplete",
    name: "version",
    message: `${pkg.name}: ${farver.green(pkg.version)}`,
    choices: [
      {
        value: "skip",
        title: `skip ${farver.dim("(no change)")}`
      },
      {
        value: "major",
        title: `major ${farver.bold(getNextVersion(pkg.version, "major"))}`
      },
      {
        value: "minor",
        title: `minor ${farver.bold(getNextVersion(pkg.version, "minor"))}`
      },
      {
        value: "patch",
        title: `patch ${farver.bold(getNextVersion(pkg.version, "patch"))}`
      },
      {
        value: "suggested",
        title: `suggested ${farver.bold(suggestedVersion)}`
      },
      {
        value: "custom",
        title: "custom"
      }
    ],
    // default to "skip" when no bump is suggested, else to "suggested"
    initial: suggestedVersion === currentVersion ? 0 : 4
  };
  const customQuestion = {
    // only shown when "custom" was picked in the previous question
    type: (prev) => prev === "custom" ? "text" : null,
    name: "custom",
    message: "Enter the new version number:",
    initial: suggestedVersion,
    validate: (custom) => {
      if (isValidSemver(custom)) return true;
      return "That's not a valid version number";
    }
  };
  const answers = await prompts([versionQuestion, customQuestion]);
  // cancelled prompt → no version key at all
  if (!answers.version) return null;
  switch (answers.version) {
    case "skip":
      return null;
    case "suggested":
      return suggestedVersion;
    case "custom":
      return answers.custom ? answers.custom : null;
    default:
      // "major" | "minor" | "patch"
      return getNextVersion(pkg.version, answers.version);
  }
}
|
|
1317
|
+
|
|
1318
|
+
//#endregion
|
|
1319
|
+
//#region src/core/workspace.ts
|
|
1320
|
+
/**
 * Resolve the set of workspace packages to operate on.
 *
 * `options.packages` may be:
 *  - undefined/true  → all packages (private ones included)
 *  - string[]        → an explicit include list (missing names are fatal)
 *  - object          → full filter options ({ include, exclude, excludePrivate })
 *
 * When running interactively without an explicit list, the user is offered a
 * multiselect to narrow the set further.
 *
 * @returns the filtered array of workspace packages
 */
async function discoverWorkspacePackages(workspaceRoot, options) {
  let workspaceOptions;
  let explicitPackages;
  if (options.packages == null || options.packages === true) {
    workspaceOptions = { excludePrivate: false };
  } else if (Array.isArray(options.packages)) {
    workspaceOptions = {
      excludePrivate: false,
      include: options.packages
    };
    explicitPackages = options.packages;
  } else {
    workspaceOptions = options.packages;
    if (options.packages.include) explicitPackages = options.packages.include;
  }
  let workspacePackages = await findWorkspacePackages(workspaceRoot, workspaceOptions);
  // an explicitly requested package that doesn't exist is a hard error
  if (explicitPackages) {
    const foundNames = new Set(workspacePackages.map((p) => p.name));
    const missing = explicitPackages.filter((p) => !foundNames.has(p));
    if (missing.length > 0) exitWithError(`Package${missing.length > 1 ? "s" : ""} not found in workspace: ${missing.join(", ")}`, "Check your package names or run 'pnpm ls' to see available packages");
  }
  const isPackagePromptEnabled = options.prompts?.packages !== false;
  // interactive narrowing — skipped in CI or when an explicit list was given
  if (!isCI && isPackagePromptEnabled && !explicitPackages) {
    const selectedNames = await selectPackagePrompt(workspacePackages);
    workspacePackages = workspacePackages.filter((pkg) => selectedNames.includes(pkg.name));
  }
  return workspacePackages;
}
|
|
1347
|
+
/**
 * Enumerate workspace packages via `pnpm -r ls --json` and enrich each with
 * its parsed package.json plus the subset of its (dev)dependencies that are
 * themselves workspace packages.
 *
 * Fix: the empty-object fallbacks for `dependencies`/`devDependencies` used
 * `|| []` (an array) instead of `|| {}`; `Object.keys([])` happens to yield
 * `[]` so the result was the same, but the intent is an object fallback.
 *
 * @param workspaceRoot - directory to run pnpm in
 * @param options - filter options forwarded to `shouldIncludePackage`
 * @returns array of workspace package records
 * @throws rethrows any pnpm/parse/read failure after logging it
 */
async function findWorkspacePackages(workspaceRoot, options) {
  try {
    const result = await run("pnpm", [
      "-r",
      "ls",
      "--json"
    ], { nodeOptions: {
      cwd: workspaceRoot,
      stdio: "pipe"
    } });
    const rawProjects = JSON.parse(result.stdout);
    const allPackageNames = new Set(rawProjects.map((p) => p.name));
    const excludedPackages = new Set();
    const promises = rawProjects.map(async (rawProject) => {
      const content = await readFile(join(rawProject.path, "package.json"), "utf-8");
      const packageJson = JSON.parse(content);
      if (!shouldIncludePackage(packageJson, options)) {
        excludedPackages.add(rawProject.name);
        return null;
      }
      // keep only deps that point at sibling workspace packages
      const onlyWorkspace = (deps) => Object.keys(deps || {}).filter((dep) => allPackageNames.has(dep));
      return {
        name: rawProject.name,
        version: rawProject.version,
        path: rawProject.path,
        packageJson,
        workspaceDependencies: onlyWorkspace(rawProject.dependencies),
        workspaceDevDependencies: onlyWorkspace(rawProject.devDependencies)
      };
    });
    const packages = await Promise.all(promises);
    if (excludedPackages.size > 0) logger.info(`Excluded packages: ${farver.green(Array.from(excludedPackages).join(", "))}`);
    return packages.filter((pkg) => pkg !== null);
  } catch (err) {
    logger.error("Error discovering workspace packages:", err);
    throw err;
  }
}
|
|
1388
|
+
/**
 * Decide whether a package (by its parsed package.json) passes the
 * include/exclude/private filters.
 *
 * Precedence: no options → include; private + excludePrivate → exclude;
 * non-empty include list must contain the name; exclude list always wins last.
 *
 * @param pkg - parsed package.json ({ name, private? })
 * @param options - { excludePrivate?, include?, exclude? } or undefined
 * @returns {boolean} true when the package should be processed
 */
function shouldIncludePackage(pkg, options) {
  if (!options) return true;
  if (options.excludePrivate && pkg.private) return false;
  const hasIncludeList = Boolean(options.include && options.include.length > 0);
  if (hasIncludeList && !options.include.includes(pkg.name)) return false;
  if (options.exclude?.includes(pkg.name)) return false;
  return true;
}
|
|
1397
|
+
|
|
1398
|
+
//#endregion
|
|
1399
|
+
//#region src/shared/options.ts
|
|
1400
|
+
/**
 * Default changelog layout: each group collects the listed conventional-commit
 * types under a titled section, in this order.
 */
const DEFAULT_COMMIT_GROUPS = [
  { name: "features", title: "Features", types: ["feat"] },
  { name: "fixes", title: "Bug Fixes", types: ["fix", "perf"] },
  { name: "refactor", title: "Refactoring", types: ["refactor"] },
  { name: "docs", title: "Documentation", types: ["docs"] }
];
|
|
1422
|
+
/**
 * Validate and normalize the options shared by release/publish flows.
 *
 * Exits the process (via exitWithError) when the GitHub token is missing or
 * the repo is not in "owner/repo" form. The `packages` option is normalized
 * to a fully-populated filter object only when it was already object-shaped;
 * `true` and array forms pass through unchanged.
 *
 * @param options - raw user options
 * @returns normalized shared options ({ packages, prompts, workspaceRoot,
 *          githubToken, owner, repo, groups })
 */
function normalizeSharedOptions(options) {
  const {
    workspaceRoot = process.cwd(),
    githubToken = "",
    repo: fullRepo,
    packages = true,
    prompts: promptOptions = {
      packages: true,
      versions: true
    },
    groups = DEFAULT_COMMIT_GROUPS
  } = options;
  if (!githubToken.trim()) exitWithError("GitHub token is required", "Set GITHUB_TOKEN environment variable or pass it in options");
  if (!fullRepo || !fullRepo.trim() || !fullRepo.includes("/")) exitWithError("Repository (repo) is required", "Specify the repository in 'owner/repo' format (e.g., 'octocat/hello-world')");
  const [owner, repo] = fullRepo.split("/");
  if (!owner || !repo) exitWithError(`Invalid repo format: "${fullRepo}"`, "Expected format: \"owner/repo\" (e.g., \"octocat/hello-world\")");
  // only plain-object `packages` gets filled with defaults; true/array pass through
  const normalizedPackages = typeof packages === "object" && !Array.isArray(packages)
    ? {
        exclude: packages.exclude ?? [],
        include: packages.include ?? [],
        excludePrivate: packages.excludePrivate ?? false
      }
    : packages;
  return {
    packages: normalizedPackages,
    prompts: {
      // `?.` guards an explicitly-null prompts option (default only covers undefined)
      packages: promptOptions?.packages ?? true,
      versions: promptOptions?.versions ?? true
    },
    workspaceRoot,
    githubToken,
    owner,
    repo,
    groups
  };
}
|
|
1448
|
+
/**
 * Normalize release-specific options on top of the shared ones.
 *
 * Resolves the default branch from git when not supplied, then validates
 * that it exists and differs from the release branch. Any validation
 * failure exits the process via exitWithError.
 *
 * @param options - raw release options
 * @returns fully-normalized release options
 */
async function normalizeReleaseOptions(options) {
  const normalized = normalizeSharedOptions(options);
  let defaultBranch = options.branch?.default?.trim();
  const releaseBranch = options.branch?.release?.trim() ?? "release/next";
  // fall back to the repository's actual default branch
  if (defaultBranch == null || defaultBranch === "") {
    defaultBranch = await getDefaultBranch(normalized.workspaceRoot);
    if (!defaultBranch) exitWithError("Could not determine default branch", "Please specify the default branch in options");
  }
  if (defaultBranch === releaseBranch) exitWithError(`Default branch and release branch cannot be the same: "${defaultBranch}"`, "Specify different branches for default and release");
  const availableBranches = await getAvailableBranches(normalized.workspaceRoot);
  if (!availableBranches.includes(defaultBranch)) exitWithError(`Default branch "${defaultBranch}" does not exist in the repository`, `Available branches: ${availableBranches.join(", ")}`);
  logger.verbose(`Using default branch: ${farver.green(defaultBranch)}`);
  return {
    ...normalized,
    branch: {
      release: releaseBranch,
      default: defaultBranch
    },
    safeguards: options.safeguards ?? true,
    globalCommitMode: options.globalCommitMode ?? "dependencies",
    pullRequest: {
      title: options.pullRequest?.title ?? "chore: release new version",
      body: options.pullRequest?.body ?? DEFAULT_PR_BODY_TEMPLATE
    },
    changelog: {
      enabled: options.changelog?.enabled ?? true,
      template: options.changelog?.template ?? DEFAULT_CHANGELOG_TEMPLATE
    }
  };
}
|
780
|
-
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
1478
|
+
|
|
1479
|
+
//#endregion
|
|
1480
|
+
//#region src/release.ts
|
|
1481
|
+
/**
 * Main release entry point: discover packages, infer/confirm version bumps,
 * update manifests and changelogs, and create or refresh the release PR.
 *
 * Returns null when there are no packages, or no changes and no existing PR;
 * otherwise `{ updates, prUrl, created }`.
 */
async function release(options) {
  const { workspaceRoot, ...normalizedOptions } = await normalizeReleaseOptions(options);
  // --- safeguards & discovery -------------------------------------------
  if (normalizedOptions.safeguards && !await isWorkingDirectoryClean(workspaceRoot)) exitWithError("Working directory is not clean. Please commit or stash your changes before proceeding.");
  const workspacePackages = await discoverWorkspacePackages(workspaceRoot, options);
  if (workspacePackages.length === 0) {
    logger.warn("No packages found to release");
    return null;
  }
  logger.section("📦 Workspace Packages");
  logger.item(`Found ${workspacePackages.length} packages`);
  for (const pkg of workspacePackages) {
    logger.item(`${farver.cyan(pkg.name)} (${farver.bold(pkg.version)})`);
    logger.item(` ${farver.gray("→")} ${farver.gray(pkg.path)}`);
  }
  logger.emptyLine();
  // --- commit analysis & PR scaffolding ---------------------------------
  const groupedPackageCommits = await getWorkspacePackageGroupedCommits(workspaceRoot, workspacePackages);
  const globalCommitsPerPackage = await getGlobalCommitsPerPackage(workspaceRoot, groupedPackageCommits, workspacePackages, normalizedOptions.globalCommitMode);
  const githubClient = createGitHubClient({
    owner: normalizedOptions.owner,
    repo: normalizedOptions.repo,
    githubToken: normalizedOptions.githubToken
  });
  const prOps = await orchestrateReleasePullRequest({
    workspaceRoot,
    githubClient,
    releaseBranch: normalizedOptions.branch.release,
    defaultBranch: normalizedOptions.branch.default,
    pullRequestTitle: options.pullRequest?.title,
    pullRequestBody: options.pullRequest?.body
  });
  await prOps.prepareBranch();
  // --- load persisted version overrides (best-effort) -------------------
  const overridesPath = join(workspaceRoot, ucdjsReleaseOverridesPath);
  let existingOverrides = {};
  try {
    existingOverrides = JSON.parse(await readFile(overridesPath, "utf-8"));
    logger.info("Found existing version overrides file.");
  } catch {
    logger.info("No existing version overrides file found. Continuing...");
  }
  // --- compute version updates ------------------------------------------
  const { allUpdates, applyUpdates, overrides: newOverrides } = await calculateAndPrepareVersionUpdates({
    workspacePackages,
    packageCommits: groupedPackageCommits,
    workspaceRoot,
    showPrompt: options.prompts?.versions !== false,
    globalCommitsPerPackage,
    overrides: existingOverrides
  });
  // persist new overrides so CI runs reproduce interactive choices
  if (Object.keys(newOverrides).length > 0) {
    logger.info("Writing version overrides file...");
    try {
      await mkdir(join(workspaceRoot, ".github"), { recursive: true });
      await writeFile(overridesPath, JSON.stringify(newOverrides, null, 2), "utf-8");
      logger.success("Successfully wrote version overrides file.");
    } catch (e) {
      logger.error("Failed to write version overrides file:", e);
    }
  }
  // drop the overrides file once computed versions have outgrown every override
  if (Object.keys(newOverrides).length === 0 && Object.keys(existingOverrides).length > 0) {
    let shouldRemoveOverrides = false;
    for (const update of allUpdates) {
      const overriddenVersion = existingOverrides[update.package.name];
      if (overriddenVersion && compare(update.newVersion, overriddenVersion.version) > 0) {
        shouldRemoveOverrides = true;
        break;
      }
    }
    if (shouldRemoveOverrides) {
      logger.info("Removing obsolete version overrides file...");
      try {
        await rm(overridesPath);
        logger.success("Successfully removed obsolete version overrides file.");
      } catch (e) {
        logger.error("Failed to remove obsolete version overrides file:", e);
      }
    }
  }
  if (allUpdates.filter((u) => u.hasDirectChanges).length === 0) logger.warn("No packages have changes requiring a release");
  // --- apply version bumps ----------------------------------------------
  logger.section("🔄 Version Updates");
  logger.item(`Updating ${allUpdates.length} packages (including dependents)`);
  for (const update of allUpdates) logger.item(`${update.package.name}: ${update.currentVersion} → ${update.newVersion}`);
  await applyUpdates();
  // --- changelogs --------------------------------------------------------
  if (normalizedOptions.changelog.enabled) {
    logger.step("Updating changelogs");
    const changelogPromises = allUpdates.map((update) => {
      const pkgCommits = groupedPackageCommits.get(update.package.name) || [];
      const globalCommits = globalCommitsPerPackage.get(update.package.name) || [];
      const allCommits = [...pkgCommits, ...globalCommits];
      if (allCommits.length === 0) {
        logger.verbose(`No commits for ${update.package.name}, skipping changelog`);
        return Promise.resolve();
      }
      logger.verbose(`Updating changelog for ${farver.cyan(update.package.name)}`);
      return updateChangelog({
        normalizedOptions: {
          ...normalizedOptions,
          workspaceRoot
        },
        githubClient,
        workspacePackage: update.package,
        version: update.newVersion,
        // "0.0.0" marks a first release — no previous version to diff against
        previousVersion: update.currentVersion !== "0.0.0" ? update.currentVersion : void 0,
        commits: allCommits,
        date: new Date().toISOString().split("T")[0]
      });
    }).filter((p) => p != null);
    const updates = await Promise.all(changelogPromises);
    logger.success(`Updated ${updates.length} changelog(s)`);
  }
  // --- push & PR ---------------------------------------------------------
  if (!await prOps.syncChanges(true)) {
    // nothing was pushed; refresh the existing PR body if there is one
    if (prOps.doesReleasePRExist && prOps.existingPullRequest) {
      logger.item("No updates needed, PR is already up to date");
      const { pullRequest: existingPR, created: existingCreated } = await prOps.syncPullRequest(allUpdates);
      await prOps.cleanup();
      return {
        updates: allUpdates,
        prUrl: existingPR?.html_url,
        created: existingCreated
      };
    }
    logger.error("No changes to commit, and no existing PR. Nothing to do.");
    return null;
  }
  const { pullRequest, created } = await prOps.syncPullRequest(allUpdates);
  await prOps.cleanup();
  if (pullRequest?.html_url) {
    logger.section("🚀 Pull Request");
    logger.success(`Pull request ${created ? "created" : "updated"}: ${pullRequest.html_url}`);
  }
  return {
    updates: allUpdates,
    prUrl: pullRequest?.html_url,
    created
  };
}
|
815
|
-
async function
|
|
816
|
-
const
|
|
817
|
-
|
|
818
|
-
|
|
819
|
-
|
|
820
|
-
|
|
821
|
-
|
|
1617
|
+
async function orchestrateReleasePullRequest({ workspaceRoot, githubClient, releaseBranch, defaultBranch, pullRequestTitle, pullRequestBody }) {
|
|
1618
|
+
const currentBranch = await getCurrentBranch(workspaceRoot);
|
|
1619
|
+
if (currentBranch !== defaultBranch) exitWithError(`Current branch is '${currentBranch}'. Please switch to the default branch '${defaultBranch}' before proceeding.`, `git checkout ${defaultBranch}`);
|
|
1620
|
+
const existingPullRequest = await githubClient.getExistingPullRequest(releaseBranch);
|
|
1621
|
+
const doesReleasePRExist = !!existingPullRequest;
|
|
1622
|
+
if (doesReleasePRExist) logger.item("Found existing release pull request");
|
|
1623
|
+
else logger.item("Will create new pull request");
|
|
1624
|
+
const branchExists = await doesBranchExist(releaseBranch, workspaceRoot);
|
|
1625
|
+
return {
|
|
1626
|
+
existingPullRequest,
|
|
1627
|
+
doesReleasePRExist,
|
|
1628
|
+
async prepareBranch() {
|
|
1629
|
+
if (!branchExists) await createBranch(releaseBranch, defaultBranch, workspaceRoot);
|
|
1630
|
+
logger.step(`Checking out release branch: ${releaseBranch}`);
|
|
1631
|
+
if (!await checkoutBranch(releaseBranch, workspaceRoot)) throw new Error(`Failed to checkout branch: ${releaseBranch}`);
|
|
1632
|
+
if (branchExists) {
|
|
1633
|
+
logger.step("Pulling latest changes from remote");
|
|
1634
|
+
if (!await pullLatestChanges(releaseBranch, workspaceRoot)) logger.warn("Failed to pull latest changes, continuing anyway");
|
|
1635
|
+
}
|
|
1636
|
+
logger.step(`Rebasing onto ${defaultBranch}`);
|
|
1637
|
+
if (!await rebaseBranch(defaultBranch, workspaceRoot)) throw new Error(`Failed to rebase onto ${defaultBranch}. Please resolve conflicts manually.`);
|
|
1638
|
+
},
|
|
1639
|
+
async syncChanges(hasChanges) {
|
|
1640
|
+
const hasCommitted = hasChanges ? await commitChanges("chore: update release versions", workspaceRoot) : false;
|
|
1641
|
+
const isBranchAhead = await isBranchAheadOfRemote(releaseBranch, workspaceRoot);
|
|
1642
|
+
if (!hasCommitted && !isBranchAhead) {
|
|
1643
|
+
logger.item("No changes to commit and branch is in sync with remote");
|
|
1644
|
+
return false;
|
|
1645
|
+
}
|
|
1646
|
+
logger.step("Pushing changes to remote");
|
|
1647
|
+
if (!await pushBranch(releaseBranch, workspaceRoot, { forceWithLease: true })) throw new Error(`Failed to push changes to ${releaseBranch}. Remote may have been updated.`);
|
|
1648
|
+
return true;
|
|
1649
|
+
},
|
|
1650
|
+
async syncPullRequest(updates) {
|
|
1651
|
+
const prTitle = existingPullRequest?.title || pullRequestTitle || "chore: update package versions";
|
|
1652
|
+
const prBody = generatePullRequestBody(updates, pullRequestBody);
|
|
1653
|
+
const pullRequest = await githubClient.upsertPullRequest({
|
|
1654
|
+
pullNumber: existingPullRequest?.number,
|
|
1655
|
+
title: prTitle,
|
|
1656
|
+
body: prBody,
|
|
1657
|
+
head: releaseBranch,
|
|
1658
|
+
base: defaultBranch
|
|
1659
|
+
});
|
|
1660
|
+
logger.success(`${doesReleasePRExist ? "Updated" : "Created"} pull request: ${pullRequest?.html_url}`);
|
|
1661
|
+
return {
|
|
1662
|
+
pullRequest,
|
|
1663
|
+
created: !doesReleasePRExist
|
|
1664
|
+
};
|
|
1665
|
+
},
|
|
1666
|
+
async cleanup() {
|
|
1667
|
+
await checkoutBranch(defaultBranch, workspaceRoot);
|
|
1668
|
+
}
|
|
1669
|
+
};
|
|
822
1670
|
}
|
|
823
|
-
|
|
824
|
-
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
|
|
1671
|
+
|
|
1672
|
+
//#endregion
|
|
1673
|
+
//#region src/verify.ts
|
|
1674
|
+
async function verify(options) {
|
|
1675
|
+
const { workspaceRoot,...normalizedOptions } = await normalizeReleaseOptions(options);
|
|
1676
|
+
if (normalizedOptions.safeguards && !await isWorkingDirectoryClean(workspaceRoot)) exitWithError("Working directory is not clean. Please commit or stash your changes before proceeding.");
|
|
1677
|
+
const githubClient = createGitHubClient({
|
|
1678
|
+
owner: normalizedOptions.owner,
|
|
1679
|
+
repo: normalizedOptions.repo,
|
|
1680
|
+
githubToken: normalizedOptions.githubToken
|
|
1681
|
+
});
|
|
1682
|
+
const releaseBranch = normalizedOptions.branch.release;
|
|
1683
|
+
const defaultBranch = normalizedOptions.branch.default;
|
|
1684
|
+
const releasePr = await githubClient.getExistingPullRequest(releaseBranch);
|
|
1685
|
+
if (!releasePr || !releasePr.head) {
|
|
1686
|
+
logger.warn(`No open release pull request found for branch "${releaseBranch}". Nothing to verify.`);
|
|
1687
|
+
return;
|
|
1688
|
+
}
|
|
1689
|
+
logger.info(`Found release PR #${releasePr.number}. Verifying against default branch "${defaultBranch}"...`);
|
|
1690
|
+
const originalBranch = await getCurrentBranch(workspaceRoot);
|
|
1691
|
+
if (originalBranch !== defaultBranch) await checkoutBranch(defaultBranch, workspaceRoot);
|
|
1692
|
+
const overridesPath = join(workspaceRoot, ucdjsReleaseOverridesPath);
|
|
1693
|
+
let existingOverrides = {};
|
|
1694
|
+
try {
|
|
1695
|
+
const overridesContent = await readFileFromGit(workspaceRoot, releasePr.head.sha, overridesPath);
|
|
1696
|
+
if (overridesContent) {
|
|
1697
|
+
existingOverrides = JSON.parse(overridesContent);
|
|
1698
|
+
logger.info("Found existing version overrides file on release branch.");
|
|
1699
|
+
}
|
|
1700
|
+
} catch {
|
|
1701
|
+
logger.info("No version overrides file found on release branch. Continuing...");
|
|
1702
|
+
}
|
|
1703
|
+
const mainPackages = await discoverWorkspacePackages(workspaceRoot, options);
|
|
1704
|
+
const mainCommits = await getWorkspacePackageGroupedCommits(workspaceRoot, mainPackages);
|
|
1705
|
+
const { allUpdates: expectedUpdates } = await calculateAndPrepareVersionUpdates({
|
|
1706
|
+
workspacePackages: mainPackages,
|
|
1707
|
+
packageCommits: mainCommits,
|
|
1708
|
+
workspaceRoot,
|
|
1709
|
+
showPrompt: false,
|
|
1710
|
+
globalCommitsPerPackage: await getGlobalCommitsPerPackage(workspaceRoot, mainCommits, mainPackages, normalizedOptions.globalCommitMode),
|
|
1711
|
+
overrides: existingOverrides
|
|
1712
|
+
});
|
|
1713
|
+
const expectedVersionMap = new Map(expectedUpdates.map((u) => [u.package.name, u.newVersion]));
|
|
1714
|
+
const prVersionMap = /* @__PURE__ */ new Map();
|
|
1715
|
+
for (const pkg of mainPackages) {
|
|
1716
|
+
const pkgJsonPath = join(pkg.path.replace(workspaceRoot, ""), "package.json").substring(1);
|
|
1717
|
+
const pkgJsonContent = await readFileFromGit(workspaceRoot, releasePr.head.sha, pkgJsonPath);
|
|
1718
|
+
if (pkgJsonContent) {
|
|
1719
|
+
const pkgJson = JSON.parse(pkgJsonContent);
|
|
1720
|
+
prVersionMap.set(pkg.name, pkgJson.version);
|
|
1721
|
+
}
|
|
1722
|
+
}
|
|
1723
|
+
if (originalBranch !== defaultBranch) await checkoutBranch(originalBranch, workspaceRoot);
|
|
1724
|
+
let isOutOfSync = false;
|
|
1725
|
+
for (const [pkgName, expectedVersion] of expectedVersionMap.entries()) {
|
|
1726
|
+
const prVersion = prVersionMap.get(pkgName);
|
|
1727
|
+
if (!prVersion) {
|
|
1728
|
+
logger.warn(`Package "${pkgName}" found in default branch but not in release branch. Skipping.`);
|
|
1729
|
+
continue;
|
|
1730
|
+
}
|
|
1731
|
+
if (gt(expectedVersion, prVersion)) {
|
|
1732
|
+
logger.error(`Package "${pkgName}" is out of sync. Expected version >= ${expectedVersion}, but PR has ${prVersion}.`);
|
|
1733
|
+
isOutOfSync = true;
|
|
1734
|
+
} else logger.success(`Package "${pkgName}" is up to date (PR version: ${prVersion}, Expected: ${expectedVersion})`);
|
|
1735
|
+
}
|
|
1736
|
+
const statusContext = "ucdjs/release-verify";
|
|
1737
|
+
if (isOutOfSync) {
|
|
1738
|
+
await githubClient.setCommitStatus({
|
|
1739
|
+
sha: releasePr.head.sha,
|
|
1740
|
+
state: "failure",
|
|
1741
|
+
context: statusContext,
|
|
1742
|
+
description: "Release PR is out of sync with the default branch. Please re-run the release process."
|
|
1743
|
+
});
|
|
1744
|
+
logger.error("Verification failed. Commit status set to 'failure'.");
|
|
1745
|
+
} else {
|
|
1746
|
+
await githubClient.setCommitStatus({
|
|
1747
|
+
sha: releasePr.head.sha,
|
|
1748
|
+
state: "success",
|
|
1749
|
+
context: statusContext,
|
|
1750
|
+
description: "Release PR is up to date."
|
|
1751
|
+
});
|
|
1752
|
+
logger.success("Verification successful. Commit status set to 'success'.");
|
|
829
1753
|
}
|
|
830
|
-
return updates;
|
|
831
|
-
}
|
|
832
|
-
async function updatePackageJsonFiles(updates) {
|
|
833
|
-
await Promise.all(updates.map(async (update) => {
|
|
834
|
-
const depUpdates = getDependencyUpdates(update.package, updates);
|
|
835
|
-
await updatePackageJson(update.package, update.newVersion, depUpdates);
|
|
836
|
-
}));
|
|
837
1754
|
}
|
|
838
1755
|
|
|
839
1756
|
//#endregion
|
|
840
|
-
export { release };
|
|
1757
|
+
export { publish, release, verify };
|