@ucdjs/release-scripts 0.1.0-beta.4 → 0.1.0-beta.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/eta-DAZlmVBQ.mjs +481 -0
- package/dist/index.d.mts +68 -84
- package/dist/index.mjs +1910 -643
- package/package.json +20 -10
package/dist/index.mjs
CHANGED
|
@@ -1,16 +1,114 @@
|
|
|
1
|
+
import { t as Eta } from "./eta-DAZlmVBQ.mjs";
|
|
2
|
+
import { mkdir, readFile, rm, writeFile } from "node:fs/promises";
|
|
3
|
+
import { join, relative } from "node:path";
|
|
4
|
+
import { getCommits, groupByType } from "commit-parser";
|
|
1
5
|
import process from "node:process";
|
|
2
|
-
import
|
|
6
|
+
import readline from "node:readline";
|
|
3
7
|
import farver from "farver";
|
|
8
|
+
import mri from "mri";
|
|
4
9
|
import { exec } from "tinyexec";
|
|
5
10
|
import { dedent } from "@luxass/utils";
|
|
6
|
-
import { Eta } from "eta";
|
|
7
|
-
import { readFile, writeFile } from "node:fs/promises";
|
|
8
|
-
import { join } from "node:path";
|
|
9
11
|
import prompts from "prompts";
|
|
12
|
+
import { compare, gt } from "semver";
|
|
10
13
|
|
|
11
|
-
//#region src/
|
|
12
|
-
|
|
14
|
+
//#region src/operations/changelog-format.ts
|
|
15
|
+
function formatCommitLine({ commit, owner, repo, authors }) {
|
|
16
|
+
const commitUrl = `https://github.com/${owner}/${repo}/commit/${commit.hash}`;
|
|
17
|
+
let line = `${commit.description}`;
|
|
18
|
+
const references = commit.references ?? [];
|
|
19
|
+
for (const ref of references) {
|
|
20
|
+
if (!ref.value) continue;
|
|
21
|
+
const number = Number.parseInt(ref.value.replace(/^#/, ""), 10);
|
|
22
|
+
if (Number.isNaN(number)) continue;
|
|
23
|
+
if (ref.type === "issue") {
|
|
24
|
+
line += ` ([Issue ${ref.value}](https://github.com/${owner}/${repo}/issues/${number}))`;
|
|
25
|
+
continue;
|
|
26
|
+
}
|
|
27
|
+
line += ` ([PR ${ref.value}](https://github.com/${owner}/${repo}/pull/${number}))`;
|
|
28
|
+
}
|
|
29
|
+
line += ` ([${commit.shortHash}](${commitUrl}))`;
|
|
30
|
+
if (authors.length > 0) {
|
|
31
|
+
const authorList = authors.map((author) => author.login ? `[@${author.login}](https://github.com/${author.login})` : author.name).join(", ");
|
|
32
|
+
line += ` (by ${authorList})`;
|
|
33
|
+
}
|
|
34
|
+
return line;
|
|
35
|
+
}
|
|
36
|
+
function buildTemplateGroups(options) {
|
|
37
|
+
const { commits, owner, repo, types, commitAuthors } = options;
|
|
38
|
+
const grouped = groupByType(commits, {
|
|
39
|
+
includeNonConventional: false,
|
|
40
|
+
mergeKeys: Object.fromEntries(Object.entries(types).map(([key, value]) => [key, value.types ?? [key]]))
|
|
41
|
+
});
|
|
42
|
+
return Object.entries(types).map(([key, value]) => {
|
|
43
|
+
const formattedCommits = (grouped.get(key) ?? []).map((commit) => ({ line: formatCommitLine({
|
|
44
|
+
commit,
|
|
45
|
+
owner,
|
|
46
|
+
repo,
|
|
47
|
+
authors: commitAuthors.get(commit.hash) ?? []
|
|
48
|
+
}) }));
|
|
49
|
+
return {
|
|
50
|
+
name: key,
|
|
51
|
+
title: value.title,
|
|
52
|
+
commits: formattedCommits
|
|
53
|
+
};
|
|
54
|
+
});
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
//#endregion
|
|
58
|
+
//#region src/shared/utils.ts
|
|
59
|
+
const args = mri(process.argv.slice(2));
|
|
60
|
+
const isDryRun = !!args.dry;
|
|
61
|
+
const isVerbose = !!args.verbose;
|
|
62
|
+
const isForce = !!args.force;
|
|
63
|
+
const ucdjsReleaseOverridesPath = ".github/ucdjs-release.overrides.json";
|
|
13
64
|
const isCI = typeof process.env.CI === "string" && process.env.CI !== "" && process.env.CI.toLowerCase() !== "false";
|
|
65
|
+
const logger = {
|
|
66
|
+
info: (...args) => {
|
|
67
|
+
console.info(...args);
|
|
68
|
+
},
|
|
69
|
+
warn: (...args) => {
|
|
70
|
+
console.warn(` ${farver.yellow("⚠")}`, ...args);
|
|
71
|
+
},
|
|
72
|
+
error: (...args) => {
|
|
73
|
+
console.error(` ${farver.red("✖")}`, ...args);
|
|
74
|
+
},
|
|
75
|
+
verbose: (...args) => {
|
|
76
|
+
if (!isVerbose) return;
|
|
77
|
+
if (args.length === 0) {
|
|
78
|
+
console.log();
|
|
79
|
+
return;
|
|
80
|
+
}
|
|
81
|
+
if (args.length > 1 && typeof args[0] === "string") {
|
|
82
|
+
console.log(farver.dim(args[0]), ...args.slice(1));
|
|
83
|
+
return;
|
|
84
|
+
}
|
|
85
|
+
console.log(...args);
|
|
86
|
+
},
|
|
87
|
+
section: (title) => {
|
|
88
|
+
console.log();
|
|
89
|
+
console.log(` ${farver.bold(title)}`);
|
|
90
|
+
console.log(` ${farver.gray("─".repeat(title.length + 2))}`);
|
|
91
|
+
},
|
|
92
|
+
emptyLine: () => {
|
|
93
|
+
console.log();
|
|
94
|
+
},
|
|
95
|
+
item: (message, ...args) => {
|
|
96
|
+
console.log(` ${message}`, ...args);
|
|
97
|
+
},
|
|
98
|
+
step: (message) => {
|
|
99
|
+
console.log(` ${farver.blue("→")} ${message}`);
|
|
100
|
+
},
|
|
101
|
+
success: (message) => {
|
|
102
|
+
console.log(` ${farver.green("✓")} ${message}`);
|
|
103
|
+
},
|
|
104
|
+
clearScreen: () => {
|
|
105
|
+
const repeatCount = process.stdout.rows - 2;
|
|
106
|
+
const blank = repeatCount > 0 ? "\n".repeat(repeatCount) : "";
|
|
107
|
+
console.log(blank);
|
|
108
|
+
readline.cursorTo(process.stdout, 0, 0);
|
|
109
|
+
readline.clearScreenDown(process.stdout);
|
|
110
|
+
}
|
|
111
|
+
};
|
|
14
112
|
async function run(bin, args, opts = {}) {
|
|
15
113
|
return exec(bin, args, {
|
|
16
114
|
throwOnError: true,
|
|
@@ -22,115 +120,292 @@ async function run(bin, args, opts = {}) {
|
|
|
22
120
|
});
|
|
23
121
|
}
|
|
24
122
|
async function dryRun(bin, args, opts) {
|
|
25
|
-
return
|
|
123
|
+
return logger.verbose(farver.blue(`[dryrun] ${bin} ${args.join(" ")}`), opts || "");
|
|
124
|
+
}
|
|
125
|
+
const runIfNotDry = isDryRun ? dryRun : run;
|
|
126
|
+
function exitWithError(message, hint) {
|
|
127
|
+
logger.error(farver.bold(message));
|
|
128
|
+
if (hint) console.error(farver.gray(` ${hint}`));
|
|
129
|
+
process.exit(1);
|
|
130
|
+
}
|
|
131
|
+
if (isDryRun || isVerbose || isForce) {
|
|
132
|
+
logger.verbose(farver.inverse(farver.yellow(" Running with special flags ")));
|
|
133
|
+
logger.verbose({
|
|
134
|
+
isDryRun,
|
|
135
|
+
isVerbose,
|
|
136
|
+
isForce
|
|
137
|
+
});
|
|
138
|
+
logger.verbose();
|
|
26
139
|
}
|
|
27
|
-
const runIfNotDry = globalOptions.dryRun ? dryRun : run;
|
|
28
140
|
|
|
29
141
|
//#endregion
|
|
30
|
-
//#region src/
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
let highestBump = "none";
|
|
41
|
-
for (const commit of commits) {
|
|
42
|
-
const bump = determineBumpType(commit);
|
|
43
|
-
if (bump === "major") return "major";
|
|
44
|
-
if (bump === "minor") highestBump = "minor";
|
|
45
|
-
else if (bump === "patch" && highestBump === "none") highestBump = "patch";
|
|
142
|
+
//#region src/core/github.ts
|
|
143
|
+
var GitHubClient = class {
|
|
144
|
+
owner;
|
|
145
|
+
repo;
|
|
146
|
+
githubToken;
|
|
147
|
+
apiBase = "https://api.github.com";
|
|
148
|
+
constructor({ owner, repo, githubToken }) {
|
|
149
|
+
this.owner = owner;
|
|
150
|
+
this.repo = repo;
|
|
151
|
+
this.githubToken = githubToken;
|
|
46
152
|
}
|
|
47
|
-
|
|
153
|
+
async request(path, init = {}) {
|
|
154
|
+
const url = path.startsWith("http") ? path : `${this.apiBase}${path}`;
|
|
155
|
+
const res = await fetch(url, {
|
|
156
|
+
...init,
|
|
157
|
+
headers: {
|
|
158
|
+
...init.headers,
|
|
159
|
+
"Accept": "application/vnd.github.v3+json",
|
|
160
|
+
"Authorization": `token ${this.githubToken}`,
|
|
161
|
+
"User-Agent": "ucdjs-release-scripts (+https://github.com/ucdjs/ucdjs-release-scripts)"
|
|
162
|
+
}
|
|
163
|
+
});
|
|
164
|
+
if (!res.ok) {
|
|
165
|
+
const errorText = await res.text();
|
|
166
|
+
throw new Error(`GitHub API request failed with status ${res.status}: ${errorText || "No response body"}`);
|
|
167
|
+
}
|
|
168
|
+
if (res.status === 204) return;
|
|
169
|
+
return res.json();
|
|
170
|
+
}
|
|
171
|
+
async getExistingPullRequest(branch) {
|
|
172
|
+
const head = branch.includes(":") ? branch : `${this.owner}:${branch}`;
|
|
173
|
+
const endpoint = `/repos/${this.owner}/${this.repo}/pulls?state=open&head=${encodeURIComponent(head)}`;
|
|
174
|
+
logger.verbose(`Requesting pull request for branch: ${branch} (url: ${this.apiBase}${endpoint})`);
|
|
175
|
+
const pulls = await this.request(endpoint);
|
|
176
|
+
if (!Array.isArray(pulls) || pulls.length === 0) return null;
|
|
177
|
+
const firstPullRequest = pulls[0];
|
|
178
|
+
if (typeof firstPullRequest !== "object" || firstPullRequest === null || !("number" in firstPullRequest) || typeof firstPullRequest.number !== "number" || !("title" in firstPullRequest) || typeof firstPullRequest.title !== "string" || !("body" in firstPullRequest) || typeof firstPullRequest.body !== "string" || !("draft" in firstPullRequest) || typeof firstPullRequest.draft !== "boolean" || !("html_url" in firstPullRequest) || typeof firstPullRequest.html_url !== "string") throw new TypeError("Pull request data validation failed");
|
|
179
|
+
const pullRequest = {
|
|
180
|
+
number: firstPullRequest.number,
|
|
181
|
+
title: firstPullRequest.title,
|
|
182
|
+
body: firstPullRequest.body,
|
|
183
|
+
draft: firstPullRequest.draft,
|
|
184
|
+
html_url: firstPullRequest.html_url,
|
|
185
|
+
head: "head" in firstPullRequest && typeof firstPullRequest.head === "object" && firstPullRequest.head !== null && "sha" in firstPullRequest.head && typeof firstPullRequest.head.sha === "string" ? { sha: firstPullRequest.head.sha } : void 0
|
|
186
|
+
};
|
|
187
|
+
logger.info(`Found existing pull request: ${farver.yellow(`#${pullRequest.number}`)}`);
|
|
188
|
+
return pullRequest;
|
|
189
|
+
}
|
|
190
|
+
async upsertPullRequest({ title, body, head, base, pullNumber }) {
|
|
191
|
+
const isUpdate = typeof pullNumber === "number";
|
|
192
|
+
const endpoint = isUpdate ? `/repos/${this.owner}/${this.repo}/pulls/${pullNumber}` : `/repos/${this.owner}/${this.repo}/pulls`;
|
|
193
|
+
const requestBody = isUpdate ? {
|
|
194
|
+
title,
|
|
195
|
+
body
|
|
196
|
+
} : {
|
|
197
|
+
title,
|
|
198
|
+
body,
|
|
199
|
+
head,
|
|
200
|
+
base,
|
|
201
|
+
draft: true
|
|
202
|
+
};
|
|
203
|
+
logger.verbose(`${isUpdate ? "Updating" : "Creating"} pull request (url: ${this.apiBase}${endpoint})`);
|
|
204
|
+
const pr = await this.request(endpoint, {
|
|
205
|
+
method: isUpdate ? "PATCH" : "POST",
|
|
206
|
+
body: JSON.stringify(requestBody)
|
|
207
|
+
});
|
|
208
|
+
if (typeof pr !== "object" || pr === null || !("number" in pr) || typeof pr.number !== "number" || !("title" in pr) || typeof pr.title !== "string" || !("body" in pr) || typeof pr.body !== "string" || !("draft" in pr) || typeof pr.draft !== "boolean" || !("html_url" in pr) || typeof pr.html_url !== "string") throw new TypeError("Pull request data validation failed");
|
|
209
|
+
const action = isUpdate ? "Updated" : "Created";
|
|
210
|
+
logger.info(`${action} pull request: ${farver.yellow(`#${pr.number}`)}`);
|
|
211
|
+
return {
|
|
212
|
+
number: pr.number,
|
|
213
|
+
title: pr.title,
|
|
214
|
+
body: pr.body,
|
|
215
|
+
draft: pr.draft,
|
|
216
|
+
html_url: pr.html_url
|
|
217
|
+
};
|
|
218
|
+
}
|
|
219
|
+
async setCommitStatus({ sha, state, targetUrl, description, context }) {
|
|
220
|
+
const endpoint = `/repos/${this.owner}/${this.repo}/statuses/${sha}`;
|
|
221
|
+
logger.verbose(`Setting commit status on ${sha} to ${state} (url: ${this.apiBase}${endpoint})`);
|
|
222
|
+
await this.request(endpoint, {
|
|
223
|
+
method: "POST",
|
|
224
|
+
body: JSON.stringify({
|
|
225
|
+
state,
|
|
226
|
+
target_url: targetUrl,
|
|
227
|
+
description: description || "",
|
|
228
|
+
context
|
|
229
|
+
})
|
|
230
|
+
});
|
|
231
|
+
logger.info(`Commit status set to ${farver.cyan(state)} for ${farver.gray(sha.substring(0, 7))}`);
|
|
232
|
+
}
|
|
233
|
+
async resolveAuthorInfo(info) {
|
|
234
|
+
if (info.login) return info;
|
|
235
|
+
try {
|
|
236
|
+
const q = encodeURIComponent(`${info.email} type:user in:email`);
|
|
237
|
+
const data = await this.request(`/search/users?q=${q}`);
|
|
238
|
+
if (!data.items || data.items.length === 0) return info;
|
|
239
|
+
info.login = data.items[0].login;
|
|
240
|
+
} catch (err) {
|
|
241
|
+
logger.warn(`Failed to resolve author info for email ${info.email}: ${err.message}`);
|
|
242
|
+
}
|
|
243
|
+
if (info.login) return info;
|
|
244
|
+
if (info.commits.length > 0) try {
|
|
245
|
+
const data = await this.request(`/repos/${this.owner}/${this.repo}/commits/${info.commits[0]}`);
|
|
246
|
+
if (data.author && data.author.login) info.login = data.author.login;
|
|
247
|
+
} catch (err) {
|
|
248
|
+
logger.warn(`Failed to resolve author info from commits for email ${info.email}: ${err.message}`);
|
|
249
|
+
}
|
|
250
|
+
return info;
|
|
251
|
+
}
|
|
252
|
+
};
|
|
253
|
+
function createGitHubClient(options) {
|
|
254
|
+
return new GitHubClient(options);
|
|
48
255
|
}
|
|
49
|
-
|
|
50
|
-
const
|
|
51
|
-
const
|
|
52
|
-
|
|
53
|
-
to: "HEAD"
|
|
54
|
-
});
|
|
55
|
-
console.log(`Found ${allCommits.length} commits for ${pkg.name} since ${lastTag || "beginning"}`);
|
|
56
|
-
const touchedCommitHashes = await getCommitsTouchingPackage(lastTag || "HEAD", "HEAD", pkg.path, workspaceRoot);
|
|
57
|
-
const touchedSet = new Set(touchedCommitHashes);
|
|
58
|
-
const packageCommits = allCommits.filter((commit) => touchedSet.has(commit.shortHash));
|
|
59
|
-
console.log(`${packageCommits.length} commits affect ${pkg.name}`);
|
|
60
|
-
return packageCommits;
|
|
256
|
+
function dedentString(str) {
|
|
257
|
+
const lines = str.split("\n");
|
|
258
|
+
const minIndent = lines.filter((line) => line.trim().length > 0).reduce((min, line) => Math.min(min, line.search(/\S/)), Infinity);
|
|
259
|
+
return lines.map((line) => minIndent === Infinity ? line : line.slice(minIndent)).join("\n").trim();
|
|
61
260
|
}
|
|
62
|
-
|
|
63
|
-
|
|
261
|
+
function generatePullRequestBody(updates, body) {
|
|
262
|
+
const eta = new Eta();
|
|
263
|
+
const bodyTemplate = body ? dedentString(body) : DEFAULT_PR_BODY_TEMPLATE;
|
|
264
|
+
return eta.renderString(bodyTemplate, { packages: updates.map((u) => ({
|
|
265
|
+
name: u.package.name,
|
|
266
|
+
currentVersion: u.currentVersion,
|
|
267
|
+
newVersion: u.newVersion,
|
|
268
|
+
bumpType: u.bumpType,
|
|
269
|
+
hasDirectChanges: u.hasDirectChanges
|
|
270
|
+
})) });
|
|
64
271
|
}
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
272
|
+
|
|
273
|
+
//#endregion
|
|
274
|
+
//#region src/options.ts
|
|
275
|
+
const DEFAULT_PR_BODY_TEMPLATE = dedent`
|
|
276
|
+
This PR was automatically generated by the UCD release scripts.
|
|
277
|
+
|
|
278
|
+
The following packages have been prepared for release:
|
|
279
|
+
|
|
280
|
+
<% if (it.packages.length > 0) { %>
|
|
281
|
+
<% it.packages.forEach((pkg) => { %>
|
|
282
|
+
- **<%= pkg.name %>**: <%= pkg.currentVersion %> → <%= pkg.newVersion %> (<%= pkg.bumpType %>)
|
|
283
|
+
<% }) %>
|
|
284
|
+
<% } else { %>
|
|
285
|
+
There are no packages to release.
|
|
286
|
+
<% } %>
|
|
287
|
+
|
|
288
|
+
Please review the changes and merge when ready.
|
|
289
|
+
|
|
290
|
+
> [!NOTE]
|
|
291
|
+
> When this PR is merged, the release process will be triggered automatically, publishing the new package versions to the registry.
|
|
292
|
+
`;
|
|
293
|
+
const DEFAULT_CHANGELOG_TEMPLATE = dedent`
|
|
294
|
+
<% if (it.previousVersion) { -%>
|
|
295
|
+
## [<%= it.version %>](<%= it.compareUrl %>) (<%= it.date %>)
|
|
296
|
+
<% } else { -%>
|
|
297
|
+
## <%= it.version %> (<%= it.date %>)
|
|
298
|
+
<% } %>
|
|
299
|
+
|
|
300
|
+
<% it.groups.forEach((group) => { %>
|
|
301
|
+
<% if (group.commits.length > 0) { %>
|
|
302
|
+
|
|
303
|
+
### <%= group.title %>
|
|
304
|
+
<% group.commits.forEach((commit) => { %>
|
|
305
|
+
|
|
306
|
+
* <%= commit.line %>
|
|
307
|
+
<% }); %>
|
|
308
|
+
|
|
309
|
+
<% } %>
|
|
310
|
+
<% }); %>
|
|
311
|
+
`;
|
|
312
|
+
const DEFAULT_TYPES = {
|
|
313
|
+
feat: { title: "🚀 Features" },
|
|
314
|
+
fix: { title: "🐞 Bug Fixes" },
|
|
315
|
+
perf: { title: "🏎 Performance" },
|
|
316
|
+
docs: { title: "📚 Documentation" },
|
|
317
|
+
style: { title: "🎨 Styles" }
|
|
318
|
+
};
|
|
319
|
+
function normalizeReleaseScriptsOptions(options) {
|
|
320
|
+
const { workspaceRoot = process.cwd(), githubToken = "", repo: fullRepo, packages = true, branch = {}, globalCommitMode = "dependencies", pullRequest = {}, changelog = {}, types, safeguards = true, dryRun = false, npm = {}, prompts = {} } = options;
|
|
321
|
+
const token = githubToken.trim();
|
|
322
|
+
if (!token) throw new Error("GitHub token is required. Pass it in via options.");
|
|
323
|
+
if (!fullRepo || !fullRepo.trim() || !fullRepo.includes("/")) throw new Error("Repository (repo) is required. Specify in 'owner/repo' format (e.g., 'octocat/hello-world').");
|
|
324
|
+
const [owner, repo] = fullRepo.split("/");
|
|
325
|
+
if (!owner || !repo) throw new Error(`Invalid repo format: "${fullRepo}". Expected format: "owner/repo" (e.g., "octocat/hello-world").`);
|
|
326
|
+
const normalizedPackages = typeof packages === "object" && !Array.isArray(packages) ? {
|
|
327
|
+
exclude: packages.exclude ?? [],
|
|
328
|
+
include: packages.include ?? [],
|
|
329
|
+
excludePrivate: packages.excludePrivate ?? false
|
|
330
|
+
} : packages;
|
|
331
|
+
const isCI = process.env.CI === "true" || process.env.GITHUB_ACTIONS === "true";
|
|
332
|
+
return {
|
|
333
|
+
dryRun,
|
|
334
|
+
workspaceRoot,
|
|
335
|
+
githubToken: token,
|
|
336
|
+
owner,
|
|
337
|
+
repo,
|
|
338
|
+
githubClient: createGitHubClient({
|
|
339
|
+
owner,
|
|
340
|
+
repo,
|
|
341
|
+
githubToken: token
|
|
342
|
+
}),
|
|
343
|
+
packages: normalizedPackages,
|
|
344
|
+
branch: {
|
|
345
|
+
release: branch.release ?? "release/next",
|
|
346
|
+
default: branch.default ?? "main"
|
|
347
|
+
},
|
|
348
|
+
globalCommitMode,
|
|
349
|
+
safeguards,
|
|
350
|
+
pullRequest: {
|
|
351
|
+
title: pullRequest.title ?? "chore: release new version",
|
|
352
|
+
body: pullRequest.body ?? DEFAULT_PR_BODY_TEMPLATE
|
|
353
|
+
},
|
|
354
|
+
changelog: {
|
|
355
|
+
enabled: changelog.enabled ?? true,
|
|
356
|
+
template: changelog.template ?? DEFAULT_CHANGELOG_TEMPLATE,
|
|
357
|
+
emojis: changelog.emojis ?? true
|
|
358
|
+
},
|
|
359
|
+
types: types ? {
|
|
360
|
+
...DEFAULT_TYPES,
|
|
361
|
+
...types
|
|
362
|
+
} : DEFAULT_TYPES,
|
|
363
|
+
npm: {
|
|
364
|
+
otp: npm.otp,
|
|
365
|
+
provenance: npm.provenance ?? true,
|
|
366
|
+
access: npm.access ?? "public",
|
|
367
|
+
runBuild: npm.runBuild ?? true
|
|
368
|
+
},
|
|
369
|
+
prompts: {
|
|
370
|
+
versions: prompts.versions ?? !isCI,
|
|
371
|
+
packages: prompts.packages ?? !isCI
|
|
372
|
+
}
|
|
373
|
+
};
|
|
79
374
|
}
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
case "revert": return "none";
|
|
95
|
-
default: return "none";
|
|
96
|
-
}
|
|
97
|
-
}
|
|
98
|
-
async function getCommitsTouchingPackage(from, to, packagePath, workspaceRoot) {
|
|
99
|
-
try {
|
|
100
|
-
const { stdout } = await run("git", [
|
|
101
|
-
"log",
|
|
102
|
-
"--pretty=format:%h",
|
|
103
|
-
from === "HEAD" ? "HEAD" : `${from}...${to}`,
|
|
104
|
-
"--",
|
|
105
|
-
packagePath
|
|
106
|
-
], { nodeOptions: {
|
|
107
|
-
cwd: workspaceRoot,
|
|
108
|
-
stdio: "pipe"
|
|
109
|
-
} });
|
|
110
|
-
return stdout.split("\n").map((line) => line.trim()).filter(Boolean);
|
|
111
|
-
} catch (error) {
|
|
112
|
-
console.error(`Error getting commits touching package: ${error}`);
|
|
113
|
-
return [];
|
|
114
|
-
}
|
|
375
|
+
|
|
376
|
+
//#endregion
|
|
377
|
+
//#region src/types.ts
|
|
378
|
+
function ok(value) {
|
|
379
|
+
return {
|
|
380
|
+
ok: true,
|
|
381
|
+
value
|
|
382
|
+
};
|
|
383
|
+
}
|
|
384
|
+
function err(error) {
|
|
385
|
+
return {
|
|
386
|
+
ok: false,
|
|
387
|
+
error
|
|
388
|
+
};
|
|
115
389
|
}
|
|
116
390
|
|
|
117
391
|
//#endregion
|
|
118
|
-
//#region src/git.ts
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
392
|
+
//#region src/core/git.ts
|
|
393
|
+
function toGitError(operation, error) {
|
|
394
|
+
return {
|
|
395
|
+
type: "git",
|
|
396
|
+
operation,
|
|
397
|
+
message: error instanceof Error ? error.message : String(error),
|
|
398
|
+
stderr: (typeof error === "object" && error && "stderr" in error ? String(error.stderr ?? "") : void 0)?.trim() || void 0
|
|
399
|
+
};
|
|
400
|
+
}
|
|
124
401
|
async function isWorkingDirectoryClean(workspaceRoot) {
|
|
125
402
|
try {
|
|
126
|
-
|
|
403
|
+
return ok((await run("git", ["status", "--porcelain"], { nodeOptions: {
|
|
127
404
|
cwd: workspaceRoot,
|
|
128
405
|
stdio: "pipe"
|
|
129
|
-
} })).stdout.trim()
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
console.error("Error checking git status:", err);
|
|
133
|
-
return false;
|
|
406
|
+
} })).stdout.trim() === "");
|
|
407
|
+
} catch (error) {
|
|
408
|
+
return err(toGitError("isWorkingDirectoryClean", error));
|
|
134
409
|
}
|
|
135
410
|
}
|
|
136
411
|
/**
|
|
@@ -149,89 +424,110 @@ async function doesBranchExist(branch, workspaceRoot) {
|
|
|
149
424
|
cwd: workspaceRoot,
|
|
150
425
|
stdio: "pipe"
|
|
151
426
|
} });
|
|
152
|
-
return true;
|
|
427
|
+
return ok(true);
|
|
153
428
|
} catch {
|
|
154
|
-
return false;
|
|
429
|
+
return ok(false);
|
|
155
430
|
}
|
|
156
431
|
}
|
|
157
432
|
/**
|
|
158
|
-
*
|
|
159
|
-
* @param
|
|
160
|
-
* @
|
|
161
|
-
* @returns Promise resolving to true if pull succeeded, false otherwise
|
|
433
|
+
* Retrieves the name of the current branch in the repository.
|
|
434
|
+
* @param {string} workspaceRoot - The root directory of the workspace
|
|
435
|
+
* @returns {Promise<string>} A Promise resolving to the current branch name as a string
|
|
162
436
|
*/
|
|
163
|
-
async function
|
|
437
|
+
async function getCurrentBranch(workspaceRoot) {
|
|
164
438
|
try {
|
|
165
|
-
await run("git", [
|
|
166
|
-
"
|
|
167
|
-
"
|
|
168
|
-
|
|
439
|
+
return ok((await run("git", [
|
|
440
|
+
"rev-parse",
|
|
441
|
+
"--abbrev-ref",
|
|
442
|
+
"HEAD"
|
|
169
443
|
], { nodeOptions: {
|
|
170
444
|
cwd: workspaceRoot,
|
|
171
445
|
stdio: "pipe"
|
|
172
|
-
} });
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
return false;
|
|
446
|
+
} })).stdout.trim());
|
|
447
|
+
} catch (error) {
|
|
448
|
+
return err(toGitError("getCurrentBranch", error));
|
|
176
449
|
}
|
|
177
450
|
}
|
|
178
451
|
/**
|
|
179
|
-
*
|
|
180
|
-
* @param branch - The new branch
|
|
181
|
-
* @param base - The base branch to create from
|
|
182
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
452
|
+
* Creates a new branch from the specified base branch.
|
|
453
|
+
* @param {string} branch - The name of the new branch to create
|
|
454
|
+
* @param {string} base - The base branch to create the new branch from
|
|
455
|
+
* @param {string} workspaceRoot - The root directory of the workspace
|
|
456
|
+
* @returns {Promise<void>} A Promise that resolves when the branch is created
|
|
183
457
|
*/
|
|
184
458
|
async function createBranch(branch, base, workspaceRoot) {
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
"
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
459
|
+
try {
|
|
460
|
+
logger.info(`Creating branch: ${farver.green(branch)} from ${farver.cyan(base)}`);
|
|
461
|
+
await runIfNotDry("git", [
|
|
462
|
+
"branch",
|
|
463
|
+
branch,
|
|
464
|
+
base
|
|
465
|
+
], { nodeOptions: {
|
|
466
|
+
cwd: workspaceRoot,
|
|
467
|
+
stdio: "pipe"
|
|
468
|
+
} });
|
|
469
|
+
return ok(void 0);
|
|
470
|
+
} catch (error) {
|
|
471
|
+
return err(toGitError("createBranch", error));
|
|
472
|
+
}
|
|
191
473
|
}
|
|
192
|
-
/**
|
|
193
|
-
* Checkout a git branch
|
|
194
|
-
* @param branch - The branch name to checkout
|
|
195
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
196
|
-
* @returns Promise resolving to true if checkout succeeded, false otherwise
|
|
197
|
-
*/
|
|
198
474
|
async function checkoutBranch(branch, workspaceRoot) {
|
|
199
475
|
try {
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
476
|
+
logger.info(`Switching to branch: ${farver.green(branch)}`);
|
|
477
|
+
const output = (await run("git", ["checkout", branch], { nodeOptions: {
|
|
478
|
+
cwd: workspaceRoot,
|
|
479
|
+
stdio: "pipe"
|
|
480
|
+
} })).stderr.trim();
|
|
481
|
+
const match = output.match(/Switched to (?:a new )?branch '(.+)'/);
|
|
482
|
+
if (match && match[1] === branch) {
|
|
483
|
+
logger.info(`Successfully switched to branch: ${farver.green(branch)}`);
|
|
484
|
+
return ok(true);
|
|
485
|
+
}
|
|
486
|
+
console.warn(`Unexpected git checkout output: ${output}`);
|
|
487
|
+
return ok(false);
|
|
488
|
+
} catch (error) {
|
|
489
|
+
const gitError = toGitError("checkoutBranch", error);
|
|
490
|
+
logger.error(`Git checkout failed: ${gitError.message}`);
|
|
491
|
+
if (gitError.stderr) logger.error(`Git stderr: ${gitError.stderr}`);
|
|
492
|
+
try {
|
|
493
|
+
const branchResult = await run("git", ["branch", "-a"], { nodeOptions: {
|
|
494
|
+
cwd: workspaceRoot,
|
|
495
|
+
stdio: "pipe"
|
|
496
|
+
} });
|
|
497
|
+
logger.verbose(`Available branches:\n${branchResult.stdout}`);
|
|
498
|
+
} catch {
|
|
499
|
+
logger.verbose("Could not list available branches");
|
|
500
|
+
}
|
|
501
|
+
return err(gitError);
|
|
204
502
|
}
|
|
205
503
|
}
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
}
|
|
504
|
+
async function pullLatestChanges(branch, workspaceRoot) {
|
|
505
|
+
try {
|
|
506
|
+
await run("git", [
|
|
507
|
+
"pull",
|
|
508
|
+
"origin",
|
|
509
|
+
branch
|
|
510
|
+
], { nodeOptions: {
|
|
511
|
+
cwd: workspaceRoot,
|
|
512
|
+
stdio: "pipe"
|
|
513
|
+
} });
|
|
514
|
+
return ok(true);
|
|
515
|
+
} catch (error) {
|
|
516
|
+
return err(toGitError("pullLatestChanges", error));
|
|
517
|
+
}
|
|
220
518
|
}
|
|
221
|
-
/**
|
|
222
|
-
* Rebase current branch onto another branch
|
|
223
|
-
* @param ontoBranch - The target branch to rebase onto
|
|
224
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
225
|
-
*/
|
|
226
519
|
async function rebaseBranch(ontoBranch, workspaceRoot) {
|
|
227
|
-
|
|
520
|
+
try {
|
|
521
|
+
logger.info(`Rebasing onto: ${farver.cyan(ontoBranch)}`);
|
|
522
|
+
await runIfNotDry("git", ["rebase", ontoBranch], { nodeOptions: {
|
|
523
|
+
cwd: workspaceRoot,
|
|
524
|
+
stdio: "pipe"
|
|
525
|
+
} });
|
|
526
|
+
return ok(void 0);
|
|
527
|
+
} catch (error) {
|
|
528
|
+
return err(toGitError("rebaseBranch", error));
|
|
529
|
+
}
|
|
228
530
|
}
|
|
229
|
-
/**
|
|
230
|
-
* Check if local branch is ahead of remote (has commits to push)
|
|
231
|
-
* @param branch - The branch name to check
|
|
232
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
233
|
-
* @returns Promise resolving to true if local is ahead, false otherwise
|
|
234
|
-
*/
|
|
235
531
|
async function isBranchAheadOfRemote(branch, workspaceRoot) {
|
|
236
532
|
try {
|
|
237
533
|
const result = await run("git", [
|
|
@@ -242,171 +538,350 @@ async function isBranchAheadOfRemote(branch, workspaceRoot) {
|
|
|
242
538
|
cwd: workspaceRoot,
|
|
243
539
|
stdio: "pipe"
|
|
244
540
|
} });
|
|
245
|
-
return Number.parseInt(result.stdout.trim(), 10) > 0;
|
|
541
|
+
return ok(Number.parseInt(result.stdout.trim(), 10) > 0);
|
|
246
542
|
} catch {
|
|
247
|
-
return true;
|
|
543
|
+
return ok(true);
|
|
248
544
|
}
|
|
249
545
|
}
|
|
250
|
-
/**
|
|
251
|
-
* Check if there are any changes to commit (staged or unstaged)
|
|
252
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
253
|
-
* @returns Promise resolving to true if there are changes, false otherwise
|
|
254
|
-
*/
|
|
255
|
-
async function hasChangesToCommit(workspaceRoot) {
|
|
256
|
-
return (await run("git", ["status", "--porcelain"], { nodeOptions: {
|
|
257
|
-
cwd: workspaceRoot,
|
|
258
|
-
stdio: "pipe"
|
|
259
|
-
} })).stdout.trim() !== "";
|
|
260
|
-
}
|
|
261
|
-
/**
|
|
262
|
-
* Commit changes with a message
|
|
263
|
-
* @param message - The commit message
|
|
264
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
265
|
-
* @returns Promise resolving to true if commit was made, false if there were no changes
|
|
266
|
-
*/
|
|
267
546
|
async function commitChanges(message, workspaceRoot) {
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
547
|
+
try {
|
|
548
|
+
await run("git", ["add", "."], { nodeOptions: {
|
|
549
|
+
cwd: workspaceRoot,
|
|
550
|
+
stdio: "pipe"
|
|
551
|
+
} });
|
|
552
|
+
const isClean = await isWorkingDirectoryClean(workspaceRoot);
|
|
553
|
+
if (!isClean.ok || isClean.value) return ok(false);
|
|
554
|
+
logger.info(`Committing changes: ${farver.dim(message)}`);
|
|
555
|
+
await runIfNotDry("git", [
|
|
556
|
+
"commit",
|
|
557
|
+
"-m",
|
|
558
|
+
message
|
|
559
|
+
], { nodeOptions: {
|
|
560
|
+
cwd: workspaceRoot,
|
|
561
|
+
stdio: "pipe"
|
|
562
|
+
} });
|
|
563
|
+
return ok(true);
|
|
564
|
+
} catch (error) {
|
|
565
|
+
const gitError = toGitError("commitChanges", error);
|
|
566
|
+
logger.error(`Git commit failed: ${gitError.message}`);
|
|
567
|
+
if (gitError.stderr) logger.error(`Git stderr: ${gitError.stderr}`);
|
|
568
|
+
return err(gitError);
|
|
569
|
+
}
|
|
276
570
|
}
|
|
277
|
-
/**
|
|
278
|
-
* Push branch to remote
|
|
279
|
-
* @param branch - The branch name to push
|
|
280
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
281
|
-
* @param options - Push options
|
|
282
|
-
* @param options.force - Force push (overwrite remote)
|
|
283
|
-
* @param options.forceWithLease - Force push with safety check (won't overwrite unexpected changes)
|
|
284
|
-
*/
|
|
285
571
|
async function pushBranch(branch, workspaceRoot, options) {
|
|
286
|
-
const args = [
|
|
287
|
-
"push",
|
|
288
|
-
"origin",
|
|
289
|
-
branch
|
|
290
|
-
];
|
|
291
|
-
if (options?.forceWithLease) args.push("--force-with-lease");
|
|
292
|
-
else if (options?.force) args.push("--force");
|
|
293
|
-
await run("git", args, { nodeOptions: { cwd: workspaceRoot } });
|
|
294
|
-
}
|
|
295
|
-
|
|
296
|
-
//#endregion
|
|
297
|
-
//#region src/github.ts
|
|
298
|
-
async function getExistingPullRequest({ owner, repo, branch, githubToken }) {
|
|
299
572
|
try {
|
|
300
|
-
const
|
|
301
|
-
|
|
302
|
-
|
|
573
|
+
const args = [
|
|
574
|
+
"push",
|
|
575
|
+
"origin",
|
|
576
|
+
branch
|
|
577
|
+
];
|
|
578
|
+
if (options?.forceWithLease) {
|
|
579
|
+
await run("git", [
|
|
580
|
+
"fetch",
|
|
581
|
+
"origin",
|
|
582
|
+
branch
|
|
583
|
+
], { nodeOptions: {
|
|
584
|
+
cwd: workspaceRoot,
|
|
585
|
+
stdio: "pipe"
|
|
586
|
+
} });
|
|
587
|
+
args.push("--force-with-lease");
|
|
588
|
+
logger.info(`Pushing branch: ${farver.green(branch)} ${farver.dim("(with lease)")}`);
|
|
589
|
+
} else if (options?.force) {
|
|
590
|
+
args.push("--force");
|
|
591
|
+
logger.info(`Force pushing branch: ${farver.green(branch)}`);
|
|
592
|
+
} else logger.info(`Pushing branch: ${farver.green(branch)}`);
|
|
593
|
+
await runIfNotDry("git", args, { nodeOptions: {
|
|
594
|
+
cwd: workspaceRoot,
|
|
595
|
+
stdio: "pipe"
|
|
303
596
|
} });
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
const firstPullRequest = pulls[0];
|
|
308
|
-
if (typeof firstPullRequest !== "object" || firstPullRequest === null || !("number" in firstPullRequest) || typeof firstPullRequest.number !== "number" || !("title" in firstPullRequest) || typeof firstPullRequest.title !== "string" || !("body" in firstPullRequest) || typeof firstPullRequest.body !== "string" || !("draft" in firstPullRequest) || typeof firstPullRequest.draft !== "boolean" || !("html_url" in firstPullRequest) || typeof firstPullRequest.html_url !== "string") throw new TypeError("Pull request data validation failed");
|
|
309
|
-
const pullRequest = {
|
|
310
|
-
number: firstPullRequest.number,
|
|
311
|
-
title: firstPullRequest.title,
|
|
312
|
-
body: firstPullRequest.body,
|
|
313
|
-
draft: firstPullRequest.draft,
|
|
314
|
-
html_url: firstPullRequest.html_url
|
|
315
|
-
};
|
|
316
|
-
console.info(`Found existing pull request: ${farver.yellow(`#${pullRequest.number}`)}`);
|
|
317
|
-
return pullRequest;
|
|
318
|
-
} catch (err) {
|
|
319
|
-
console.error("Error fetching pull request:", err);
|
|
320
|
-
return null;
|
|
597
|
+
return ok(true);
|
|
598
|
+
} catch (error) {
|
|
599
|
+
return err(toGitError("pushBranch", error));
|
|
321
600
|
}
|
|
322
601
|
}
|
|
323
|
-
async function
|
|
602
|
+
async function readFileFromGit(workspaceRoot, ref, filePath) {
|
|
324
603
|
try {
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
} : {
|
|
332
|
-
title,
|
|
333
|
-
body,
|
|
334
|
-
head,
|
|
335
|
-
base
|
|
336
|
-
};
|
|
337
|
-
const res = await fetch(url, {
|
|
338
|
-
method,
|
|
339
|
-
headers: {
|
|
340
|
-
Accept: "application/vnd.github.v3+json",
|
|
341
|
-
Authorization: `token ${githubToken}`
|
|
342
|
-
},
|
|
343
|
-
body: JSON.stringify(requestBody)
|
|
344
|
-
});
|
|
345
|
-
if (!res.ok) throw new Error(`GitHub API request failed with status ${res.status}`);
|
|
346
|
-
const pr = await res.json();
|
|
347
|
-
if (typeof pr !== "object" || pr === null || !("number" in pr) || typeof pr.number !== "number" || !("title" in pr) || typeof pr.title !== "string" || !("body" in pr) || typeof pr.body !== "string" || !("draft" in pr) || typeof pr.draft !== "boolean" || !("html_url" in pr) || typeof pr.html_url !== "string") throw new TypeError("Pull request data validation failed");
|
|
348
|
-
const action = isUpdate ? "Updated" : "Created";
|
|
349
|
-
console.info(`${action} pull request: ${farver.yellow(`#${pr.number}`)}`);
|
|
350
|
-
return {
|
|
351
|
-
number: pr.number,
|
|
352
|
-
title: pr.title,
|
|
353
|
-
body: pr.body,
|
|
354
|
-
draft: pr.draft,
|
|
355
|
-
html_url: pr.html_url
|
|
356
|
-
};
|
|
357
|
-
} catch (err) {
|
|
358
|
-
console.error(`Error upserting pull request:`, err);
|
|
359
|
-
throw err;
|
|
604
|
+
return ok((await run("git", ["show", `${ref}:${filePath}`], { nodeOptions: {
|
|
605
|
+
cwd: workspaceRoot,
|
|
606
|
+
stdio: "pipe"
|
|
607
|
+
} })).stdout);
|
|
608
|
+
} catch {
|
|
609
|
+
return ok(null);
|
|
360
610
|
}
|
|
361
611
|
}
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
function dedentString(str) {
|
|
379
|
-
const lines = str.split("\n");
|
|
380
|
-
const minIndent = lines.filter((line) => line.trim().length > 0).reduce((min, line) => Math.min(min, line.search(/\S/)), Infinity);
|
|
381
|
-
return lines.map((line) => minIndent === Infinity ? line : line.slice(minIndent)).join("\n").trim();
|
|
612
|
+
async function getMostRecentPackageTag(workspaceRoot, packageName) {
|
|
613
|
+
try {
|
|
614
|
+
const { stdout } = await run("git", [
|
|
615
|
+
"tag",
|
|
616
|
+
"--list",
|
|
617
|
+
`${packageName}@*`
|
|
618
|
+
], { nodeOptions: {
|
|
619
|
+
cwd: workspaceRoot,
|
|
620
|
+
stdio: "pipe"
|
|
621
|
+
} });
|
|
622
|
+
const tags = stdout.split("\n").map((tag) => tag.trim()).filter(Boolean);
|
|
623
|
+
if (tags.length === 0) return ok(void 0);
|
|
624
|
+
return ok(tags.reverse()[0]);
|
|
625
|
+
} catch (error) {
|
|
626
|
+
return err(toGitError("getMostRecentPackageTag", error));
|
|
627
|
+
}
|
|
382
628
|
}
|
|
383
|
-
|
|
629
|
+
/**
|
|
630
|
+
* Builds a mapping of commit SHAs to the list of files changed in each commit
|
|
631
|
+
* within a given inclusive range.
|
|
632
|
+
*
|
|
633
|
+
* Internally runs:
|
|
634
|
+
* git log --name-only --format=%H <from>^..<to>
|
|
635
|
+
*
|
|
636
|
+
* Notes
|
|
637
|
+
* - This includes the commit identified by `from` (via `from^..to`).
|
|
638
|
+
* - Order of commits in the resulting Map follows `git log` output
|
|
639
|
+
* (reverse chronological, newest first).
|
|
640
|
+
* - On failure (e.g., invalid refs), the function returns null.
|
|
641
|
+
*
|
|
642
|
+
* @param {string} workspaceRoot Absolute path to the git repository root used as cwd.
|
|
643
|
+
* @param {string} from Starting commit/ref (inclusive).
|
|
644
|
+
* @param {string} to Ending commit/ref (inclusive).
|
|
645
|
+
* @returns {Promise<Map<string, string[]> | null>} Promise resolving to a Map where keys are commit SHAs and values are
|
|
646
|
+
* arrays of file paths changed by that commit, or null on error.
|
|
647
|
+
*/
|
|
648
|
+
async function getGroupedFilesByCommitSha(workspaceRoot, from, to) {
|
|
649
|
+
const commitsMap = /* @__PURE__ */ new Map();
|
|
650
|
+
try {
|
|
651
|
+
const { stdout } = await run("git", [
|
|
652
|
+
"log",
|
|
653
|
+
"--name-only",
|
|
654
|
+
"--format=%H",
|
|
655
|
+
`${from}^..${to}`
|
|
656
|
+
], { nodeOptions: {
|
|
657
|
+
cwd: workspaceRoot,
|
|
658
|
+
stdio: "pipe"
|
|
659
|
+
} });
|
|
660
|
+
const lines = stdout.trim().split("\n").filter((line) => line.trim() !== "");
|
|
661
|
+
let currentSha = null;
|
|
662
|
+
const HASH_REGEX = /^[0-9a-f]{40}$/i;
|
|
663
|
+
for (const line of lines) {
|
|
664
|
+
const trimmedLine = line.trim();
|
|
665
|
+
if (HASH_REGEX.test(trimmedLine)) {
|
|
666
|
+
currentSha = trimmedLine;
|
|
667
|
+
commitsMap.set(currentSha, []);
|
|
668
|
+
continue;
|
|
669
|
+
}
|
|
670
|
+
if (currentSha === null) continue;
|
|
671
|
+
commitsMap.get(currentSha).push(trimmedLine);
|
|
672
|
+
}
|
|
673
|
+
return ok(commitsMap);
|
|
674
|
+
} catch (error) {
|
|
675
|
+
return err(toGitError("getGroupedFilesByCommitSha", error));
|
|
676
|
+
}
|
|
677
|
+
}
|
|
678
|
+
/**
|
|
679
|
+
* Create a git tag for a package release
|
|
680
|
+
* @param packageName - The package name (e.g., "@scope/name")
|
|
681
|
+
* @param version - The version to tag (e.g., "1.2.3")
|
|
682
|
+
* @param workspaceRoot - The root directory of the workspace
|
|
683
|
+
* @returns Result indicating success or failure
|
|
684
|
+
*/
|
|
685
|
+
async function createPackageTag(packageName, version, workspaceRoot) {
|
|
686
|
+
const tagName = `${packageName}@${version}`;
|
|
687
|
+
try {
|
|
688
|
+
logger.info(`Creating tag: ${farver.green(tagName)}`);
|
|
689
|
+
await runIfNotDry("git", ["tag", tagName], { nodeOptions: {
|
|
690
|
+
cwd: workspaceRoot,
|
|
691
|
+
stdio: "pipe"
|
|
692
|
+
} });
|
|
693
|
+
return ok(void 0);
|
|
694
|
+
} catch (error) {
|
|
695
|
+
return err(toGitError("createPackageTag", error));
|
|
696
|
+
}
|
|
697
|
+
}
|
|
698
|
+
/**
|
|
699
|
+
* Push a specific tag to the remote repository
|
|
700
|
+
* @param tagName - The tag name to push
|
|
701
|
+
* @param workspaceRoot - The root directory of the workspace
|
|
702
|
+
* @returns Result indicating success or failure
|
|
703
|
+
*/
|
|
704
|
+
async function pushTag(tagName, workspaceRoot) {
|
|
705
|
+
try {
|
|
706
|
+
logger.info(`Pushing tag: ${farver.green(tagName)}`);
|
|
707
|
+
await runIfNotDry("git", [
|
|
708
|
+
"push",
|
|
709
|
+
"origin",
|
|
710
|
+
tagName
|
|
711
|
+
], { nodeOptions: {
|
|
712
|
+
cwd: workspaceRoot,
|
|
713
|
+
stdio: "pipe"
|
|
714
|
+
} });
|
|
715
|
+
return ok(void 0);
|
|
716
|
+
} catch (error) {
|
|
717
|
+
return err(toGitError("pushTag", error));
|
|
718
|
+
}
|
|
719
|
+
}
|
|
720
|
+
/**
|
|
721
|
+
* Create and push a package tag in one operation
|
|
722
|
+
* @param packageName - The package name
|
|
723
|
+
* @param version - The version to tag
|
|
724
|
+
* @param workspaceRoot - The root directory of the workspace
|
|
725
|
+
* @returns Result indicating success or failure
|
|
726
|
+
*/
|
|
727
|
+
async function createAndPushPackageTag(packageName, version, workspaceRoot) {
|
|
728
|
+
const createResult = await createPackageTag(packageName, version, workspaceRoot);
|
|
729
|
+
if (!createResult.ok) return createResult;
|
|
730
|
+
return pushTag(`${packageName}@${version}`, workspaceRoot);
|
|
731
|
+
}
|
|
732
|
+
|
|
733
|
+
//#endregion
|
|
734
|
+
//#region src/core/changelog.ts
|
|
735
|
+
const excludeAuthors = [
|
|
736
|
+
/\[bot\]/i,
|
|
737
|
+
/dependabot/i,
|
|
738
|
+
/\(bot\)/i
|
|
739
|
+
];
|
|
740
|
+
async function generateChangelogEntry(options) {
|
|
741
|
+
const { packageName, version, previousVersion, date, commits, owner, repo, types, template, githubClient } = options;
|
|
742
|
+
const templateData = {
|
|
743
|
+
packageName,
|
|
744
|
+
version,
|
|
745
|
+
previousVersion,
|
|
746
|
+
date,
|
|
747
|
+
compareUrl: previousVersion ? `https://github.com/${owner}/${repo}/compare/${packageName}@${previousVersion}...${packageName}@${version}` : void 0,
|
|
748
|
+
owner,
|
|
749
|
+
repo,
|
|
750
|
+
groups: buildTemplateGroups({
|
|
751
|
+
commits,
|
|
752
|
+
owner,
|
|
753
|
+
repo,
|
|
754
|
+
types,
|
|
755
|
+
commitAuthors: await resolveCommitAuthors(commits, githubClient)
|
|
756
|
+
})
|
|
757
|
+
};
|
|
384
758
|
const eta = new Eta();
|
|
385
|
-
const
|
|
386
|
-
return eta.renderString(
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
759
|
+
const templateToUse = template || DEFAULT_CHANGELOG_TEMPLATE;
|
|
760
|
+
return eta.renderString(templateToUse, templateData).trim();
|
|
761
|
+
}
|
|
762
|
+
async function updateChangelog(options) {
|
|
763
|
+
const { version, previousVersion, commits, date, normalizedOptions, workspacePackage, githubClient } = options;
|
|
764
|
+
const changelogPath = join(workspacePackage.path, "CHANGELOG.md");
|
|
765
|
+
const changelogRelativePath = relative(normalizedOptions.workspaceRoot, join(workspacePackage.path, "CHANGELOG.md"));
|
|
766
|
+
const existingContent = await readFileFromGit(normalizedOptions.workspaceRoot, normalizedOptions.branch.default, changelogRelativePath);
|
|
767
|
+
logger.verbose("Existing content found: ", existingContent.ok && Boolean(existingContent.value));
|
|
768
|
+
const newEntry = await generateChangelogEntry({
|
|
769
|
+
packageName: workspacePackage.name,
|
|
770
|
+
version,
|
|
771
|
+
previousVersion,
|
|
772
|
+
date,
|
|
773
|
+
commits,
|
|
774
|
+
owner: normalizedOptions.owner,
|
|
775
|
+
repo: normalizedOptions.repo,
|
|
776
|
+
types: normalizedOptions.types,
|
|
777
|
+
template: normalizedOptions.changelog?.template,
|
|
778
|
+
githubClient
|
|
779
|
+
});
|
|
780
|
+
let updatedContent;
|
|
781
|
+
if (!existingContent.ok || !existingContent.value) {
|
|
782
|
+
updatedContent = `# ${workspacePackage.name}\n\n${newEntry}\n`;
|
|
783
|
+
await writeFile(changelogPath, updatedContent, "utf-8");
|
|
784
|
+
return;
|
|
785
|
+
}
|
|
786
|
+
const parsed = parseChangelog(existingContent.value);
|
|
787
|
+
const lines = existingContent.value.split("\n");
|
|
788
|
+
const existingVersionIndex = parsed.versions.findIndex((v) => v.version === version);
|
|
789
|
+
if (existingVersionIndex !== -1) {
|
|
790
|
+
const existingVersion = parsed.versions[existingVersionIndex];
|
|
791
|
+
const before = lines.slice(0, existingVersion.lineStart);
|
|
792
|
+
const after = lines.slice(existingVersion.lineEnd + 1);
|
|
793
|
+
updatedContent = [
|
|
794
|
+
...before,
|
|
795
|
+
newEntry,
|
|
796
|
+
...after
|
|
797
|
+
].join("\n");
|
|
798
|
+
} else {
|
|
799
|
+
const insertAt = parsed.headerLineEnd + 1;
|
|
800
|
+
const before = lines.slice(0, insertAt);
|
|
801
|
+
const after = lines.slice(insertAt);
|
|
802
|
+
if (before.length > 0 && before[before.length - 1] !== "") before.push("");
|
|
803
|
+
updatedContent = [
|
|
804
|
+
...before,
|
|
805
|
+
newEntry,
|
|
806
|
+
"",
|
|
807
|
+
...after
|
|
808
|
+
].join("\n");
|
|
809
|
+
}
|
|
810
|
+
await writeFile(changelogPath, updatedContent, "utf-8");
|
|
811
|
+
}
|
|
812
|
+
async function resolveCommitAuthors(commits, githubClient) {
|
|
813
|
+
const authorMap = /* @__PURE__ */ new Map();
|
|
814
|
+
const commitAuthors = /* @__PURE__ */ new Map();
|
|
815
|
+
for (const commit of commits) {
|
|
816
|
+
const authorsForCommit = [];
|
|
817
|
+
commit.authors.forEach((author, idx) => {
|
|
818
|
+
if (!author.email || !author.name) return;
|
|
819
|
+
if (excludeAuthors.some((re) => re.test(author.name))) return;
|
|
820
|
+
if (!authorMap.has(author.email)) authorMap.set(author.email, {
|
|
821
|
+
commits: [],
|
|
822
|
+
name: author.name,
|
|
823
|
+
email: author.email
|
|
824
|
+
});
|
|
825
|
+
const info = authorMap.get(author.email);
|
|
826
|
+
if (idx === 0) info.commits.push(commit.shortHash);
|
|
827
|
+
authorsForCommit.push(info);
|
|
828
|
+
});
|
|
829
|
+
commitAuthors.set(commit.hash, authorsForCommit);
|
|
830
|
+
}
|
|
831
|
+
const authors = Array.from(authorMap.values());
|
|
832
|
+
await Promise.all(authors.map((info) => githubClient.resolveAuthorInfo(info)));
|
|
833
|
+
return commitAuthors;
|
|
834
|
+
}
|
|
835
|
+
function parseChangelog(content) {
|
|
836
|
+
const lines = content.split("\n");
|
|
837
|
+
let packageName = null;
|
|
838
|
+
let headerLineEnd = -1;
|
|
839
|
+
const versions = [];
|
|
840
|
+
for (let i = 0; i < lines.length; i++) {
|
|
841
|
+
const line = lines[i].trim();
|
|
842
|
+
if (line.startsWith("# ")) {
|
|
843
|
+
packageName = line.slice(2).trim();
|
|
844
|
+
headerLineEnd = i;
|
|
845
|
+
break;
|
|
846
|
+
}
|
|
847
|
+
}
|
|
848
|
+
for (let i = headerLineEnd + 1; i < lines.length; i++) {
|
|
849
|
+
const line = lines[i].trim();
|
|
850
|
+
if (line.startsWith("## ")) {
|
|
851
|
+
const versionMatch = line.match(/##\s+(?:<small>)?\[?([^\](\s<]+)/);
|
|
852
|
+
if (versionMatch) {
|
|
853
|
+
const version = versionMatch[1];
|
|
854
|
+
const lineStart = i;
|
|
855
|
+
let lineEnd = lines.length - 1;
|
|
856
|
+
for (let j = i + 1; j < lines.length; j++) if (lines[j].trim().startsWith("## ")) {
|
|
857
|
+
lineEnd = j - 1;
|
|
858
|
+
break;
|
|
859
|
+
}
|
|
860
|
+
const versionContent = lines.slice(lineStart, lineEnd + 1).join("\n");
|
|
861
|
+
versions.push({
|
|
862
|
+
version,
|
|
863
|
+
lineStart,
|
|
864
|
+
lineEnd,
|
|
865
|
+
content: versionContent
|
|
866
|
+
});
|
|
867
|
+
}
|
|
868
|
+
}
|
|
869
|
+
}
|
|
870
|
+
return {
|
|
871
|
+
packageName,
|
|
872
|
+
versions,
|
|
873
|
+
headerLineEnd
|
|
874
|
+
};
|
|
393
875
|
}
|
|
394
876
|
|
|
395
877
|
//#endregion
|
|
396
|
-
//#region src/
|
|
878
|
+
//#region src/operations/semver.ts
|
|
397
879
|
function isValidSemver(version) {
|
|
398
880
|
return /^\d+\.\d+\.\d+(?:[-+].+)?$/.test(version);
|
|
399
881
|
}
|
|
400
|
-
function
|
|
401
|
-
if (!isValidSemver(version)) throw new Error(`Invalid semver version: ${version}`);
|
|
402
|
-
}
|
|
403
|
-
/**
|
|
404
|
-
* Calculate the new version based on current version and bump type
|
|
405
|
-
* Pure function - no side effects, easily testable
|
|
406
|
-
*/
|
|
407
|
-
function calculateNewVersion(currentVersion, bump) {
|
|
882
|
+
function getNextVersion(currentVersion, bump) {
|
|
408
883
|
if (bump === "none") return currentVersion;
|
|
409
|
-
|
|
884
|
+
if (!isValidSemver(currentVersion)) throw new Error(`Cannot bump version for invalid semver: ${currentVersion}`);
|
|
410
885
|
const match = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)(.*)$/);
|
|
411
886
|
if (!match) throw new Error(`Invalid semver version: ${currentVersion}`);
|
|
412
887
|
const [, major, minor, patch] = match;
|
|
@@ -429,58 +904,404 @@ function calculateNewVersion(currentVersion, bump) {
|
|
|
429
904
|
}
|
|
430
905
|
return `${newMajor}.${newMinor}.${newPatch}`;
|
|
431
906
|
}
|
|
907
|
+
function calculateBumpType(oldVersion, newVersion) {
|
|
908
|
+
if (!isValidSemver(oldVersion) || !isValidSemver(newVersion)) throw new Error(`Cannot calculate bump type for invalid semver: ${oldVersion} or ${newVersion}`);
|
|
909
|
+
const oldParts = oldVersion.split(".").map(Number);
|
|
910
|
+
const newParts = newVersion.split(".").map(Number);
|
|
911
|
+
if (newParts[0] > oldParts[0]) return "major";
|
|
912
|
+
if (newParts[1] > oldParts[1]) return "minor";
|
|
913
|
+
if (newParts[2] > oldParts[2]) return "patch";
|
|
914
|
+
return "none";
|
|
915
|
+
}
|
|
916
|
+
|
|
917
|
+
//#endregion
|
|
918
|
+
//#region src/core/prompts.ts
|
|
919
|
+
async function selectPackagePrompt(packages) {
|
|
920
|
+
const response = await prompts({
|
|
921
|
+
type: "multiselect",
|
|
922
|
+
name: "selectedPackages",
|
|
923
|
+
message: "Select packages to release",
|
|
924
|
+
choices: packages.map((pkg) => ({
|
|
925
|
+
title: `${pkg.name} (${farver.bold(pkg.version)})`,
|
|
926
|
+
value: pkg.name,
|
|
927
|
+
selected: true
|
|
928
|
+
})),
|
|
929
|
+
min: 1,
|
|
930
|
+
hint: "Space to select/deselect. Return to submit.",
|
|
931
|
+
instructions: false
|
|
932
|
+
});
|
|
933
|
+
if (!response.selectedPackages || response.selectedPackages.length === 0) return [];
|
|
934
|
+
return response.selectedPackages;
|
|
935
|
+
}
|
|
936
|
+
async function selectVersionPrompt(workspaceRoot, pkg, currentVersion, suggestedVersion) {
|
|
937
|
+
const answers = await prompts([{
|
|
938
|
+
type: "autocomplete",
|
|
939
|
+
name: "version",
|
|
940
|
+
message: `${pkg.name}: ${farver.green(pkg.version)}`,
|
|
941
|
+
choices: [
|
|
942
|
+
{
|
|
943
|
+
value: "skip",
|
|
944
|
+
title: `skip ${farver.dim("(no change)")}`
|
|
945
|
+
},
|
|
946
|
+
{
|
|
947
|
+
value: "major",
|
|
948
|
+
title: `major ${farver.bold(getNextVersion(pkg.version, "major"))}`
|
|
949
|
+
},
|
|
950
|
+
{
|
|
951
|
+
value: "minor",
|
|
952
|
+
title: `minor ${farver.bold(getNextVersion(pkg.version, "minor"))}`
|
|
953
|
+
},
|
|
954
|
+
{
|
|
955
|
+
value: "patch",
|
|
956
|
+
title: `patch ${farver.bold(getNextVersion(pkg.version, "patch"))}`
|
|
957
|
+
},
|
|
958
|
+
{
|
|
959
|
+
value: "suggested",
|
|
960
|
+
title: `suggested ${farver.bold(suggestedVersion)}`
|
|
961
|
+
},
|
|
962
|
+
{
|
|
963
|
+
value: "as-is",
|
|
964
|
+
title: `as-is ${farver.dim("(keep current version)")}`
|
|
965
|
+
},
|
|
966
|
+
{
|
|
967
|
+
value: "custom",
|
|
968
|
+
title: "custom"
|
|
969
|
+
}
|
|
970
|
+
],
|
|
971
|
+
initial: suggestedVersion === currentVersion ? 0 : 4
|
|
972
|
+
}, {
|
|
973
|
+
type: (prev) => prev === "custom" ? "text" : null,
|
|
974
|
+
name: "custom",
|
|
975
|
+
message: "Enter the new version number:",
|
|
976
|
+
initial: suggestedVersion,
|
|
977
|
+
validate: (custom) => {
|
|
978
|
+
if (isValidSemver(custom)) return true;
|
|
979
|
+
return "That's not a valid version number";
|
|
980
|
+
}
|
|
981
|
+
}]);
|
|
982
|
+
if (!answers.version) return null;
|
|
983
|
+
if (answers.version === "skip") return null;
|
|
984
|
+
else if (answers.version === "suggested") return suggestedVersion;
|
|
985
|
+
else if (answers.version === "custom") {
|
|
986
|
+
if (!answers.custom) return null;
|
|
987
|
+
return answers.custom;
|
|
988
|
+
} else if (answers.version === "as-is") return currentVersion;
|
|
989
|
+
else return getNextVersion(pkg.version, answers.version);
|
|
990
|
+
}
|
|
991
|
+
|
|
992
|
+
//#endregion
|
|
993
|
+
//#region src/core/workspace.ts
|
|
994
|
+
function toWorkspaceError(operation, error) {
|
|
995
|
+
return {
|
|
996
|
+
type: "workspace",
|
|
997
|
+
operation,
|
|
998
|
+
message: error instanceof Error ? error.message : String(error)
|
|
999
|
+
};
|
|
1000
|
+
}
|
|
1001
|
+
async function discoverWorkspacePackages(workspaceRoot, options) {
|
|
1002
|
+
let workspaceOptions;
|
|
1003
|
+
let explicitPackages;
|
|
1004
|
+
if (options.packages == null || options.packages === true) workspaceOptions = { excludePrivate: false };
|
|
1005
|
+
else if (Array.isArray(options.packages)) {
|
|
1006
|
+
workspaceOptions = {
|
|
1007
|
+
excludePrivate: false,
|
|
1008
|
+
include: options.packages
|
|
1009
|
+
};
|
|
1010
|
+
explicitPackages = options.packages;
|
|
1011
|
+
} else {
|
|
1012
|
+
workspaceOptions = options.packages;
|
|
1013
|
+
if (options.packages.include) explicitPackages = options.packages.include;
|
|
1014
|
+
}
|
|
1015
|
+
let workspacePackages;
|
|
1016
|
+
try {
|
|
1017
|
+
workspacePackages = await findWorkspacePackages(workspaceRoot, workspaceOptions);
|
|
1018
|
+
} catch (error) {
|
|
1019
|
+
return err(toWorkspaceError("discoverWorkspacePackages", error));
|
|
1020
|
+
}
|
|
1021
|
+
if (explicitPackages) {
|
|
1022
|
+
const foundNames = new Set(workspacePackages.map((p) => p.name));
|
|
1023
|
+
const missing = explicitPackages.filter((p) => !foundNames.has(p));
|
|
1024
|
+
if (missing.length > 0) exitWithError(`Package${missing.length > 1 ? "s" : ""} not found in workspace: ${missing.join(", ")}`, "Check your package names or run 'pnpm ls' to see available packages");
|
|
1025
|
+
}
|
|
1026
|
+
const isPackagePromptEnabled = options.prompts?.packages !== false;
|
|
1027
|
+
if (!isCI && isPackagePromptEnabled && !explicitPackages) {
|
|
1028
|
+
const selectedNames = await selectPackagePrompt(workspacePackages);
|
|
1029
|
+
workspacePackages = workspacePackages.filter((pkg) => selectedNames.includes(pkg.name));
|
|
1030
|
+
}
|
|
1031
|
+
return ok(workspacePackages);
|
|
1032
|
+
}
|
|
1033
|
+
async function findWorkspacePackages(workspaceRoot, options) {
|
|
1034
|
+
try {
|
|
1035
|
+
const result = await run("pnpm", [
|
|
1036
|
+
"-r",
|
|
1037
|
+
"ls",
|
|
1038
|
+
"--json"
|
|
1039
|
+
], { nodeOptions: {
|
|
1040
|
+
cwd: workspaceRoot,
|
|
1041
|
+
stdio: "pipe"
|
|
1042
|
+
} });
|
|
1043
|
+
const rawProjects = JSON.parse(result.stdout);
|
|
1044
|
+
const allPackageNames = new Set(rawProjects.map((p) => p.name));
|
|
1045
|
+
const excludedPackages = /* @__PURE__ */ new Set();
|
|
1046
|
+
const promises = rawProjects.map(async (rawProject) => {
|
|
1047
|
+
const content = await readFile(join(rawProject.path, "package.json"), "utf-8");
|
|
1048
|
+
const packageJson = JSON.parse(content);
|
|
1049
|
+
if (!shouldIncludePackage(packageJson, options)) {
|
|
1050
|
+
excludedPackages.add(rawProject.name);
|
|
1051
|
+
return null;
|
|
1052
|
+
}
|
|
1053
|
+
return {
|
|
1054
|
+
name: rawProject.name,
|
|
1055
|
+
version: rawProject.version,
|
|
1056
|
+
path: rawProject.path,
|
|
1057
|
+
packageJson,
|
|
1058
|
+
workspaceDependencies: Object.keys(rawProject.dependencies || []).filter((dep) => {
|
|
1059
|
+
return allPackageNames.has(dep);
|
|
1060
|
+
}),
|
|
1061
|
+
workspaceDevDependencies: Object.keys(rawProject.devDependencies || []).filter((dep) => {
|
|
1062
|
+
return allPackageNames.has(dep);
|
|
1063
|
+
})
|
|
1064
|
+
};
|
|
1065
|
+
});
|
|
1066
|
+
const packages = await Promise.all(promises);
|
|
1067
|
+
if (excludedPackages.size > 0) logger.info(`Excluded packages: ${farver.green(Array.from(excludedPackages).join(", "))}`);
|
|
1068
|
+
return packages.filter((pkg) => pkg !== null);
|
|
1069
|
+
} catch (err) {
|
|
1070
|
+
logger.error("Error discovering workspace packages:", err);
|
|
1071
|
+
throw err;
|
|
1072
|
+
}
|
|
1073
|
+
}
|
|
1074
|
+
function shouldIncludePackage(pkg, options) {
|
|
1075
|
+
if (!options) return true;
|
|
1076
|
+
if (options.excludePrivate && pkg.private) return false;
|
|
1077
|
+
if (options.include && options.include.length > 0) {
|
|
1078
|
+
if (!options.include.includes(pkg.name)) return false;
|
|
1079
|
+
}
|
|
1080
|
+
if (options.exclude?.includes(pkg.name)) return false;
|
|
1081
|
+
return true;
|
|
1082
|
+
}
|
|
1083
|
+
|
|
1084
|
+
//#endregion
|
|
1085
|
+
//#region src/operations/branch.ts
|
|
1086
|
+
async function prepareReleaseBranch(options) {
|
|
1087
|
+
const { workspaceRoot, releaseBranch, defaultBranch } = options;
|
|
1088
|
+
const currentBranch = await getCurrentBranch(workspaceRoot);
|
|
1089
|
+
if (!currentBranch.ok) return currentBranch;
|
|
1090
|
+
if (currentBranch.value !== defaultBranch) return err({
|
|
1091
|
+
type: "git",
|
|
1092
|
+
operation: "validateBranch",
|
|
1093
|
+
message: `Current branch is '${currentBranch.value}'. Please switch to '${defaultBranch}'.`
|
|
1094
|
+
});
|
|
1095
|
+
const branchExists = await doesBranchExist(releaseBranch, workspaceRoot);
|
|
1096
|
+
if (!branchExists.ok) return branchExists;
|
|
1097
|
+
if (!branchExists.value) {
|
|
1098
|
+
const created = await createBranch(releaseBranch, defaultBranch, workspaceRoot);
|
|
1099
|
+
if (!created.ok) return created;
|
|
1100
|
+
}
|
|
1101
|
+
const checkedOut = await checkoutBranch(releaseBranch, workspaceRoot);
|
|
1102
|
+
if (!checkedOut.ok) return checkedOut;
|
|
1103
|
+
if (branchExists.value) {
|
|
1104
|
+
const pulled = await pullLatestChanges(releaseBranch, workspaceRoot);
|
|
1105
|
+
if (!pulled.ok) return pulled;
|
|
1106
|
+
if (!pulled.value) logger.warn("Failed to pull latest changes, continuing anyway.");
|
|
1107
|
+
}
|
|
1108
|
+
const rebased = await rebaseBranch(defaultBranch, workspaceRoot);
|
|
1109
|
+
if (!rebased.ok) return rebased;
|
|
1110
|
+
return ok(void 0);
|
|
1111
|
+
}
|
|
1112
|
+
async function syncReleaseChanges(options) {
|
|
1113
|
+
const { workspaceRoot, releaseBranch, commitMessage, hasChanges } = options;
|
|
1114
|
+
const committed = hasChanges ? await commitChanges(commitMessage, workspaceRoot) : ok(false);
|
|
1115
|
+
if (!committed.ok) return committed;
|
|
1116
|
+
const isAhead = await isBranchAheadOfRemote(releaseBranch, workspaceRoot);
|
|
1117
|
+
if (!isAhead.ok) return isAhead;
|
|
1118
|
+
if (!committed.value && !isAhead.value) return ok(false);
|
|
1119
|
+
const pushed = await pushBranch(releaseBranch, workspaceRoot, { forceWithLease: true });
|
|
1120
|
+
if (!pushed.ok) return pushed;
|
|
1121
|
+
return ok(true);
|
|
1122
|
+
}
|
|
1123
|
+
|
|
1124
|
+
//#endregion
|
|
1125
|
+
//#region src/versioning/commits.ts
|
|
432
1126
|
/**
|
|
433
|
-
*
|
|
1127
|
+
* Get commits grouped by workspace package.
|
|
1128
|
+
* For each package, retrieves all commits since its last release tag that affect that package.
|
|
1129
|
+
*
|
|
1130
|
+
* @param {string} workspaceRoot - The root directory of the workspace
|
|
1131
|
+
* @param {WorkspacePackage[]} packages - Array of workspace packages to analyze
|
|
1132
|
+
* @returns {Promise<Map<string, GitCommit[]>>} A map of package names to their commits since their last release
|
|
434
1133
|
*/
|
|
435
|
-
function
|
|
436
|
-
const
|
|
1134
|
+
async function getWorkspacePackageGroupedCommits(workspaceRoot, packages) {
|
|
1135
|
+
const changedPackages = /* @__PURE__ */ new Map();
|
|
1136
|
+
const promises = packages.map(async (pkg) => {
|
|
1137
|
+
const lastTagResult = await getMostRecentPackageTag(workspaceRoot, pkg.name);
|
|
1138
|
+
const lastTag = lastTagResult.ok ? lastTagResult.value : void 0;
|
|
1139
|
+
const allCommits = await getCommits({
|
|
1140
|
+
from: lastTag,
|
|
1141
|
+
to: "HEAD",
|
|
1142
|
+
cwd: workspaceRoot,
|
|
1143
|
+
folder: pkg.path
|
|
1144
|
+
});
|
|
1145
|
+
logger.verbose(`Found ${farver.cyan(allCommits.length)} commits for package ${farver.bold(pkg.name)} since tag ${farver.cyan(lastTag ?? "N/A")}`);
|
|
1146
|
+
return {
|
|
1147
|
+
pkgName: pkg.name,
|
|
1148
|
+
commits: allCommits
|
|
1149
|
+
};
|
|
1150
|
+
});
|
|
1151
|
+
const results = await Promise.all(promises);
|
|
1152
|
+
for (const { pkgName, commits } of results) changedPackages.set(pkgName, commits);
|
|
1153
|
+
return changedPackages;
|
|
1154
|
+
}
|
|
1155
|
+
/**
|
|
1156
|
+
* Check if a file path touches any package folder.
|
|
1157
|
+
* @param file - The file path to check
|
|
1158
|
+
* @param packagePaths - Set of normalized package paths
|
|
1159
|
+
* @param workspaceRoot - The workspace root for path normalization
|
|
1160
|
+
* @returns true if the file is inside a package folder
|
|
1161
|
+
*/
|
|
1162
|
+
function fileMatchesPackageFolder(file, packagePaths, workspaceRoot) {
|
|
1163
|
+
const normalizedFile = file.startsWith("./") ? file.slice(2) : file;
|
|
1164
|
+
for (const pkgPath of packagePaths) {
|
|
1165
|
+
const normalizedPkgPath = pkgPath.startsWith(workspaceRoot) ? pkgPath.slice(workspaceRoot.length + 1) : pkgPath;
|
|
1166
|
+
if (normalizedFile.startsWith(`${normalizedPkgPath}/`) || normalizedFile === normalizedPkgPath) return true;
|
|
1167
|
+
}
|
|
1168
|
+
return false;
|
|
1169
|
+
}
|
|
1170
|
+
/**
|
|
1171
|
+
* Check if a commit is a "global" commit (doesn't touch any package folder).
|
|
1172
|
+
* @param workspaceRoot - The workspace root
|
|
1173
|
+
* @param files - Array of files changed in the commit
|
|
1174
|
+
* @param packagePaths - Set of normalized package paths
|
|
1175
|
+
* @returns true if this is a global commit
|
|
1176
|
+
*/
|
|
1177
|
+
function isGlobalCommit(workspaceRoot, files, packagePaths) {
|
|
1178
|
+
if (!files || files.length === 0) return false;
|
|
1179
|
+
return !files.some((file) => fileMatchesPackageFolder(file, packagePaths, workspaceRoot));
|
|
1180
|
+
}
|
|
1181
|
+
const DEPENDENCY_FILES = [
|
|
1182
|
+
"package.json",
|
|
1183
|
+
"pnpm-lock.yaml",
|
|
1184
|
+
"pnpm-workspace.yaml",
|
|
1185
|
+
"yarn.lock",
|
|
1186
|
+
"package-lock.json"
|
|
1187
|
+
];
|
|
1188
|
+
/**
|
|
1189
|
+
* Find the oldest and newest commits across all packages.
|
|
1190
|
+
* @param packageCommits - Map of package commits
|
|
1191
|
+
* @returns Object with oldest and newest commit SHAs, or null if no commits
|
|
1192
|
+
*/
|
|
1193
|
+
function findCommitRange(packageCommits) {
|
|
1194
|
+
let oldestCommit = null;
|
|
1195
|
+
let newestCommit = null;
|
|
1196
|
+
for (const commits of packageCommits.values()) {
|
|
1197
|
+
if (commits.length === 0) continue;
|
|
1198
|
+
const firstCommit = commits[0].shortHash;
|
|
1199
|
+
const lastCommit = commits[commits.length - 1].shortHash;
|
|
1200
|
+
if (!newestCommit) newestCommit = firstCommit;
|
|
1201
|
+
oldestCommit = lastCommit;
|
|
1202
|
+
}
|
|
1203
|
+
if (!oldestCommit || !newestCommit) return null;
|
|
437
1204
|
return {
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
newVersion,
|
|
441
|
-
bumpType: bump,
|
|
442
|
-
hasDirectChanges
|
|
1205
|
+
oldest: oldestCommit,
|
|
1206
|
+
newest: newestCommit
|
|
443
1207
|
};
|
|
444
1208
|
}
|
|
445
1209
|
/**
|
|
446
|
-
*
|
|
1210
|
+
* Get global commits for each package based on their individual commit timelines.
|
|
1211
|
+
* This solves the problem where packages with different release histories need different global commits.
|
|
1212
|
+
*
|
|
1213
|
+
* A "global commit" is a commit that doesn't touch any package folder but may affect all packages
|
|
1214
|
+
* (e.g., root package.json, CI config, README).
|
|
1215
|
+
*
|
|
1216
|
+
* Performance: Makes ONE batched git call to get files for all commits across all packages.
|
|
1217
|
+
*
|
|
1218
|
+
* @param workspaceRoot - The root directory of the workspace
|
|
1219
|
+
* @param packageCommits - Map of package name to their commits (from getWorkspacePackageCommits)
|
|
1220
|
+
* @param allPackages - All workspace packages (used to identify package folders)
|
|
1221
|
+
* @param mode - Filter mode: false (disabled), "all" (all global commits), or "dependencies" (only dependency-related)
|
|
1222
|
+
* @returns Map of package name to their global commits
|
|
447
1223
|
*/
|
|
448
|
-
async function
|
|
449
|
-
const
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
1224
|
+
async function getGlobalCommitsPerPackage(workspaceRoot, packageCommits, allPackages, mode) {
|
|
1225
|
+
const result = /* @__PURE__ */ new Map();
|
|
1226
|
+
if (!mode) {
|
|
1227
|
+
logger.verbose("Global commits mode disabled");
|
|
1228
|
+
return result;
|
|
1229
|
+
}
|
|
1230
|
+
logger.verbose(`Computing global commits per-package (mode: ${farver.cyan(mode)})`);
|
|
1231
|
+
const commitRange = findCommitRange(packageCommits);
|
|
1232
|
+
if (!commitRange) {
|
|
1233
|
+
logger.verbose("No commits found across packages");
|
|
1234
|
+
return result;
|
|
1235
|
+
}
|
|
1236
|
+
logger.verbose("Fetching files for commits range", `${farver.cyan(commitRange.oldest)}..${farver.cyan(commitRange.newest)}`);
|
|
1237
|
+
const commitFilesMap = await getGroupedFilesByCommitSha(workspaceRoot, commitRange.oldest, commitRange.newest);
|
|
1238
|
+
if (!commitFilesMap.ok) {
|
|
1239
|
+
logger.warn("Failed to get commit file list, returning empty global commits");
|
|
1240
|
+
return result;
|
|
1241
|
+
}
|
|
1242
|
+
logger.verbose("Got file lists for commits", `${farver.cyan(commitFilesMap.value.size)} commits in ONE git call`);
|
|
1243
|
+
const packagePaths = new Set(allPackages.map((p) => p.path));
|
|
1244
|
+
for (const [pkgName, commits] of packageCommits) {
|
|
1245
|
+
const globalCommitsAffectingPackage = [];
|
|
1246
|
+
logger.verbose("Filtering global commits for package", `${farver.bold(pkgName)} from ${farver.cyan(commits.length)} commits`);
|
|
1247
|
+
for (const commit of commits) {
|
|
1248
|
+
const files = commitFilesMap.value.get(commit.shortHash);
|
|
1249
|
+
if (!files) continue;
|
|
1250
|
+
if (isGlobalCommit(workspaceRoot, files, packagePaths)) globalCommitsAffectingPackage.push(commit);
|
|
457
1251
|
}
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
1252
|
+
logger.verbose("Package global commits found", `${farver.bold(pkgName)}: ${farver.cyan(globalCommitsAffectingPackage.length)} global commits`);
|
|
1253
|
+
if (mode === "all") {
|
|
1254
|
+
result.set(pkgName, globalCommitsAffectingPackage);
|
|
1255
|
+
continue;
|
|
461
1256
|
}
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
1257
|
+
const dependencyCommits = [];
|
|
1258
|
+
for (const commit of globalCommitsAffectingPackage) {
|
|
1259
|
+
const files = commitFilesMap.value.get(commit.shortHash);
|
|
1260
|
+
if (!files) continue;
|
|
1261
|
+
if (files.some((file) => DEPENDENCY_FILES.includes(file.startsWith("./") ? file.slice(2) : file))) {
|
|
1262
|
+
logger.verbose("Global commit affects dependencies", `${farver.bold(pkgName)}: commit ${farver.cyan(commit.shortHash)} affects dependencies`);
|
|
1263
|
+
dependencyCommits.push(commit);
|
|
1264
|
+
}
|
|
465
1265
|
}
|
|
1266
|
+
logger.verbose("Global commits affect dependencies", `${farver.bold(pkgName)}: ${farver.cyan(dependencyCommits.length)} global commits affect dependencies`);
|
|
1267
|
+
result.set(pkgName, dependencyCommits);
|
|
466
1268
|
}
|
|
467
|
-
|
|
1269
|
+
return result;
|
|
468
1270
|
}
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
function
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
for (const
|
|
476
|
-
const
|
|
477
|
-
if (
|
|
1271
|
+
|
|
1272
|
+
//#endregion
|
|
1273
|
+
//#region src/operations/version.ts
|
|
1274
|
+
function determineHighestBump(commits) {
|
|
1275
|
+
if (commits.length === 0) return "none";
|
|
1276
|
+
let highestBump = "none";
|
|
1277
|
+
for (const commit of commits) {
|
|
1278
|
+
const bump = determineBumpType(commit);
|
|
1279
|
+
if (bump === "major") return "major";
|
|
1280
|
+
if (bump === "minor") highestBump = "minor";
|
|
1281
|
+
else if (bump === "patch" && highestBump === "none") highestBump = "patch";
|
|
478
1282
|
}
|
|
479
|
-
return
|
|
1283
|
+
return highestBump;
|
|
1284
|
+
}
|
|
1285
|
+
function createVersionUpdate(pkg, bump, hasDirectChanges) {
|
|
1286
|
+
const newVersion = getNextVersion(pkg.version, bump);
|
|
1287
|
+
return {
|
|
1288
|
+
package: pkg,
|
|
1289
|
+
currentVersion: pkg.version,
|
|
1290
|
+
newVersion,
|
|
1291
|
+
bumpType: bump,
|
|
1292
|
+
hasDirectChanges
|
|
1293
|
+
};
|
|
1294
|
+
}
|
|
1295
|
+
function determineBumpType(commit) {
|
|
1296
|
+
if (!commit.isConventional) return "none";
|
|
1297
|
+
if (commit.isBreaking) return "major";
|
|
1298
|
+
if (commit.type === "feat") return "minor";
|
|
1299
|
+
if (commit.type === "fix" || commit.type === "perf") return "patch";
|
|
1300
|
+
return "none";
|
|
480
1301
|
}
|
|
481
1302
|
|
|
482
1303
|
//#endregion
|
|
483
|
-
//#region src/package.ts
|
|
1304
|
+
//#region src/versioning/package.ts
|
|
484
1305
|
/**
|
|
485
1306
|
* Build a dependency graph from workspace packages
|
|
486
1307
|
*
|
|
@@ -533,6 +1354,51 @@ function getAllAffectedPackages(graph, changedPackages) {
|
|
|
533
1354
|
return affected;
|
|
534
1355
|
}
|
|
535
1356
|
/**
|
|
1357
|
+
* Calculate the order in which packages should be published
|
|
1358
|
+
*
|
|
1359
|
+
* Performs topological sorting to ensure dependencies are published before dependents.
|
|
1360
|
+
* Assigns a "level" to each package based on its depth in the dependency tree.
|
|
1361
|
+
*
|
|
1362
|
+
* This is used by the publish command to publish packages in the correct order.
|
|
1363
|
+
*
|
|
1364
|
+
* @param graph - Dependency graph
|
|
1365
|
+
* @param packagesToPublish - Set of package names to publish
|
|
1366
|
+
* @returns Array of packages in publish order with their dependency level
|
|
1367
|
+
*/
|
|
1368
|
+
function getPackagePublishOrder(graph, packagesToPublish) {
|
|
1369
|
+
const result = [];
|
|
1370
|
+
const visited = /* @__PURE__ */ new Set();
|
|
1371
|
+
const toUpdate = new Set(packagesToPublish);
|
|
1372
|
+
const packagesToProcess = new Set(packagesToPublish);
|
|
1373
|
+
for (const pkg of packagesToPublish) {
|
|
1374
|
+
const deps = graph.dependents.get(pkg);
|
|
1375
|
+
if (deps) for (const dep of deps) {
|
|
1376
|
+
packagesToProcess.add(dep);
|
|
1377
|
+
toUpdate.add(dep);
|
|
1378
|
+
}
|
|
1379
|
+
}
|
|
1380
|
+
function visit(pkgName, level) {
|
|
1381
|
+
if (visited.has(pkgName)) return;
|
|
1382
|
+
visited.add(pkgName);
|
|
1383
|
+
const pkg = graph.packages.get(pkgName);
|
|
1384
|
+
if (!pkg) return;
|
|
1385
|
+
const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
|
|
1386
|
+
let maxDepLevel = level;
|
|
1387
|
+
for (const dep of allDeps) if (toUpdate.has(dep)) {
|
|
1388
|
+
visit(dep, level);
|
|
1389
|
+
const depResult = result.find((r) => r.package.name === dep);
|
|
1390
|
+
if (depResult && depResult.level >= maxDepLevel) maxDepLevel = depResult.level + 1;
|
|
1391
|
+
}
|
|
1392
|
+
result.push({
|
|
1393
|
+
package: pkg,
|
|
1394
|
+
level: maxDepLevel
|
|
1395
|
+
});
|
|
1396
|
+
}
|
|
1397
|
+
for (const pkg of toUpdate) visit(pkg, 0);
|
|
1398
|
+
result.sort((a, b) => a.level - b.level);
|
|
1399
|
+
return result;
|
|
1400
|
+
}
|
|
1401
|
+
/**
|
|
536
1402
|
* Create version updates for all packages affected by dependency changes
|
|
537
1403
|
*
|
|
538
1404
|
* When a package is updated, all packages that depend on it should also be updated.
|
|
@@ -548,359 +1414,760 @@ function createDependentUpdates(graph, workspacePackages, directUpdates) {
|
|
|
548
1414
|
const directUpdateMap = new Map(directUpdates.map((u) => [u.package.name, u]));
|
|
549
1415
|
const affectedPackages = getAllAffectedPackages(graph, new Set(directUpdates.map((u) => u.package.name)));
|
|
550
1416
|
for (const pkgName of affectedPackages) {
|
|
551
|
-
|
|
1417
|
+
logger.verbose(`Processing affected package: ${pkgName}`);
|
|
1418
|
+
if (directUpdateMap.has(pkgName)) {
|
|
1419
|
+
logger.verbose(`Skipping ${pkgName}, already has a direct update`);
|
|
1420
|
+
continue;
|
|
1421
|
+
}
|
|
552
1422
|
const pkg = workspacePackages.find((p) => p.name === pkgName);
|
|
553
1423
|
if (!pkg) continue;
|
|
554
1424
|
allUpdates.push(createVersionUpdate(pkg, "patch", false));
|
|
555
1425
|
}
|
|
556
1426
|
return allUpdates;
|
|
557
1427
|
}
|
|
558
|
-
/**
|
|
559
|
-
* Update all package.json files with new versions and dependency updates
|
|
560
|
-
*
|
|
561
|
-
* Updates are performed in parallel for better performance.
|
|
562
|
-
*
|
|
563
|
-
* @param updates - Version updates to apply
|
|
564
|
-
*/
|
|
565
|
-
async function updateAllPackageJsonFiles(updates) {
|
|
566
|
-
await Promise.all(updates.map(async (update) => {
|
|
567
|
-
const depUpdates = getDependencyUpdates(update.package, updates);
|
|
568
|
-
await updatePackageJson(update.package, update.newVersion, depUpdates);
|
|
569
|
-
}));
|
|
570
|
-
}
|
|
571
1428
|
|
|
572
1429
|
//#endregion
|
|
573
|
-
//#region src/
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
1430
|
+
//#region src/versioning/version.ts
|
|
1431
|
+
const messageColorMap = {
|
|
1432
|
+
feat: farver.green,
|
|
1433
|
+
feature: farver.green,
|
|
1434
|
+
refactor: farver.cyan,
|
|
1435
|
+
style: farver.cyan,
|
|
1436
|
+
docs: farver.blue,
|
|
1437
|
+
doc: farver.blue,
|
|
1438
|
+
types: farver.blue,
|
|
1439
|
+
type: farver.blue,
|
|
1440
|
+
chore: farver.gray,
|
|
1441
|
+
ci: farver.gray,
|
|
1442
|
+
build: farver.gray,
|
|
1443
|
+
deps: farver.gray,
|
|
1444
|
+
dev: farver.gray,
|
|
1445
|
+
fix: farver.yellow,
|
|
1446
|
+
test: farver.yellow,
|
|
1447
|
+
perf: farver.magenta,
|
|
1448
|
+
revert: farver.red,
|
|
1449
|
+
breaking: farver.red
|
|
1450
|
+
};
|
|
1451
|
+
function formatCommitsForDisplay(commits) {
|
|
1452
|
+
if (commits.length === 0) return farver.dim("No commits found");
|
|
1453
|
+
const maxCommitsToShow = 10;
|
|
1454
|
+
const commitsToShow = commits.slice(0, maxCommitsToShow);
|
|
1455
|
+
const hasMore = commits.length > maxCommitsToShow;
|
|
1456
|
+
const typeLength = commits.map(({ type }) => type.length).reduce((a, b) => Math.max(a, b), 0);
|
|
1457
|
+
const scopeLength = commits.map(({ scope }) => scope?.length).reduce((a, b) => Math.max(a || 0, b || 0), 0) || 0;
|
|
1458
|
+
const formattedCommits = commitsToShow.map((commit) => {
|
|
1459
|
+
let color = messageColorMap[commit.type] || ((c) => c);
|
|
1460
|
+
if (commit.isBreaking) color = (s) => farver.inverse.red(s);
|
|
1461
|
+
const paddedType = commit.type.padStart(typeLength + 1, " ");
|
|
1462
|
+
const paddedScope = !commit.scope ? " ".repeat(scopeLength ? scopeLength + 2 : 0) : farver.dim("(") + commit.scope + farver.dim(")") + " ".repeat(scopeLength - commit.scope.length);
|
|
1463
|
+
return [
|
|
1464
|
+
farver.dim(commit.shortHash),
|
|
1465
|
+
" ",
|
|
1466
|
+
color === farver.gray ? color(paddedType) : farver.bold(color(paddedType)),
|
|
1467
|
+
" ",
|
|
1468
|
+
paddedScope,
|
|
1469
|
+
farver.dim(":"),
|
|
1470
|
+
" ",
|
|
1471
|
+
color === farver.gray ? color(commit.description) : commit.description
|
|
1472
|
+
].join("");
|
|
1473
|
+
}).join("\n");
|
|
1474
|
+
if (hasMore) return `${formattedCommits}\n ${farver.dim(`... and ${commits.length - maxCommitsToShow} more commits`)}`;
|
|
1475
|
+
return formattedCommits;
|
|
1476
|
+
}
|
|
1477
|
+
async function calculateVersionUpdates({ workspacePackages, packageCommits, workspaceRoot, showPrompt, globalCommitsPerPackage, overrides: initialOverrides = {} }) {
|
|
1478
|
+
const versionUpdates = [];
|
|
1479
|
+
const processedPackages = /* @__PURE__ */ new Set();
|
|
1480
|
+
const newOverrides = { ...initialOverrides };
|
|
1481
|
+
const bumpRanks = {
|
|
1482
|
+
major: 3,
|
|
1483
|
+
minor: 2,
|
|
1484
|
+
patch: 1,
|
|
1485
|
+
none: 0
|
|
1486
|
+
};
|
|
1487
|
+
logger.verbose(`Starting version inference for ${packageCommits.size} packages with commits`);
|
|
1488
|
+
for (const [pkgName, pkgCommits] of packageCommits) {
|
|
1489
|
+
const pkg = workspacePackages.find((p) => p.name === pkgName);
|
|
1490
|
+
if (!pkg) {
|
|
1491
|
+
logger.error(`Package ${pkgName} not found in workspace packages, skipping`);
|
|
1492
|
+
continue;
|
|
1493
|
+
}
|
|
1494
|
+
processedPackages.add(pkgName);
|
|
1495
|
+
const globalCommits = globalCommitsPerPackage.get(pkgName) || [];
|
|
1496
|
+
const allCommitsForPackage = [...pkgCommits, ...globalCommits];
|
|
1497
|
+
const determinedBump = determineHighestBump(allCommitsForPackage);
|
|
1498
|
+
const override = newOverrides[pkgName];
|
|
1499
|
+
const effectiveBump = override?.type || determinedBump;
|
|
1500
|
+
if (effectiveBump === "none") continue;
|
|
1501
|
+
let newVersion = override?.version || getNextVersion(pkg.version, effectiveBump);
|
|
1502
|
+
let finalBumpType = effectiveBump;
|
|
1503
|
+
if (!isCI && showPrompt) {
|
|
1504
|
+
logger.clearScreen();
|
|
1505
|
+
logger.section(`📝 Commits for ${farver.cyan(pkg.name)}`);
|
|
1506
|
+
formatCommitsForDisplay(allCommitsForPackage).split("\n").forEach((line) => logger.item(line));
|
|
1507
|
+
logger.emptyLine();
|
|
1508
|
+
const selectedVersion = await selectVersionPrompt(workspaceRoot, pkg, pkg.version, newVersion);
|
|
1509
|
+
if (selectedVersion === null) continue;
|
|
1510
|
+
const userBump = calculateBumpType(pkg.version, selectedVersion);
|
|
1511
|
+
finalBumpType = userBump;
|
|
1512
|
+
if (bumpRanks[userBump] < bumpRanks[determinedBump]) {
|
|
1513
|
+
newOverrides[pkgName] = {
|
|
1514
|
+
type: userBump,
|
|
1515
|
+
version: selectedVersion
|
|
1516
|
+
};
|
|
1517
|
+
logger.info(`Version override recorded for ${pkgName}: ${determinedBump} → ${userBump}`);
|
|
1518
|
+
} else if (newOverrides[pkgName] && bumpRanks[userBump] >= bumpRanks[determinedBump]) {
|
|
1519
|
+
delete newOverrides[pkgName];
|
|
1520
|
+
logger.info(`Version override removed for ${pkgName}.`);
|
|
1521
|
+
}
|
|
1522
|
+
newVersion = selectedVersion;
|
|
1523
|
+
}
|
|
1524
|
+
versionUpdates.push({
|
|
1525
|
+
package: pkg,
|
|
1526
|
+
currentVersion: pkg.version,
|
|
1527
|
+
newVersion,
|
|
1528
|
+
bumpType: finalBumpType,
|
|
1529
|
+
hasDirectChanges: allCommitsForPackage.length > 0
|
|
1530
|
+
});
|
|
1531
|
+
}
|
|
1532
|
+
if (!isCI && showPrompt) for (const pkg of workspacePackages) {
|
|
1533
|
+
if (processedPackages.has(pkg.name)) continue;
|
|
1534
|
+
logger.clearScreen();
|
|
1535
|
+
logger.section(`📦 Package: ${pkg.name}`);
|
|
1536
|
+
logger.item("No direct commits found");
|
|
1537
|
+
const newVersion = await selectVersionPrompt(workspaceRoot, pkg, pkg.version, pkg.version);
|
|
1538
|
+
if (newVersion === null) break;
|
|
1539
|
+
if (newVersion !== pkg.version) {
|
|
1540
|
+
const bumpType = calculateBumpType(pkg.version, newVersion);
|
|
1541
|
+
versionUpdates.push({
|
|
1542
|
+
package: pkg,
|
|
1543
|
+
currentVersion: pkg.version,
|
|
1544
|
+
newVersion,
|
|
1545
|
+
bumpType,
|
|
1546
|
+
hasDirectChanges: false
|
|
1547
|
+
});
|
|
1548
|
+
}
|
|
1549
|
+
}
|
|
1550
|
+
return {
|
|
1551
|
+
updates: versionUpdates,
|
|
1552
|
+
overrides: newOverrides
|
|
597
1553
|
};
|
|
598
|
-
for (const commit of limitedCommits) if (commit.type && commit.type in grouped) grouped[commit.type].push(commit);
|
|
599
|
-
else grouped.other.push(commit);
|
|
600
|
-
return grouped;
|
|
601
1554
|
}
|
|
602
1555
|
/**
|
|
603
|
-
*
|
|
1556
|
+
* Calculate version updates and prepare dependent updates
|
|
1557
|
+
* Returns both the updates and a function to apply them
|
|
604
1558
|
*/
|
|
605
|
-
function
|
|
606
|
-
const
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
1559
|
+
async function calculateAndPrepareVersionUpdates({ workspacePackages, packageCommits, workspaceRoot, showPrompt, globalCommitsPerPackage, overrides }) {
|
|
1560
|
+
const { updates: directUpdates, overrides: newOverrides } = await calculateVersionUpdates({
|
|
1561
|
+
workspacePackages,
|
|
1562
|
+
packageCommits,
|
|
1563
|
+
workspaceRoot,
|
|
1564
|
+
showPrompt,
|
|
1565
|
+
globalCommitsPerPackage,
|
|
1566
|
+
overrides
|
|
1567
|
+
});
|
|
1568
|
+
const allUpdates = createDependentUpdates(buildPackageDependencyGraph(workspacePackages), workspacePackages, directUpdates);
|
|
1569
|
+
const applyUpdates = async () => {
|
|
1570
|
+
await Promise.all(allUpdates.map(async (update) => {
|
|
1571
|
+
const depUpdates = getDependencyUpdates(update.package, allUpdates);
|
|
1572
|
+
await updatePackageJson(update.package, update.newVersion, depUpdates);
|
|
1573
|
+
}));
|
|
620
1574
|
};
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
if (
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
const scope = commit.scope ? `(${commit.scope})` : "";
|
|
640
|
-
const breaking = commit.isBreaking ? " ⚠️ BREAKING" : "";
|
|
641
|
-
lines.push(` • ${commit.type}${scope}: ${commit.message}${breaking}`);
|
|
642
|
-
}
|
|
1575
|
+
return {
|
|
1576
|
+
allUpdates,
|
|
1577
|
+
applyUpdates,
|
|
1578
|
+
overrides: newOverrides
|
|
1579
|
+
};
|
|
1580
|
+
}
|
|
1581
|
+
async function updatePackageJson(pkg, newVersion, dependencyUpdates) {
|
|
1582
|
+
const packageJsonPath = join(pkg.path, "package.json");
|
|
1583
|
+
const content = await readFile(packageJsonPath, "utf-8");
|
|
1584
|
+
const packageJson = JSON.parse(content);
|
|
1585
|
+
packageJson.version = newVersion;
|
|
1586
|
+
function updateDependency(deps, depName, depVersion, isPeerDependency = false) {
|
|
1587
|
+
if (!deps) return;
|
|
1588
|
+
const oldVersion = deps[depName];
|
|
1589
|
+
if (!oldVersion) return;
|
|
1590
|
+
if (oldVersion === "workspace:*") {
|
|
1591
|
+
logger.verbose(` - Skipping workspace:* dependency: ${depName}`);
|
|
1592
|
+
return;
|
|
643
1593
|
}
|
|
1594
|
+
if (isPeerDependency) {
|
|
1595
|
+
const majorVersion = depVersion.split(".")[0];
|
|
1596
|
+
deps[depName] = `>=${depVersion} <${Number(majorVersion) + 1}.0.0`;
|
|
1597
|
+
} else deps[depName] = `^${depVersion}`;
|
|
1598
|
+
logger.verbose(` - Updated dependency ${depName}: ${oldVersion} → ${deps[depName]}`);
|
|
1599
|
+
}
|
|
1600
|
+
for (const [depName, depVersion] of dependencyUpdates) {
|
|
1601
|
+
updateDependency(packageJson.dependencies, depName, depVersion);
|
|
1602
|
+
updateDependency(packageJson.devDependencies, depName, depVersion);
|
|
1603
|
+
updateDependency(packageJson.peerDependencies, depName, depVersion, true);
|
|
644
1604
|
}
|
|
645
|
-
|
|
1605
|
+
await writeFile(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf-8");
|
|
1606
|
+
logger.verbose(` - Successfully wrote updated package.json`);
|
|
646
1607
|
}
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
|
|
652
|
-
|
|
653
|
-
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
|
|
662
|
-
return response.selectedPackages;
|
|
1608
|
+
/**
|
|
1609
|
+
* Get all dependency updates needed for a package
|
|
1610
|
+
*/
|
|
1611
|
+
function getDependencyUpdates(pkg, allUpdates) {
|
|
1612
|
+
const updates = /* @__PURE__ */ new Map();
|
|
1613
|
+
const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
|
|
1614
|
+
for (const dep of allDeps) {
|
|
1615
|
+
const update = allUpdates.find((u) => u.package.name === dep);
|
|
1616
|
+
if (update) {
|
|
1617
|
+
logger.verbose(` - Dependency ${dep} will be updated: ${update.currentVersion} → ${update.newVersion} (${update.bumpType})`);
|
|
1618
|
+
updates.set(dep, update.newVersion);
|
|
1619
|
+
}
|
|
1620
|
+
}
|
|
1621
|
+
if (updates.size === 0) logger.verbose(` - No dependency updates needed`);
|
|
1622
|
+
return updates;
|
|
663
1623
|
}
|
|
664
|
-
|
|
665
|
-
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
672
|
-
|
|
673
|
-
|
|
674
|
-
|
|
675
|
-
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
1624
|
+
|
|
1625
|
+
//#endregion
|
|
1626
|
+
//#region src/operations/calculate.ts
|
|
1627
|
+
async function calculateUpdates(options) {
|
|
1628
|
+
const { workspacePackages, workspaceRoot, showPrompt, overrides, globalCommitMode } = options;
|
|
1629
|
+
try {
|
|
1630
|
+
const grouped = await getWorkspacePackageGroupedCommits(workspaceRoot, workspacePackages);
|
|
1631
|
+
return ok(await calculateAndPrepareVersionUpdates({
|
|
1632
|
+
workspacePackages,
|
|
1633
|
+
packageCommits: grouped,
|
|
1634
|
+
workspaceRoot,
|
|
1635
|
+
showPrompt,
|
|
1636
|
+
globalCommitsPerPackage: await getGlobalCommitsPerPackage(workspaceRoot, grouped, workspacePackages, globalCommitMode),
|
|
1637
|
+
overrides
|
|
1638
|
+
}));
|
|
1639
|
+
} catch (error) {
|
|
1640
|
+
return err({
|
|
1641
|
+
type: "git",
|
|
1642
|
+
operation: "calculateUpdates",
|
|
1643
|
+
message: error instanceof Error ? error.message : String(error)
|
|
680
1644
|
});
|
|
681
1645
|
}
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
1646
|
+
}
|
|
1647
|
+
function ensureHasPackages(packages) {
|
|
1648
|
+
if (packages.length === 0) return err({
|
|
1649
|
+
type: "git",
|
|
1650
|
+
operation: "discoverWorkspacePackages",
|
|
1651
|
+
message: "No packages found to release"
|
|
685
1652
|
});
|
|
686
|
-
|
|
687
|
-
type: "select",
|
|
688
|
-
name: "choice",
|
|
689
|
-
message: `${pkg.name} (${currentVersion}):`,
|
|
690
|
-
choices,
|
|
691
|
-
initial: 0
|
|
692
|
-
}, {
|
|
693
|
-
type: (prev) => prev === "custom" ? "text" : null,
|
|
694
|
-
name: "customVersion",
|
|
695
|
-
message: "Enter custom version:",
|
|
696
|
-
initial: suggestedVersion,
|
|
697
|
-
validate: (value) => {
|
|
698
|
-
return /^\d+\.\d+\.\d+(?:[-+].+)?$/.test(value) || "Invalid semver version (e.g., 1.0.0)";
|
|
699
|
-
}
|
|
700
|
-
}]);
|
|
701
|
-
if (response.choice === "suggested") return suggestedVersion;
|
|
702
|
-
else if (response.choice === "custom") return response.customVersion;
|
|
703
|
-
else return calculateNewVersion(currentVersion, response.choice);
|
|
1653
|
+
return ok(packages);
|
|
704
1654
|
}
|
|
705
|
-
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
|
|
709
|
-
|
|
1655
|
+
|
|
1656
|
+
//#endregion
|
|
1657
|
+
//#region src/operations/pr.ts
|
|
1658
|
+
async function syncPullRequest(options) {
|
|
1659
|
+
const { github, releaseBranch, defaultBranch, pullRequestTitle, pullRequestBody, updates } = options;
|
|
1660
|
+
let existing = null;
|
|
1661
|
+
try {
|
|
1662
|
+
existing = await github.getExistingPullRequest(releaseBranch);
|
|
1663
|
+
} catch (error) {
|
|
1664
|
+
return {
|
|
1665
|
+
ok: false,
|
|
1666
|
+
error: {
|
|
1667
|
+
type: "github",
|
|
1668
|
+
operation: "getExistingPullRequest",
|
|
1669
|
+
message: error instanceof Error ? error.message : String(error)
|
|
1670
|
+
}
|
|
1671
|
+
};
|
|
1672
|
+
}
|
|
1673
|
+
const doesExist = !!existing;
|
|
1674
|
+
const title = existing?.title || pullRequestTitle || "chore: update package versions";
|
|
1675
|
+
const body = generatePullRequestBody(updates, pullRequestBody);
|
|
1676
|
+
let pr = null;
|
|
1677
|
+
try {
|
|
1678
|
+
pr = await github.upsertPullRequest({
|
|
1679
|
+
pullNumber: existing?.number,
|
|
1680
|
+
title,
|
|
1681
|
+
body,
|
|
1682
|
+
head: releaseBranch,
|
|
1683
|
+
base: defaultBranch
|
|
1684
|
+
});
|
|
1685
|
+
} catch (error) {
|
|
1686
|
+
return {
|
|
1687
|
+
ok: false,
|
|
1688
|
+
error: {
|
|
1689
|
+
type: "github",
|
|
1690
|
+
operation: "upsertPullRequest",
|
|
1691
|
+
message: error instanceof Error ? error.message : String(error)
|
|
1692
|
+
}
|
|
1693
|
+
};
|
|
710
1694
|
}
|
|
711
|
-
return
|
|
1695
|
+
return ok({
|
|
1696
|
+
pullRequest: pr,
|
|
1697
|
+
created: !doesExist
|
|
1698
|
+
});
|
|
712
1699
|
}
|
|
713
1700
|
|
|
714
1701
|
//#endregion
|
|
715
|
-
//#region src/
|
|
716
|
-
async function
|
|
717
|
-
|
|
718
|
-
|
|
719
|
-
|
|
720
|
-
else if (Array.isArray(options.packages)) {
|
|
721
|
-
workspaceOptions = {
|
|
722
|
-
excludePrivate: false,
|
|
723
|
-
included: options.packages
|
|
724
|
-
};
|
|
725
|
-
explicitPackages = options.packages;
|
|
726
|
-
} else {
|
|
727
|
-
workspaceOptions = options.packages;
|
|
728
|
-
if (options.packages.included) explicitPackages = options.packages.included;
|
|
1702
|
+
//#region src/workflows/prepare.ts
|
|
1703
|
+
async function prepareWorkflow(options) {
|
|
1704
|
+
if (options.safeguards) {
|
|
1705
|
+
const clean = await isWorkingDirectoryClean(options.workspaceRoot);
|
|
1706
|
+
if (!clean.ok || !clean.value) exitWithError("Working directory is not clean. Please commit or stash your changes before proceeding.");
|
|
729
1707
|
}
|
|
730
|
-
const
|
|
731
|
-
if (
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
1708
|
+
const discovered = await discoverWorkspacePackages(options.workspaceRoot, options);
|
|
1709
|
+
if (!discovered.ok) exitWithError(`Failed to discover packages: ${discovered.error.message}`);
|
|
1710
|
+
const ensured = ensureHasPackages(discovered.value);
|
|
1711
|
+
if (!ensured.ok) {
|
|
1712
|
+
logger.warn(ensured.error.message);
|
|
1713
|
+
return null;
|
|
735
1714
|
}
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
1715
|
+
const workspacePackages = ensured.value;
|
|
1716
|
+
logger.section("📦 Workspace Packages");
|
|
1717
|
+
logger.item(`Found ${workspacePackages.length} packages`);
|
|
1718
|
+
for (const pkg of workspacePackages) {
|
|
1719
|
+
logger.item(`${farver.cyan(pkg.name)} (${farver.bold(pkg.version)})`);
|
|
1720
|
+
logger.item(` ${farver.gray("→")} ${farver.gray(pkg.path)}`);
|
|
741
1721
|
}
|
|
742
|
-
|
|
1722
|
+
logger.emptyLine();
|
|
1723
|
+
const prepareBranchResult = await prepareReleaseBranch({
|
|
1724
|
+
workspaceRoot: options.workspaceRoot,
|
|
1725
|
+
releaseBranch: options.branch.release,
|
|
1726
|
+
defaultBranch: options.branch.default
|
|
1727
|
+
});
|
|
1728
|
+
if (!prepareBranchResult.ok) exitWithError(prepareBranchResult.error.message);
|
|
1729
|
+
const overridesPath = join(options.workspaceRoot, ucdjsReleaseOverridesPath);
|
|
1730
|
+
let existingOverrides = {};
|
|
1731
|
+
try {
|
|
1732
|
+
const overridesContent = await readFile(overridesPath, "utf-8");
|
|
1733
|
+
existingOverrides = JSON.parse(overridesContent);
|
|
1734
|
+
logger.info("Found existing version overrides file.");
|
|
1735
|
+
} catch {
|
|
1736
|
+
logger.info("No existing version overrides file found. Continuing...");
|
|
1737
|
+
}
|
|
1738
|
+
const updatesResult = await calculateUpdates({
|
|
743
1739
|
workspacePackages,
|
|
744
|
-
|
|
1740
|
+
workspaceRoot: options.workspaceRoot,
|
|
1741
|
+
showPrompt: options.prompts?.versions !== false,
|
|
1742
|
+
globalCommitMode: options.globalCommitMode === "none" ? false : options.globalCommitMode,
|
|
1743
|
+
overrides: existingOverrides
|
|
1744
|
+
});
|
|
1745
|
+
if (!updatesResult.ok) exitWithError(updatesResult.error.message);
|
|
1746
|
+
const { allUpdates, applyUpdates, overrides: newOverrides } = updatesResult.value;
|
|
1747
|
+
if (Object.keys(newOverrides).length > 0) {
|
|
1748
|
+
logger.info("Writing version overrides file...");
|
|
1749
|
+
try {
|
|
1750
|
+
await mkdir(join(options.workspaceRoot, ".github"), { recursive: true });
|
|
1751
|
+
await writeFile(overridesPath, JSON.stringify(newOverrides, null, 2), "utf-8");
|
|
1752
|
+
logger.success("Successfully wrote version overrides file.");
|
|
1753
|
+
} catch (e) {
|
|
1754
|
+
logger.error("Failed to write version overrides file:", e);
|
|
1755
|
+
}
|
|
1756
|
+
}
|
|
1757
|
+
if (Object.keys(newOverrides).length === 0 && Object.keys(existingOverrides).length > 0) {
|
|
1758
|
+
let shouldRemoveOverrides = false;
|
|
1759
|
+
for (const update of allUpdates) {
|
|
1760
|
+
const overriddenVersion = existingOverrides[update.package.name];
|
|
1761
|
+
if (overriddenVersion) {
|
|
1762
|
+
if (compare(update.newVersion, overriddenVersion.version) > 0) {
|
|
1763
|
+
shouldRemoveOverrides = true;
|
|
1764
|
+
break;
|
|
1765
|
+
}
|
|
1766
|
+
}
|
|
1767
|
+
}
|
|
1768
|
+
if (shouldRemoveOverrides) {
|
|
1769
|
+
logger.info("Removing obsolete version overrides file...");
|
|
1770
|
+
try {
|
|
1771
|
+
await rm(overridesPath);
|
|
1772
|
+
logger.success("Successfully removed obsolete version overrides file.");
|
|
1773
|
+
} catch (e) {
|
|
1774
|
+
logger.error("Failed to remove obsolete version overrides file:", e);
|
|
1775
|
+
}
|
|
1776
|
+
}
|
|
1777
|
+
}
|
|
1778
|
+
if (allUpdates.filter((u) => u.hasDirectChanges).length === 0) logger.warn("No packages have changes requiring a release");
|
|
1779
|
+
logger.section("🔄 Version Updates");
|
|
1780
|
+
logger.item(`Updating ${allUpdates.length} packages (including dependents)`);
|
|
1781
|
+
for (const update of allUpdates) logger.item(`${update.package.name}: ${update.currentVersion} → ${update.newVersion}`);
|
|
1782
|
+
await applyUpdates();
|
|
1783
|
+
if (options.changelog?.enabled) {
|
|
1784
|
+
logger.step("Updating changelogs");
|
|
1785
|
+
const groupedPackageCommits = await getWorkspacePackageGroupedCommits(options.workspaceRoot, workspacePackages);
|
|
1786
|
+
const globalCommitsPerPackage = await getGlobalCommitsPerPackage(options.workspaceRoot, groupedPackageCommits, workspacePackages, options.globalCommitMode === "none" ? false : options.globalCommitMode);
|
|
1787
|
+
const changelogPromises = allUpdates.map((update) => {
|
|
1788
|
+
const pkgCommits = groupedPackageCommits.get(update.package.name) || [];
|
|
1789
|
+
const globalCommits = globalCommitsPerPackage.get(update.package.name) || [];
|
|
1790
|
+
const allCommits = [...pkgCommits, ...globalCommits];
|
|
1791
|
+
if (allCommits.length === 0) {
|
|
1792
|
+
logger.verbose(`No commits for ${update.package.name}, skipping changelog`);
|
|
1793
|
+
return Promise.resolve();
|
|
1794
|
+
}
|
|
1795
|
+
logger.verbose(`Updating changelog for ${farver.cyan(update.package.name)}`);
|
|
1796
|
+
return updateChangelog({
|
|
1797
|
+
normalizedOptions: {
|
|
1798
|
+
...options,
|
|
1799
|
+
workspaceRoot: options.workspaceRoot
|
|
1800
|
+
},
|
|
1801
|
+
githubClient: options.githubClient,
|
|
1802
|
+
workspacePackage: update.package,
|
|
1803
|
+
version: update.newVersion,
|
|
1804
|
+
previousVersion: update.currentVersion !== "0.0.0" ? update.currentVersion : void 0,
|
|
1805
|
+
commits: allCommits,
|
|
1806
|
+
date: (/* @__PURE__ */ new Date()).toISOString().split("T")[0]
|
|
1807
|
+
});
|
|
1808
|
+
}).filter((p) => p != null);
|
|
1809
|
+
const updates = await Promise.all(changelogPromises);
|
|
1810
|
+
logger.success(`Updated ${updates.length} changelog(s)`);
|
|
1811
|
+
}
|
|
1812
|
+
const hasChangesToPush = await syncReleaseChanges({
|
|
1813
|
+
workspaceRoot: options.workspaceRoot,
|
|
1814
|
+
releaseBranch: options.branch.release,
|
|
1815
|
+
commitMessage: "chore: update release versions",
|
|
1816
|
+
hasChanges: true
|
|
1817
|
+
});
|
|
1818
|
+
if (!hasChangesToPush.ok) exitWithError(hasChangesToPush.error.message);
|
|
1819
|
+
if (!hasChangesToPush.value) {
|
|
1820
|
+
const prResult = await syncPullRequest({
|
|
1821
|
+
github: options.githubClient,
|
|
1822
|
+
releaseBranch: options.branch.release,
|
|
1823
|
+
defaultBranch: options.branch.default,
|
|
1824
|
+
pullRequestTitle: options.pullRequest?.title,
|
|
1825
|
+
pullRequestBody: options.pullRequest?.body,
|
|
1826
|
+
updates: allUpdates
|
|
1827
|
+
});
|
|
1828
|
+
if (!prResult.ok) exitWithError(prResult.error.message);
|
|
1829
|
+
if (prResult.value.pullRequest) {
|
|
1830
|
+
logger.item("No updates needed, PR is already up to date");
|
|
1831
|
+
return {
|
|
1832
|
+
updates: allUpdates,
|
|
1833
|
+
prUrl: prResult.value.pullRequest.html_url,
|
|
1834
|
+
created: prResult.value.created
|
|
1835
|
+
};
|
|
1836
|
+
}
|
|
1837
|
+
logger.error("No changes to commit, and no existing PR. Nothing to do.");
|
|
1838
|
+
return null;
|
|
1839
|
+
}
|
|
1840
|
+
const prResult = await syncPullRequest({
|
|
1841
|
+
github: options.githubClient,
|
|
1842
|
+
releaseBranch: options.branch.release,
|
|
1843
|
+
defaultBranch: options.branch.default,
|
|
1844
|
+
pullRequestTitle: options.pullRequest?.title,
|
|
1845
|
+
pullRequestBody: options.pullRequest?.body,
|
|
1846
|
+
updates: allUpdates
|
|
1847
|
+
});
|
|
1848
|
+
if (!prResult.ok) exitWithError(prResult.error.message);
|
|
1849
|
+
if (prResult.value.pullRequest?.html_url) {
|
|
1850
|
+
logger.section("🚀 Pull Request");
|
|
1851
|
+
logger.success(`Pull request ${prResult.value.created ? "created" : "updated"}: ${prResult.value.pullRequest.html_url}`);
|
|
1852
|
+
}
|
|
1853
|
+
const returnToDefault = await checkoutBranch(options.branch.default, options.workspaceRoot);
|
|
1854
|
+
if (!returnToDefault.ok || !returnToDefault.value) exitWithError(`Failed to checkout branch: ${options.branch.default}`);
|
|
1855
|
+
return {
|
|
1856
|
+
updates: allUpdates,
|
|
1857
|
+
prUrl: prResult.value.pullRequest?.html_url,
|
|
1858
|
+
created: prResult.value.created
|
|
745
1859
|
};
|
|
746
1860
|
}
|
|
747
|
-
|
|
1861
|
+
|
|
1862
|
+
//#endregion
|
|
1863
|
+
//#region src/core/npm.ts
|
|
1864
|
+
/**
 * Normalize an arbitrary thrown value into a structured NPM error record.
 *
 * @param operation - Name of the npm operation that failed (e.g. "publishPackage").
 * @param error - The caught value; Error instances contribute their `.message`, anything else is stringified.
 * @param code - Optional npm error code (e.g. "E404", "EOTP"); may be undefined.
 * @returns A plain `{ type, operation, message, code }` object tagged with type "npm".
 */
function toNPMError(operation, error, code) {
  const message = error instanceof Error ? error.message : String(error);
  return {
    type: "npm",
    operation,
    message,
    code
  };
}
|
|
1872
|
+
/**
 * Resolve the NPM registry base URL.
 * Honors the NPM_CONFIG_REGISTRY environment variable and falls back to the
 * public npmjs registry. Note: uses `||`, so an empty-string override also
 * falls back to the default.
 *
 * @returns The registry base URL as a string.
 */
function getRegistryURL() {
  const configured = process.env.NPM_CONFIG_REGISTRY;
  return configured || "https://registry.npmjs.org";
}
|
|
1879
|
+
/**
 * Fetch the full packument (package metadata) for a package from the NPM registry.
 *
 * @param packageName - The package name (e.g. "lodash" or "@scope/name").
 * @returns ok(metadata JSON) on success; err(npm error) with code "E404" when the
 *   package does not exist, or "ENETWORK" when the request itself throws.
 */
async function getPackageMetadata(packageName) {
  try {
    const registry = getRegistryURL();
    // Scoped names keep their leading "@" but the rest (including the "/") is percent-encoded.
    let encodedName;
    if (packageName.startsWith("@")) {
      encodedName = `@${encodeURIComponent(packageName.slice(1))}`;
    } else {
      encodedName = encodeURIComponent(packageName);
    }
    const response = await fetch(`${registry}/${encodedName}`, { headers: { Accept: "application/json" } });
    if (response.ok) {
      return ok(await response.json());
    }
    if (response.status === 404) {
      return err(toNPMError("getPackageMetadata", `Package not found: ${packageName}`, "E404"));
    }
    return err(toNPMError("getPackageMetadata", `HTTP ${response.status}: ${response.statusText}`));
  } catch (error) {
    return err(toNPMError("getPackageMetadata", error, "ENETWORK"));
  }
}
|
|
1898
|
+
/**
 * Check whether a specific version of a package has been published to NPM.
 *
 * @param packageName - The package name.
 * @param version - The exact version to look for (e.g. "1.2.3").
 * @returns ok(true) if the version exists, ok(false) if the package or version is
 *   missing, or err(npm error) for any other metadata-fetch failure.
 */
async function checkVersionExists(packageName, version) {
  const metadataResult = await getPackageMetadata(packageName);
  if (!metadataResult.ok) {
    // A missing package simply means the version cannot exist yet.
    if (metadataResult.error.code === "E404") return ok(false);
    return err(metadataResult.error);
  }
  // Fix: the previous `version in metadataResult.value.versions` threw a TypeError
  // when the packument lacked a `versions` map, and the `in` operator also walks
  // the prototype chain (so a version string like "toString" would falsely report
  // as published). Object.hasOwn on a defaulted object avoids both problems.
  return ok(Object.hasOwn(metadataResult.value.versions ?? {}, version));
}
|
|
1912
|
+
/**
 * Run a package's build script (via `pnpm --filter <name> build`) before publishing.
 * A no-op when `options.npm.runBuild` is disabled.
 *
 * @param packageName - The workspace package to build.
 * @param workspaceRoot - Path to the workspace root (used as the command cwd).
 * @param options - Normalized release scripts options.
 * @returns ok(undefined) on success or skip; err(npm error) when the build command fails.
 */
async function buildPackage(packageName, workspaceRoot, options) {
  if (!options.npm.runBuild) return ok(void 0);
  const pnpmArgs = ["--filter", packageName, "build"];
  try {
    await runIfNotDry("pnpm", pnpmArgs, { nodeOptions: {
      cwd: workspaceRoot,
      stdio: "inherit"
    } });
    return ok(void 0);
  } catch (error) {
    return err(toNPMError("buildPackage", error));
  }
}
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
1935
|
+
/**
 * Publish a workspace package to NPM via `pnpm publish`.
 * pnpm resolves `workspace:` and `catalog:` protocol dependencies automatically.
 *
 * @param packageName - The workspace package to publish.
 * @param workspaceRoot - Path to the workspace root (used as the command cwd).
 * @param options - Normalized release scripts options (access, otp, provenance).
 * @returns ok(undefined) on success; err(npm error) with a best-effort code
 *   ("E403" | "EPUBLISHCONFLICT" | "EOTP") extracted from the failure message.
 */
async function publishPackage(packageName, workspaceRoot, options) {
  const args = ["--filter", packageName, "publish", "--access", options.npm.access, "--no-git-checks"];
  if (options.npm.otp) args.push("--otp", options.npm.otp);
  if (process.env.NPM_CONFIG_TAG) args.push("--tag", process.env.NPM_CONFIG_TAG);
  const env = { ...process.env };
  if (options.npm.provenance) env.NPM_CONFIG_PROVENANCE = "true";
  try {
    await runIfNotDry("pnpm", args, { nodeOptions: {
      cwd: workspaceRoot,
      stdio: "inherit",
      env
    } });
    return ok(void 0);
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    // Map well-known npm failure markers in the message to a structured code,
    // first match wins (same precedence as before: E403, EPUBLISHCONFLICT, EOTP).
    let code;
    for (const known of ["E403", "EPUBLISHCONFLICT", "EOTP"]) {
      if (errorMessage.includes(known)) {
        code = known;
        break;
      }
    }
    return err(toNPMError("publishPackage", error, code));
  }
}
|
|
793
|
-
|
|
794
|
-
|
|
795
|
-
|
|
796
|
-
|
|
797
|
-
|
|
1968
|
+
|
|
1969
|
+
//#endregion
|
|
1970
|
+
//#region src/workflows/publish.ts
|
|
1971
|
+
/**
 * Publish every public workspace package to NPM, in dependency order.
 *
 * Flow: discover packages -> build dependency graph -> filter out private
 * packages -> compute publish order -> for each package: skip if the version
 * already exists on NPM, optionally build, publish, then create and push a git
 * tag. Ends with a published/skipped/failed summary. Calls `exitWithError`
 * (which presumably terminates the process — verify against its definition) on
 * any hard failure, so `status.failed` entries pushed just before it are
 * mostly informational.
 *
 * @param options - Normalized release scripts options (workspaceRoot, npm.*).
 */
async function publishWorkflow(options) {
	logger.section("📦 Publishing Packages");
	// Discover all workspace packages; abort on discovery failure.
	const discovered = await discoverWorkspacePackages(options.workspaceRoot, options);
	if (!discovered.ok) exitWithError(`Failed to discover packages: ${discovered.error.message}`);
	const workspacePackages = discovered.value;
	logger.item(`Found ${workspacePackages.length} packages in workspace`);
	const graph = buildPackageDependencyGraph(workspacePackages);
	// Only packages without "private": true in package.json are published.
	const publicPackages = workspacePackages.filter((pkg) => !pkg.packageJson.private);
	logger.item(`Publishing ${publicPackages.length} public packages (private packages excluded)`);
	if (publicPackages.length === 0) {
		logger.warn("No public packages to publish");
		return;
	}
	// Topological-style ordering: each entry carries the package and its dependency level.
	const publishOrder = getPackagePublishOrder(graph, new Set(publicPackages.map((p) => p.name)));
	// Running tally for the final summary.
	const status = {
		published: [],
		skipped: [],
		failed: []
	};
	for (const order of publishOrder) {
		const pkg = order.package;
		const version = pkg.version;
		const packageName = pkg.name;
		logger.section(`📦 ${farver.cyan(packageName)} ${farver.gray(`(level ${order.level})`)}`);
		logger.step(`Checking if ${farver.cyan(`${packageName}@${version}`)} exists on NPM...`);
		const existsResult = await checkVersionExists(packageName, version);
		if (!existsResult.ok) {
			logger.error(`Failed to check version: ${existsResult.error.message}`);
			status.failed.push(packageName);
			exitWithError(`Publishing failed for ${packageName}: ${existsResult.error.message}`, "Check your network connection and NPM registry access");
		}
		// Already on the registry: idempotent skip rather than a publish conflict.
		if (existsResult.value) {
			logger.info(`Version ${farver.cyan(version)} already exists on NPM, skipping`);
			status.skipped.push(packageName);
			continue;
		}
		if (options.npm.runBuild) {
			logger.step(`Building ${farver.cyan(packageName)}...`);
			const buildResult = await buildPackage(packageName, options.workspaceRoot, options);
			if (!buildResult.ok) {
				logger.error(`Failed to build package: ${buildResult.error.message}`);
				status.failed.push(packageName);
				exitWithError(`Publishing failed for ${packageName}: build failed`, "Check your build scripts and dependencies");
			}
		}
		logger.step(`Publishing ${farver.cyan(`${packageName}@${version}`)} to NPM...`);
		const publishResult = await publishPackage(packageName, options.workspaceRoot, options);
		if (!publishResult.ok) {
			logger.error(`Failed to publish: ${publishResult.error.message}`);
			status.failed.push(packageName);
			// Translate structured npm error codes into an actionable hint for the operator.
			let hint;
			if (publishResult.error.code === "E403") hint = "Authentication failed. Ensure your NPM token or OIDC configuration is correct";
			else if (publishResult.error.code === "EPUBLISHCONFLICT") hint = "Version conflict. The version may have been published recently";
			else if (publishResult.error.code === "EOTP") hint = "2FA/OTP required. Provide the otp option or use OIDC authentication";
			exitWithError(`Publishing failed for ${packageName}`, hint);
		}
		logger.success(`Published ${farver.cyan(`${packageName}@${version}`)}`);
		status.published.push(packageName);
		logger.step(`Creating git tag ${farver.cyan(`${packageName}@${version}`)}...`);
		const tagResult = await createAndPushPackageTag(packageName, version, options.workspaceRoot);
		if (!tagResult.ok) {
			// Tag failure is non-fatal: the package is already live on the registry.
			logger.error(`Failed to create/push tag: ${tagResult.error.message}`);
			logger.warn(`Package was published but tag was not created. You may need to create it manually.`);
		} else logger.success(`Created and pushed tag ${farver.cyan(`${packageName}@${version}`)}`);
	}
	// Summary section: published / skipped / failed breakdown.
	logger.section("📊 Publishing Summary");
	logger.item(`${farver.green("✓")} Published: ${status.published.length} package(s)`);
	if (status.published.length > 0) for (const pkg of status.published) logger.item(`  ${farver.green("•")} ${pkg}`);
	if (status.skipped.length > 0) {
		logger.item(`${farver.yellow("⚠")} Skipped (already exists): ${status.skipped.length} package(s)`);
		for (const pkg of status.skipped) logger.item(`  ${farver.yellow("•")} ${pkg}`);
	}
	if (status.failed.length > 0) {
		logger.item(`${farver.red("✖")} Failed: ${status.failed.length} package(s)`);
		for (const pkg of status.failed) logger.item(`  ${farver.red("•")} ${pkg}`);
	}
	if (status.failed.length > 0) exitWithError(`Publishing completed with ${status.failed.length} failure(s)`);
	logger.success("All packages published successfully!");
}
|
|
799
2050
|
|
|
800
2051
|
//#endregion
|
|
801
|
-
//#region src/
|
|
802
|
-
async function
|
|
803
|
-
|
|
804
|
-
|
|
805
|
-
|
|
806
|
-
const [owner, repo] = options.repo.split("/");
|
|
807
|
-
if (!owner || !repo) throw new Error(`Invalid repo format: ${options.repo}. Expected "owner/repo".`);
|
|
808
|
-
if (safeguards && !await isWorkingDirectoryClean(workspaceRoot)) {
|
|
809
|
-
console.error("Working directory is not clean. Please commit or stash your changes before proceeding.");
|
|
810
|
-
return null;
|
|
2052
|
+
//#region src/workflows/verify.ts
|
|
2053
|
+
/**
 * Verify that the open release PR is still in sync with the default branch.
 *
 * Recomputes the expected version bumps from the default branch (honoring any
 * version-overrides file committed on the release branch), compares them with
 * the package.json versions at the PR's head commit, and reports the result as
 * a GitHub commit status ("ucdjs/release-verify") on that commit.
 *
 * Side effects: may check out the default branch and later return to the
 * original branch; sets a commit status via `options.githubClient`.
 *
 * @param options - Normalized release scripts options (branches, githubClient,
 *   workspaceRoot, owner/repo, safeguards flag).
 */
async function verifyWorkflow(options) {
	// Optional safeguard: refuse to run with uncommitted local changes.
	if (options.safeguards) {
		const clean = await isWorkingDirectoryClean(options.workspaceRoot);
		if (!clean.ok || !clean.value) exitWithError("Working directory is not clean. Please commit or stash your changes before proceeding.");
	}
	const releaseBranch = options.branch.release;
	const defaultBranch = options.branch.default;
	const releasePr = await options.githubClient.getExistingPullRequest(releaseBranch);
	// Nothing to verify if there is no open release PR (or it has no head ref).
	if (!releasePr || !releasePr.head) {
		logger.warn(`No open release pull request found for branch "${releaseBranch}". Nothing to verify.`);
		return;
	}
	logger.info(`Found release PR #${releasePr.number}. Verifying against default branch "${defaultBranch}"...`);
	// Remember the current branch so we can restore it after computing updates.
	const originalBranch = await getCurrentBranch(options.workspaceRoot);
	if (!originalBranch.ok) exitWithError(originalBranch.error.message);
	if (originalBranch.value !== defaultBranch) {
		const checkout = await checkoutBranch(defaultBranch, options.workspaceRoot);
		if (!checkout.ok || !checkout.value) exitWithError(`Failed to checkout branch: ${defaultBranch}`);
	}
	// Load manual version overrides committed on the release branch, if any.
	// readFileFromGit presumably reads the file content at the PR head commit —
	// NOTE(review): verify against its definition.
	let existingOverrides = {};
	try {
		const overridesContent = await readFileFromGit(options.workspaceRoot, releasePr.head.sha, ucdjsReleaseOverridesPath);
		if (overridesContent.ok && overridesContent.value) {
			existingOverrides = JSON.parse(overridesContent.value);
			logger.info("Found existing version overrides file on release branch.");
		}
	} catch {
		// Best-effort: a missing or unparsable overrides file is not fatal.
		logger.info("No version overrides file found on release branch. Continuing...");
	}
	const discovered = await discoverWorkspacePackages(options.workspaceRoot, options);
	if (!discovered.ok) exitWithError(`Failed to discover packages: ${discovered.error.message}`);
	const ensured = ensureHasPackages(discovered.value);
	if (!ensured.ok) {
		logger.warn(ensured.error.message);
		return;
	}
	const mainPackages = ensured.value;
	// Recompute the expected version bumps non-interactively from the default branch.
	const updatesResult = await calculateUpdates({
		workspacePackages: mainPackages,
		workspaceRoot: options.workspaceRoot,
		showPrompt: false,
		globalCommitMode: options.globalCommitMode === "none" ? false : options.globalCommitMode,
		overrides: existingOverrides
	});
	if (!updatesResult.ok) exitWithError(updatesResult.error.message);
	const expectedUpdates = updatesResult.value.allUpdates;
	// package name -> version that the release PR *should* carry.
	const expectedVersionMap = new Map(expectedUpdates.map((u) => [u.package.name, u.newVersion]));
	// package name -> version actually present in package.json at the PR head.
	const prVersionMap = /* @__PURE__ */ new Map();
	for (const pkg of mainPackages) {
		const pkgJsonPath = relative(options.workspaceRoot, join(pkg.path, "package.json"));
		const pkgJsonContent = await readFileFromGit(options.workspaceRoot, releasePr.head.sha, pkgJsonPath);
		if (pkgJsonContent.ok && pkgJsonContent.value) {
			const pkgJson = JSON.parse(pkgJsonContent.value);
			prVersionMap.set(pkg.name, pkgJson.version);
		}
	}
	// Restore the branch we started on before reporting results.
	if (originalBranch.value !== defaultBranch) await checkoutBranch(originalBranch.value, options.workspaceRoot);
	let isOutOfSync = false;
	for (const [pkgName, expectedVersion] of expectedVersionMap.entries()) {
		const prVersion = prVersionMap.get(pkgName);
		if (!prVersion) {
			logger.warn(`Package "${pkgName}" found in default branch but not in release branch. Skipping.`);
			continue;
		}
		// semver.gt: only flag when the PR version is strictly BEHIND the expected one;
		// a PR version equal to or ahead of the expectation counts as up to date.
		if (gt(expectedVersion, prVersion)) {
			logger.error(`Package "${pkgName}" is out of sync. Expected version >= ${expectedVersion}, but PR has ${prVersion}.`);
			isOutOfSync = true;
		} else logger.success(`Package "${pkgName}" is up to date (PR version: ${prVersion}, Expected: ${expectedVersion})`);
	}
	// Publish the verdict as a commit status on the PR head commit.
	const statusContext = "ucdjs/release-verify";
	if (isOutOfSync) {
		await options.githubClient.setCommitStatus({
			sha: releasePr.head.sha,
			state: "failure",
			context: statusContext,
			description: "Release PR is out of sync with the default branch. Please re-run the release process."
		});
		logger.error("Verification failed. Commit status set to 'failure'.");
	} else {
		await options.githubClient.setCommitStatus({
			sha: releasePr.head.sha,
			state: "success",
			context: statusContext,
			description: "Release PR is up to date.",
			targetUrl: `https://github.com/${options.owner}/${options.repo}/pull/${releasePr.number}`
		});
		logger.success("Verification successful. Commit status set to 'success'.");
	}
}
|
|
2142
|
+
|
|
2143
|
+
//#endregion
|
|
2144
|
+
//#region src/index.ts
|
|
2145
|
+
/**
 * Create the release-scripts API from user options.
 * Normalizes the options once, then exposes the verify/prepare/publish
 * workflows plus package-discovery helpers, all sharing the normalized config.
 *
 * @param options - Raw release scripts options.
 * @returns An object with `verify`, `prepare`, `publish` workflow runners and a
 *   `packages` namespace (`list`, `get`).
 */
async function createReleaseScripts(options) {
  const normalizedOptions = normalizeReleaseScriptsOptions(options);
  // Shared discovery helper: resolves the workspace packages or throws.
  const listPackages = async () => {
    const result = await discoverWorkspacePackages(normalizedOptions.workspaceRoot, normalizedOptions);
    if (!result.ok) throw new Error(result.error.message);
    return result.value;
  };
  return {
    async verify() {
      return verifyWorkflow(normalizedOptions);
    },
    async prepare() {
      return prepareWorkflow(normalizedOptions);
    },
    async publish() {
      return publishWorkflow(normalizedOptions);
    },
    packages: {
      async list() {
        return listPackages();
      },
      async get(packageName) {
        const all = await listPackages();
        return all.find((p) => p.name === packageName);
      }
    }
  };
}
|
|
904
2171
|
|
|
905
2172
|
//#endregion
|
|
906
|
-
export {
|
|
2173
|
+
export { createReleaseScripts };
|