@ucdjs/release-scripts 0.1.0-beta.6 → 0.1.0-beta.61
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +100 -1
- package/dist/{eta-Boh7yPZi.mjs → eta-g9ausaEx.mjs} +12 -10
- package/dist/index.d.mts +66 -92
- package/dist/index.mjs +2675 -677
- package/package.json +23 -11
package/dist/index.mjs
CHANGED
|
@@ -1,39 +1,100 @@
|
|
|
1
|
-
import { t as Eta } from "./eta-
|
|
2
|
-
import { getCommits } from "commit-parser";
|
|
1
|
+
import { t as Eta } from "./eta-g9ausaEx.mjs";
|
|
3
2
|
import process from "node:process";
|
|
3
|
+
import readline from "node:readline";
|
|
4
|
+
import { parseArgs } from "node:util";
|
|
4
5
|
import farver from "farver";
|
|
5
6
|
import { exec } from "tinyexec";
|
|
7
|
+
import { mkdir, readFile, rm, writeFile } from "node:fs/promises";
|
|
8
|
+
import { join, relative } from "node:path";
|
|
9
|
+
import { getCommits, groupByType } from "commit-parser";
|
|
6
10
|
import { dedent } from "@luxass/utils";
|
|
7
|
-
import {
|
|
8
|
-
import { readFile, writeFile } from "node:fs/promises";
|
|
11
|
+
import semver, { gt } from "semver";
|
|
9
12
|
import prompts from "prompts";
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
function
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
13
|
+
//#region src/shared/utils.ts
|
|
14
|
+
const ucdjsReleaseOverridesPath = ".github/ucdjs-release.overrides.json";
|
|
15
|
+
function parseCLIFlags() {
|
|
16
|
+
const { values } = parseArgs({
|
|
17
|
+
args: process.argv.slice(2),
|
|
18
|
+
options: {
|
|
19
|
+
dry: {
|
|
20
|
+
type: "boolean",
|
|
21
|
+
short: "d",
|
|
22
|
+
default: false
|
|
23
|
+
},
|
|
24
|
+
verbose: {
|
|
25
|
+
type: "boolean",
|
|
26
|
+
short: "v",
|
|
27
|
+
default: false
|
|
28
|
+
},
|
|
29
|
+
force: {
|
|
30
|
+
type: "boolean",
|
|
31
|
+
short: "f",
|
|
32
|
+
default: false
|
|
33
|
+
}
|
|
34
|
+
},
|
|
35
|
+
strict: false
|
|
36
|
+
});
|
|
37
|
+
return {
|
|
38
|
+
dry: !!values.dry,
|
|
39
|
+
verbose: !!values.verbose,
|
|
40
|
+
force: !!values.force
|
|
41
|
+
};
|
|
42
|
+
}
|
|
43
|
+
function getIsDryRun() {
|
|
44
|
+
return parseCLIFlags().dry;
|
|
45
|
+
}
|
|
46
|
+
function getIsVerbose() {
|
|
47
|
+
return parseCLIFlags().verbose;
|
|
48
|
+
}
|
|
49
|
+
function getIsCI() {
|
|
50
|
+
const ci = process.env.CI;
|
|
51
|
+
return typeof ci === "string" && ci !== "" && ci.toLowerCase() !== "false";
|
|
52
|
+
}
|
|
21
53
|
const logger = {
|
|
22
54
|
info: (...args) => {
|
|
23
|
-
console.info(
|
|
24
|
-
},
|
|
25
|
-
debug: (...args) => {
|
|
26
|
-
console.debug(farver.gray("[debug]:"), ...args);
|
|
55
|
+
console.info(...args);
|
|
27
56
|
},
|
|
28
57
|
warn: (...args) => {
|
|
29
|
-
console.warn(farver.yellow("
|
|
58
|
+
console.warn(` ${farver.yellow("⚠")}`, ...args);
|
|
30
59
|
},
|
|
31
60
|
error: (...args) => {
|
|
32
|
-
console.error(farver.red("
|
|
61
|
+
console.error(` ${farver.red("✖")}`, ...args);
|
|
33
62
|
},
|
|
34
|
-
|
|
35
|
-
if (!
|
|
63
|
+
verbose: (...args) => {
|
|
64
|
+
if (!getIsVerbose()) return;
|
|
65
|
+
if (args.length === 0) {
|
|
66
|
+
console.log();
|
|
67
|
+
return;
|
|
68
|
+
}
|
|
69
|
+
if (args.length > 1 && typeof args[0] === "string") {
|
|
70
|
+
console.log(farver.dim(args[0]), ...args.slice(1));
|
|
71
|
+
return;
|
|
72
|
+
}
|
|
36
73
|
console.log(...args);
|
|
74
|
+
},
|
|
75
|
+
section: (title) => {
|
|
76
|
+
console.log();
|
|
77
|
+
console.log(` ${farver.bold(title)}`);
|
|
78
|
+
console.log(` ${farver.gray("─".repeat(title.length + 2))}`);
|
|
79
|
+
},
|
|
80
|
+
emptyLine: () => {
|
|
81
|
+
console.log();
|
|
82
|
+
},
|
|
83
|
+
item: (message, ...args) => {
|
|
84
|
+
console.log(` ${message}`, ...args);
|
|
85
|
+
},
|
|
86
|
+
step: (message) => {
|
|
87
|
+
console.log(` ${farver.blue("→")} ${message}`);
|
|
88
|
+
},
|
|
89
|
+
success: (message) => {
|
|
90
|
+
console.log(` ${farver.green("✓")} ${message}`);
|
|
91
|
+
},
|
|
92
|
+
clearScreen: () => {
|
|
93
|
+
const repeatCount = process.stdout.rows - 2;
|
|
94
|
+
const blank = repeatCount > 0 ? "\n".repeat(repeatCount) : "";
|
|
95
|
+
console.log(blank);
|
|
96
|
+
readline.cursorTo(process.stdout, 0, 0);
|
|
97
|
+
readline.clearScreenDown(process.stdout);
|
|
37
98
|
}
|
|
38
99
|
};
|
|
39
100
|
async function run(bin, args, opts = {}) {
|
|
@@ -47,308 +108,222 @@ async function run(bin, args, opts = {}) {
|
|
|
47
108
|
});
|
|
48
109
|
}
|
|
49
110
|
async function dryRun(bin, args, opts) {
|
|
50
|
-
return logger.
|
|
51
|
-
}
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
logger.error(farver.bold(message));
|
|
55
|
-
if (hint) console.error(farver.gray(` ${hint}`));
|
|
56
|
-
process.exit(1);
|
|
57
|
-
}
|
|
58
|
-
function normalizeSharedOptions(options) {
|
|
59
|
-
const { workspaceRoot = process.cwd(), githubToken = "", verbose = false, repo, packages = true, prompts: prompts$1 = {
|
|
60
|
-
packages: true,
|
|
61
|
-
versions: true
|
|
62
|
-
},...rest } = options;
|
|
63
|
-
globalOptions.verbose = verbose;
|
|
64
|
-
if (!githubToken.trim()) exitWithError("GitHub token is required", "Set GITHUB_TOKEN environment variable or pass it in options");
|
|
65
|
-
if (!repo || !repo.trim() || !repo.includes("/")) exitWithError("Repository (repo) is required", "Specify the repository in 'owner/repo' format (e.g., 'octocat/hello-world')");
|
|
66
|
-
const [owner, name] = options.repo.split("/");
|
|
67
|
-
if (!owner || !name) exitWithError(`Invalid repo format: "${options.repo}"`, "Expected format: \"owner/repo\" (e.g., \"octocat/hello-world\")");
|
|
68
|
-
return {
|
|
69
|
-
...rest,
|
|
70
|
-
packages,
|
|
71
|
-
prompts: prompts$1,
|
|
72
|
-
workspaceRoot,
|
|
73
|
-
githubToken,
|
|
74
|
-
owner,
|
|
75
|
-
repo,
|
|
76
|
-
verbose
|
|
77
|
-
};
|
|
111
|
+
return logger.verbose(farver.blue(`[dryrun] ${bin} ${args.join(" ")}`), opts || "");
|
|
112
|
+
}
|
|
113
|
+
async function runIfNotDry(bin, args, opts) {
|
|
114
|
+
return getIsDryRun() ? dryRun(bin, args, opts) : run(bin, args, opts);
|
|
78
115
|
}
|
|
79
|
-
|
|
80
116
|
//#endregion
|
|
81
|
-
//#region src/
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
return
|
|
89
|
-
}
|
|
90
|
-
|
|
91
|
-
|
|
117
|
+
//#region src/shared/errors.ts
|
|
118
|
+
function isRecord(value) {
|
|
119
|
+
return typeof value === "object" && value !== null;
|
|
120
|
+
}
|
|
121
|
+
function toTrimmedString(value) {
|
|
122
|
+
if (typeof value === "string") {
|
|
123
|
+
const normalized = value.trim();
|
|
124
|
+
return normalized.length > 0 ? normalized : void 0;
|
|
125
|
+
}
|
|
126
|
+
if (value instanceof Uint8Array) {
|
|
127
|
+
const normalized = new TextDecoder().decode(value).trim();
|
|
128
|
+
return normalized.length > 0 ? normalized : void 0;
|
|
129
|
+
}
|
|
130
|
+
if (isRecord(value) && typeof value.toString === "function") {
|
|
131
|
+
const rendered = value.toString();
|
|
132
|
+
if (typeof rendered === "string" && rendered !== "[object Object]") {
|
|
133
|
+
const normalized = rendered.trim();
|
|
134
|
+
return normalized.length > 0 ? normalized : void 0;
|
|
135
|
+
}
|
|
92
136
|
}
|
|
93
137
|
}
|
|
94
|
-
function
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
if (bump === "major") return "major";
|
|
100
|
-
if (bump === "minor") highestBump = "minor";
|
|
101
|
-
else if (bump === "patch" && highestBump === "none") highestBump = "patch";
|
|
138
|
+
function getNestedField(record, keys) {
|
|
139
|
+
let current = record;
|
|
140
|
+
for (const key of keys) {
|
|
141
|
+
if (!isRecord(current) || !(key in current)) return;
|
|
142
|
+
current = current[key];
|
|
102
143
|
}
|
|
103
|
-
return
|
|
144
|
+
return current;
|
|
104
145
|
}
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
const
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
folder: pkg.path
|
|
125
|
-
});
|
|
126
|
-
const touchedSet = new Set(touchedCommitHashes);
|
|
127
|
-
const packageCommits = allCommits.filter((commit) => touchedSet.has(commit));
|
|
128
|
-
logger.log(`${packageCommits.length} commits affect ${pkg.name}`);
|
|
129
|
-
return packageCommits;
|
|
146
|
+
function extractStderrLike(record) {
|
|
147
|
+
const candidates = [
|
|
148
|
+
record.stderr,
|
|
149
|
+
record.stdout,
|
|
150
|
+
record.shortMessage,
|
|
151
|
+
record.originalMessage,
|
|
152
|
+
getNestedField(record, ["result", "stderr"]),
|
|
153
|
+
getNestedField(record, ["result", "stdout"]),
|
|
154
|
+
getNestedField(record, ["output", "stderr"]),
|
|
155
|
+
getNestedField(record, ["output", "stdout"]),
|
|
156
|
+
getNestedField(record, ["cause", "stderr"]),
|
|
157
|
+
getNestedField(record, ["cause", "stdout"]),
|
|
158
|
+
getNestedField(record, ["cause", "shortMessage"]),
|
|
159
|
+
getNestedField(record, ["cause", "originalMessage"])
|
|
160
|
+
];
|
|
161
|
+
for (const candidate of candidates) {
|
|
162
|
+
const rendered = toTrimmedString(candidate);
|
|
163
|
+
if (rendered) return rendered;
|
|
164
|
+
}
|
|
130
165
|
}
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
commits: await getCommitsForWorkspacePackage(workspaceRoot, pkg)
|
|
166
|
+
function formatUnknownError(error) {
|
|
167
|
+
if (error instanceof Error) {
|
|
168
|
+
const base = {
|
|
169
|
+
message: error.message || error.name,
|
|
170
|
+
stack: error.stack
|
|
137
171
|
};
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
if (!commit.isConventional || !commit.type) return "none";
|
|
146
|
-
switch (commit.type) {
|
|
147
|
-
case "feat": return "minor";
|
|
148
|
-
case "fix":
|
|
149
|
-
case "perf": return "patch";
|
|
150
|
-
case "docs":
|
|
151
|
-
case "style":
|
|
152
|
-
case "refactor":
|
|
153
|
-
case "test":
|
|
154
|
-
case "build":
|
|
155
|
-
case "ci":
|
|
156
|
-
case "chore":
|
|
157
|
-
case "revert": return "none";
|
|
158
|
-
default: return "none";
|
|
172
|
+
const maybeError = error;
|
|
173
|
+
if (typeof maybeError.code === "string") base.code = maybeError.code;
|
|
174
|
+
if (typeof maybeError.status === "number") base.status = maybeError.status;
|
|
175
|
+
base.stderr = extractStderrLike(maybeError);
|
|
176
|
+
if (typeof maybeError.shortMessage === "string" && maybeError.shortMessage.trim() && base.message.startsWith("Process exited with non-zero status")) base.message = maybeError.shortMessage.trim();
|
|
177
|
+
if (!base.stderr && typeof maybeError.cause === "string" && maybeError.cause.trim()) base.stderr = maybeError.cause.trim();
|
|
178
|
+
return base;
|
|
159
179
|
}
|
|
160
|
-
}
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
* @returns {Promise<boolean>} A Promise resolving to true if clean, false otherwise
|
|
168
|
-
*/
|
|
169
|
-
async function isWorkingDirectoryClean(workspaceRoot) {
|
|
170
|
-
try {
|
|
171
|
-
if ((await run("git", ["status", "--porcelain"], { nodeOptions: {
|
|
172
|
-
cwd: workspaceRoot,
|
|
173
|
-
stdio: "pipe"
|
|
174
|
-
} })).stdout.trim() !== "") return false;
|
|
175
|
-
return true;
|
|
176
|
-
} catch (err) {
|
|
177
|
-
logger.error("Error checking git status:", err);
|
|
178
|
-
return false;
|
|
180
|
+
if (typeof error === "string") return { message: error };
|
|
181
|
+
if (isRecord(error)) {
|
|
182
|
+
const formatted = { message: typeof error.message === "string" ? error.message : typeof error.error === "string" ? error.error : JSON.stringify(error) };
|
|
183
|
+
if (typeof error.code === "string") formatted.code = error.code;
|
|
184
|
+
if (typeof error.status === "number") formatted.status = error.status;
|
|
185
|
+
formatted.stderr = extractStderrLike(error);
|
|
186
|
+
return formatted;
|
|
179
187
|
}
|
|
188
|
+
return { message: String(error) };
|
|
180
189
|
}
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
try {
|
|
189
|
-
await run("git", [
|
|
190
|
-
"rev-parse",
|
|
191
|
-
"--verify",
|
|
192
|
-
branch
|
|
193
|
-
], { nodeOptions: {
|
|
194
|
-
cwd: workspaceRoot,
|
|
195
|
-
stdio: "pipe"
|
|
196
|
-
} });
|
|
197
|
-
return true;
|
|
198
|
-
} catch {
|
|
199
|
-
return false;
|
|
190
|
+
var ReleaseError = class extends Error {
|
|
191
|
+
hint;
|
|
192
|
+
constructor(message, hint, cause) {
|
|
193
|
+
super(message);
|
|
194
|
+
this.name = "ReleaseError";
|
|
195
|
+
this.hint = hint;
|
|
196
|
+
this.cause = cause;
|
|
200
197
|
}
|
|
201
|
-
}
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
} });
|
|
218
|
-
return true;
|
|
219
|
-
} catch {
|
|
220
|
-
return false;
|
|
198
|
+
};
|
|
199
|
+
function printReleaseError(error) {
|
|
200
|
+
console.error(` ${farver.red("✖")} ${farver.bold(error.message)}`);
|
|
201
|
+
if (error.cause !== void 0) {
|
|
202
|
+
const formatted = formatUnknownError(error.cause);
|
|
203
|
+
if (formatted.message && formatted.message !== error.message) console.error(farver.gray(` Cause: ${formatted.message}`));
|
|
204
|
+
if (formatted.code) console.error(farver.gray(` Code: ${formatted.code}`));
|
|
205
|
+
if (typeof formatted.status === "number") console.error(farver.gray(` Status: ${formatted.status}`));
|
|
206
|
+
if (formatted.stderr) {
|
|
207
|
+
console.error(farver.gray(" Stderr:"));
|
|
208
|
+
console.error(farver.gray(` ${formatted.stderr}`));
|
|
209
|
+
}
|
|
210
|
+
if (getIsVerbose() && formatted.stack) {
|
|
211
|
+
console.error(farver.gray(" Stack:"));
|
|
212
|
+
console.error(farver.gray(` ${formatted.stack}`));
|
|
213
|
+
}
|
|
221
214
|
}
|
|
215
|
+
if (error.hint) console.error(farver.gray(` ${error.hint}`));
|
|
222
216
|
}
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
* @param branch - The new branch name
|
|
226
|
-
* @param base - The base branch to create from
|
|
227
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
228
|
-
*/
|
|
229
|
-
async function createBranch(branch, base, workspaceRoot) {
|
|
230
|
-
await runIfNotDry("git", [
|
|
231
|
-
"checkout",
|
|
232
|
-
"-b",
|
|
233
|
-
branch,
|
|
234
|
-
base
|
|
235
|
-
], { nodeOptions: { cwd: workspaceRoot } });
|
|
217
|
+
function exitWithError(message, hint, cause) {
|
|
218
|
+
throw new ReleaseError(message, hint, cause);
|
|
236
219
|
}
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
220
|
+
//#endregion
|
|
221
|
+
//#region src/operations/changelog-format.ts
|
|
222
|
+
const HASH_PREFIX_RE = /^#/;
|
|
223
|
+
function formatCommitLine({ commit, owner, repo, authors }) {
|
|
224
|
+
const commitUrl = `https://github.com/${owner}/${repo}/commit/${commit.hash}`;
|
|
225
|
+
let line = `${commit.description}`;
|
|
226
|
+
const references = commit.references ?? [];
|
|
227
|
+
for (const ref of references) {
|
|
228
|
+
if (!ref.value) continue;
|
|
229
|
+
const number = Number.parseInt(ref.value.replace(HASH_PREFIX_RE, ""), 10);
|
|
230
|
+
if (Number.isNaN(number)) continue;
|
|
231
|
+
if (ref.type === "issue") {
|
|
232
|
+
line += ` ([Issue ${ref.value}](https://github.com/${owner}/${repo}/issues/${number}))`;
|
|
233
|
+
continue;
|
|
234
|
+
}
|
|
235
|
+
line += ` ([PR ${ref.value}](https://github.com/${owner}/${repo}/pull/${number}))`;
|
|
249
236
|
}
|
|
250
|
-
}
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
* @returns Promise resolving to the current branch name
|
|
255
|
-
*/
|
|
256
|
-
async function getCurrentBranch(workspaceRoot) {
|
|
257
|
-
return (await run("git", [
|
|
258
|
-
"rev-parse",
|
|
259
|
-
"--abbrev-ref",
|
|
260
|
-
"HEAD"
|
|
261
|
-
], { nodeOptions: {
|
|
262
|
-
cwd: workspaceRoot,
|
|
263
|
-
stdio: "pipe"
|
|
264
|
-
} })).stdout.trim();
|
|
265
|
-
}
|
|
266
|
-
/**
|
|
267
|
-
* Rebase current branch onto another branch
|
|
268
|
-
* @param ontoBranch - The target branch to rebase onto
|
|
269
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
270
|
-
*/
|
|
271
|
-
async function rebaseBranch(ontoBranch, workspaceRoot) {
|
|
272
|
-
await run("git", ["rebase", ontoBranch], { nodeOptions: { cwd: workspaceRoot } });
|
|
273
|
-
}
|
|
274
|
-
/**
|
|
275
|
-
* Check if local branch is ahead of remote (has commits to push)
|
|
276
|
-
* @param branch - The branch name to check
|
|
277
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
278
|
-
* @returns Promise resolving to true if local is ahead, false otherwise
|
|
279
|
-
*/
|
|
280
|
-
async function isBranchAheadOfRemote(branch, workspaceRoot) {
|
|
281
|
-
try {
|
|
282
|
-
const result = await run("git", [
|
|
283
|
-
"rev-list",
|
|
284
|
-
`origin/${branch}..${branch}`,
|
|
285
|
-
"--count"
|
|
286
|
-
], { nodeOptions: {
|
|
287
|
-
cwd: workspaceRoot,
|
|
288
|
-
stdio: "pipe"
|
|
289
|
-
} });
|
|
290
|
-
return Number.parseInt(result.stdout.trim(), 10) > 0;
|
|
291
|
-
} catch {
|
|
292
|
-
return true;
|
|
237
|
+
line += ` ([${commit.shortHash}](${commitUrl}))`;
|
|
238
|
+
if (authors.length > 0) {
|
|
239
|
+
const authorList = authors.map((author) => author.login ? `[@${author.login}](https://github.com/${author.login})` : author.name).join(", ");
|
|
240
|
+
line += ` (by ${authorList})`;
|
|
293
241
|
}
|
|
242
|
+
return line;
|
|
294
243
|
}
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
return (
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
if (!await hasChangesToCommit(workspaceRoot)) return false;
|
|
315
|
-
await run("git", [
|
|
316
|
-
"commit",
|
|
317
|
-
"-m",
|
|
318
|
-
message
|
|
319
|
-
], { nodeOptions: { cwd: workspaceRoot } });
|
|
320
|
-
return true;
|
|
321
|
-
}
|
|
322
|
-
/**
|
|
323
|
-
* Push branch to remote
|
|
324
|
-
* @param branch - The branch name to push
|
|
325
|
-
* @param workspaceRoot - The root directory of the workspace
|
|
326
|
-
* @param options - Push options
|
|
327
|
-
* @param options.force - Force push (overwrite remote)
|
|
328
|
-
* @param options.forceWithLease - Force push with safety check (won't overwrite unexpected changes)
|
|
329
|
-
*/
|
|
330
|
-
async function pushBranch(branch, workspaceRoot, options) {
|
|
331
|
-
const args = [
|
|
332
|
-
"push",
|
|
333
|
-
"origin",
|
|
334
|
-
branch
|
|
335
|
-
];
|
|
336
|
-
if (options?.forceWithLease) args.push("--force-with-lease");
|
|
337
|
-
else if (options?.force) args.push("--force");
|
|
338
|
-
await run("git", args, { nodeOptions: { cwd: workspaceRoot } });
|
|
244
|
+
function buildTemplateGroups(options) {
|
|
245
|
+
const { commits, owner, repo, types, commitAuthors } = options;
|
|
246
|
+
const grouped = groupByType(commits, {
|
|
247
|
+
includeNonConventional: false,
|
|
248
|
+
mergeKeys: Object.fromEntries(Object.entries(types).map(([key, value]) => [key, value.types ?? [key]]))
|
|
249
|
+
});
|
|
250
|
+
return Object.entries(types).map(([key, value]) => {
|
|
251
|
+
const formattedCommits = (grouped.get(key) ?? []).map((commit) => ({ line: formatCommitLine({
|
|
252
|
+
commit,
|
|
253
|
+
owner,
|
|
254
|
+
repo,
|
|
255
|
+
authors: commitAuthors.get(commit.hash) ?? []
|
|
256
|
+
}) }));
|
|
257
|
+
return {
|
|
258
|
+
name: key,
|
|
259
|
+
title: value.title,
|
|
260
|
+
commits: formattedCommits
|
|
261
|
+
};
|
|
262
|
+
});
|
|
339
263
|
}
|
|
340
|
-
|
|
341
264
|
//#endregion
|
|
342
|
-
//#region src/github.ts
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
265
|
+
//#region src/core/github.ts
|
|
266
|
+
function toGitHubError(operation, error) {
|
|
267
|
+
const formatted = formatUnknownError(error);
|
|
268
|
+
return {
|
|
269
|
+
type: "github",
|
|
270
|
+
operation,
|
|
271
|
+
message: formatted.message,
|
|
272
|
+
status: formatted.status
|
|
273
|
+
};
|
|
274
|
+
}
|
|
275
|
+
var GitHubClient = class {
|
|
276
|
+
owner;
|
|
277
|
+
repo;
|
|
278
|
+
githubToken;
|
|
279
|
+
apiBase = "https://api.github.com";
|
|
280
|
+
constructor({ owner, repo, githubToken }) {
|
|
281
|
+
this.owner = owner;
|
|
282
|
+
this.repo = repo;
|
|
283
|
+
this.githubToken = githubToken;
|
|
284
|
+
}
|
|
285
|
+
async request(path, init = {}) {
|
|
286
|
+
const url = path.startsWith("http") ? path : `${this.apiBase}${path}`;
|
|
287
|
+
const method = init.method ?? "GET";
|
|
288
|
+
let res;
|
|
289
|
+
try {
|
|
290
|
+
res = await fetch(url, {
|
|
291
|
+
...init,
|
|
292
|
+
headers: {
|
|
293
|
+
...init.headers,
|
|
294
|
+
"Accept": "application/vnd.github.v3+json",
|
|
295
|
+
"Authorization": `token ${this.githubToken}`,
|
|
296
|
+
"User-Agent": "ucdjs-release-scripts (+https://github.com/ucdjs/ucdjs-release-scripts)"
|
|
297
|
+
}
|
|
298
|
+
});
|
|
299
|
+
} catch (error) {
|
|
300
|
+
throw Object.assign(/* @__PURE__ */ new Error(`[${method} ${path}] GitHub request failed: ${formatUnknownError(error).message}`), { status: void 0 });
|
|
301
|
+
}
|
|
302
|
+
if (!res.ok) {
|
|
303
|
+
const errorText = await res.text();
|
|
304
|
+
const parsedMessage = (() => {
|
|
305
|
+
try {
|
|
306
|
+
const parsed = JSON.parse(errorText);
|
|
307
|
+
if (typeof parsed.message === "string" && parsed.message.trim()) {
|
|
308
|
+
if (Array.isArray(parsed.errors) && parsed.errors.length > 0) return `${parsed.message} (${JSON.stringify(parsed.errors)})`;
|
|
309
|
+
return parsed.message;
|
|
310
|
+
}
|
|
311
|
+
return errorText;
|
|
312
|
+
} catch {
|
|
313
|
+
return errorText;
|
|
314
|
+
}
|
|
315
|
+
})();
|
|
316
|
+
throw Object.assign(/* @__PURE__ */ new Error(`[${method} ${path}] GitHub API request failed (${res.status} ${res.statusText}): ${parsedMessage || "No response body"}`), { status: res.status });
|
|
317
|
+
}
|
|
318
|
+
if (res.status === 204) return;
|
|
319
|
+
return res.json();
|
|
320
|
+
}
|
|
321
|
+
async getExistingPullRequest(branch) {
|
|
322
|
+
const head = branch.includes(":") ? branch : `${this.owner}:${branch}`;
|
|
323
|
+
const endpoint = `/repos/${this.owner}/${this.repo}/pulls?state=open&head=${encodeURIComponent(head)}`;
|
|
324
|
+
logger.verbose(`Requesting pull request for branch: ${branch} (url: ${this.apiBase}${endpoint})`);
|
|
325
|
+
const pulls = await this.request(endpoint);
|
|
326
|
+
if (!Array.isArray(pulls) || pulls.length === 0) return null;
|
|
352
327
|
const firstPullRequest = pulls[0];
|
|
353
328
|
if (typeof firstPullRequest !== "object" || firstPullRequest === null || !("number" in firstPullRequest) || typeof firstPullRequest.number !== "number" || !("title" in firstPullRequest) || typeof firstPullRequest.title !== "string" || !("body" in firstPullRequest) || typeof firstPullRequest.body !== "string" || !("draft" in firstPullRequest) || typeof firstPullRequest.draft !== "boolean" || !("html_url" in firstPullRequest) || typeof firstPullRequest.html_url !== "string") throw new TypeError("Pull request data validation failed");
|
|
354
329
|
const pullRequest = {
|
|
@@ -356,20 +331,15 @@ async function getExistingPullRequest({ owner, repo, branch, githubToken }) {
|
|
|
356
331
|
title: firstPullRequest.title,
|
|
357
332
|
body: firstPullRequest.body,
|
|
358
333
|
draft: firstPullRequest.draft,
|
|
359
|
-
html_url: firstPullRequest.html_url
|
|
334
|
+
html_url: firstPullRequest.html_url,
|
|
335
|
+
head: "head" in firstPullRequest && typeof firstPullRequest.head === "object" && firstPullRequest.head !== null && "sha" in firstPullRequest.head && typeof firstPullRequest.head.sha === "string" ? { sha: firstPullRequest.head.sha } : void 0
|
|
360
336
|
};
|
|
361
337
|
logger.info(`Found existing pull request: ${farver.yellow(`#${pullRequest.number}`)}`);
|
|
362
338
|
return pullRequest;
|
|
363
|
-
} catch (err) {
|
|
364
|
-
logger.error("Error fetching pull request:", err);
|
|
365
|
-
return null;
|
|
366
339
|
}
|
|
367
|
-
}
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
const isUpdate = pullNumber != null;
|
|
371
|
-
const url = isUpdate ? `https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}` : `https://api.github.com/repos/${owner}/${repo}/pulls`;
|
|
372
|
-
const method = isUpdate ? "PATCH" : "POST";
|
|
340
|
+
async upsertPullRequest({ title, body, head, base, pullNumber }) {
|
|
341
|
+
const isUpdate = typeof pullNumber === "number";
|
|
342
|
+
const endpoint = isUpdate ? `/repos/${this.owner}/${this.repo}/pulls/${pullNumber}` : `/repos/${this.owner}/${this.repo}/pulls`;
|
|
373
343
|
const requestBody = isUpdate ? {
|
|
374
344
|
title,
|
|
375
345
|
body
|
|
@@ -377,18 +347,14 @@ async function upsertPullRequest({ owner, repo, title, body, head, base, pullNum
|
|
|
377
347
|
title,
|
|
378
348
|
body,
|
|
379
349
|
head,
|
|
380
|
-
base
|
|
350
|
+
base,
|
|
351
|
+
draft: true
|
|
381
352
|
};
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
Accept: "application/vnd.github.v3+json",
|
|
386
|
-
Authorization: `token ${githubToken}`
|
|
387
|
-
},
|
|
353
|
+
logger.verbose(`${isUpdate ? "Updating" : "Creating"} pull request (url: ${this.apiBase}${endpoint})`);
|
|
354
|
+
const pr = await this.request(endpoint, {
|
|
355
|
+
method: isUpdate ? "PATCH" : "POST",
|
|
388
356
|
body: JSON.stringify(requestBody)
|
|
389
357
|
});
|
|
390
|
-
if (!res.ok) throw new Error(`GitHub API request failed with status ${res.status}`);
|
|
391
|
-
const pr = await res.json();
|
|
392
358
|
if (typeof pr !== "object" || pr === null || !("number" in pr) || typeof pr.number !== "number" || !("title" in pr) || typeof pr.title !== "string" || !("body" in pr) || typeof pr.body !== "string" || !("draft" in pr) || typeof pr.draft !== "boolean" || !("html_url" in pr) || typeof pr.html_url !== "string") throw new TypeError("Pull request data validation failed");
|
|
393
359
|
const action = isUpdate ? "Updated" : "Created";
|
|
394
360
|
logger.info(`${action} pull request: ${farver.yellow(`#${pr.number}`)}`);
|
|
@@ -399,35 +365,106 @@ async function upsertPullRequest({ owner, repo, title, body, head, base, pullNum
|
|
|
399
365
|
draft: pr.draft,
|
|
400
366
|
html_url: pr.html_url
|
|
401
367
|
};
|
|
402
|
-
} catch (err) {
|
|
403
|
-
logger.error(`Error upserting pull request:`, err);
|
|
404
|
-
throw err;
|
|
405
368
|
}
|
|
369
|
+
async setCommitStatus({ sha, state, targetUrl, description, context }) {
|
|
370
|
+
const endpoint = `/repos/${this.owner}/${this.repo}/statuses/${sha}`;
|
|
371
|
+
logger.verbose(`Setting commit status on ${sha} to ${state} (url: ${this.apiBase}${endpoint})`);
|
|
372
|
+
await this.request(endpoint, {
|
|
373
|
+
method: "POST",
|
|
374
|
+
body: JSON.stringify({
|
|
375
|
+
state,
|
|
376
|
+
target_url: targetUrl,
|
|
377
|
+
description: description || "",
|
|
378
|
+
context
|
|
379
|
+
})
|
|
380
|
+
});
|
|
381
|
+
logger.info(`Commit status set to ${farver.cyan(state)} for ${farver.gray(sha.substring(0, 7))}`);
|
|
382
|
+
}
|
|
383
|
+
async upsertReleaseByTag({ tagName, name, body, prerelease = false }) {
|
|
384
|
+
const encodedTag = encodeURIComponent(tagName);
|
|
385
|
+
let existingRelease = null;
|
|
386
|
+
try {
|
|
387
|
+
existingRelease = await this.request(`/repos/${this.owner}/${this.repo}/releases/tags/${encodedTag}`);
|
|
388
|
+
} catch (error) {
|
|
389
|
+
if (formatUnknownError(error).status !== 404) throw error;
|
|
390
|
+
}
|
|
391
|
+
if (existingRelease) {
|
|
392
|
+
logger.verbose(`Updating release for tag ${farver.cyan(tagName)}`);
|
|
393
|
+
const updated = await this.request(`/repos/${this.owner}/${this.repo}/releases/${existingRelease.id}`, {
|
|
394
|
+
method: "PATCH",
|
|
395
|
+
body: JSON.stringify({
|
|
396
|
+
name,
|
|
397
|
+
body,
|
|
398
|
+
prerelease,
|
|
399
|
+
draft: false
|
|
400
|
+
})
|
|
401
|
+
});
|
|
402
|
+
logger.info(`Updated GitHub release for ${farver.cyan(tagName)}`);
|
|
403
|
+
return {
|
|
404
|
+
release: {
|
|
405
|
+
id: updated.id,
|
|
406
|
+
tagName: updated.tag_name,
|
|
407
|
+
name: updated.name ?? name,
|
|
408
|
+
htmlUrl: updated.html_url
|
|
409
|
+
},
|
|
410
|
+
created: false
|
|
411
|
+
};
|
|
412
|
+
}
|
|
413
|
+
logger.verbose(`Creating release for tag ${farver.cyan(tagName)}`);
|
|
414
|
+
const created = await this.request(`/repos/${this.owner}/${this.repo}/releases`, {
|
|
415
|
+
method: "POST",
|
|
416
|
+
body: JSON.stringify({
|
|
417
|
+
tag_name: tagName,
|
|
418
|
+
name,
|
|
419
|
+
body,
|
|
420
|
+
prerelease,
|
|
421
|
+
draft: false,
|
|
422
|
+
generate_release_notes: body == null
|
|
423
|
+
})
|
|
424
|
+
});
|
|
425
|
+
logger.info(`Created GitHub release for ${farver.cyan(tagName)}`);
|
|
426
|
+
return {
|
|
427
|
+
release: {
|
|
428
|
+
id: created.id,
|
|
429
|
+
tagName: created.tag_name,
|
|
430
|
+
name: created.name ?? name,
|
|
431
|
+
htmlUrl: created.html_url
|
|
432
|
+
},
|
|
433
|
+
created: true
|
|
434
|
+
};
|
|
435
|
+
}
|
|
436
|
+
async resolveAuthorInfo(info) {
|
|
437
|
+
if (info.login) return info;
|
|
438
|
+
try {
|
|
439
|
+
const q = encodeURIComponent(`${info.email} type:user in:email`);
|
|
440
|
+
const data = await this.request(`/search/users?q=${q}`);
|
|
441
|
+
if (!data.items || data.items.length === 0) return info;
|
|
442
|
+
info.login = data.items[0].login;
|
|
443
|
+
} catch (err) {
|
|
444
|
+
logger.warn(`Failed to resolve author info for email ${info.email}: ${formatUnknownError(err).message}`);
|
|
445
|
+
}
|
|
446
|
+
if (info.login) return info;
|
|
447
|
+
if (info.commits.length > 0) try {
|
|
448
|
+
const data = await this.request(`/repos/${this.owner}/${this.repo}/commits/${info.commits[0]}`);
|
|
449
|
+
if (data.author && data.author.login) info.login = data.author.login;
|
|
450
|
+
} catch (err) {
|
|
451
|
+
logger.warn(`Failed to resolve author info from commits for email ${info.email}: ${formatUnknownError(err).message}`);
|
|
452
|
+
}
|
|
453
|
+
return info;
|
|
454
|
+
}
|
|
455
|
+
};
|
|
456
|
+
function createGitHubClient(options) {
|
|
457
|
+
return new GitHubClient(options);
|
|
406
458
|
}
|
|
407
|
-
const
|
|
408
|
-
This PR was automatically generated by the release script.
|
|
409
|
-
|
|
410
|
-
The following packages have been prepared for release:
|
|
411
|
-
|
|
412
|
-
<% it.packages.forEach((pkg) => { %>
|
|
413
|
-
- **<%= pkg.name %>**: <%= pkg.currentVersion %> → <%= pkg.newVersion %> (<%= pkg.bumpType %>)
|
|
414
|
-
<% }) %>
|
|
415
|
-
|
|
416
|
-
Please review the changes and merge when ready.
|
|
417
|
-
|
|
418
|
-
For a more in-depth look at the changes, please refer to the individual package changelogs.
|
|
419
|
-
|
|
420
|
-
> [!NOTE]
|
|
421
|
-
> When this PR is merged, the release process will be triggered automatically, publishing the new package versions to the registry.
|
|
422
|
-
`;
|
|
459
|
+
const NON_WHITESPACE_RE = /\S/;
|
|
423
460
|
function dedentString(str) {
|
|
424
461
|
const lines = str.split("\n");
|
|
425
|
-
const minIndent = lines.filter((line) => line.trim().length > 0).reduce((min, line) => Math.min(min, line.search(
|
|
462
|
+
const minIndent = lines.filter((line) => line.trim().length > 0).reduce((min, line) => Math.min(min, line.search(NON_WHITESPACE_RE)), Infinity);
|
|
426
463
|
return lines.map((line) => minIndent === Infinity ? line : line.slice(minIndent)).join("\n").trim();
|
|
427
464
|
}
|
|
428
465
|
function generatePullRequestBody(updates, body) {
|
|
429
466
|
const eta = new Eta();
|
|
430
|
-
const bodyTemplate = body ? dedentString(body) :
|
|
467
|
+
const bodyTemplate = body ? dedentString(body) : DEFAULT_PR_BODY_TEMPLATE;
|
|
431
468
|
return eta.renderString(bodyTemplate, { packages: updates.map((u) => ({
|
|
432
469
|
name: u.package.name,
|
|
433
470
|
currentVersion: u.currentVersion,
|
|
@@ -436,102 +473,1277 @@ function generatePullRequestBody(updates, body) {
|
|
|
436
473
|
hasDirectChanges: u.hasDirectChanges
|
|
437
474
|
})) });
|
|
438
475
|
}
|
|
439
|
-
|
|
440
476
|
//#endregion
|
|
441
|
-
//#region src/
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
477
|
+
//#region src/options.ts
|
|
478
|
+
const DEFAULT_PR_BODY_TEMPLATE = dedent`
|
|
479
|
+
This PR was automatically generated by the UCD release scripts.
|
|
480
|
+
|
|
481
|
+
The following packages have been prepared for release:
|
|
482
|
+
|
|
483
|
+
<% if (it.packages.length > 0) { %>
|
|
484
|
+
<% it.packages.forEach((pkg) => { %>
|
|
485
|
+
- **<%= pkg.name %>**: <%= pkg.currentVersion %> → <%= pkg.newVersion %> (<%= pkg.bumpType %>)
|
|
486
|
+
<% }) %>
|
|
487
|
+
<% } else { %>
|
|
488
|
+
There are no packages to release.
|
|
489
|
+
<% } %>
|
|
490
|
+
|
|
491
|
+
Please review the changes and merge when ready.
|
|
492
|
+
|
|
493
|
+
> [!NOTE]
|
|
494
|
+
> When this PR is merged, the release process will be triggered automatically, publishing the new package versions to the registry.
|
|
495
|
+
`;
|
|
496
|
+
const DEFAULT_CHANGELOG_TEMPLATE = dedent`
|
|
497
|
+
<% if (it.previousVersion) { -%>
|
|
498
|
+
## [<%= it.version %>](<%= it.compareUrl %>) (<%= it.date %>)
|
|
499
|
+
<% } else { -%>
|
|
500
|
+
## <%= it.version %> (<%= it.date %>)
|
|
501
|
+
<% } %>
|
|
502
|
+
<% let hasCommits = false; %>
|
|
503
|
+
|
|
504
|
+
<% it.groups.forEach((group) => { %>
|
|
505
|
+
<% if (group.commits.length > 0) { %>
|
|
506
|
+
<% hasCommits = true; %>
|
|
507
|
+
|
|
508
|
+
### <%= group.title %>
|
|
509
|
+
<% group.commits.forEach((commit) => { %>
|
|
510
|
+
|
|
511
|
+
* <%= commit.line %>
|
|
512
|
+
<% }); %>
|
|
513
|
+
|
|
514
|
+
<% } %>
|
|
515
|
+
<% }); %>
|
|
516
|
+
|
|
517
|
+
<% if (!hasCommits) { %>
|
|
518
|
+
*No significant changes*
|
|
519
|
+
|
|
520
|
+
##### [View changes on GitHub](<%= it.compareUrl %>)
|
|
521
|
+
<% } %>
|
|
522
|
+
`;
|
|
523
|
+
// Default conventional-commit type → changelog section mapping. Can be
// extended/overridden via the `types` option (merged over these defaults
// in normalizeReleaseScriptsOptions).
const DEFAULT_TYPES = {
	feat: { title: "🚀 Features" },
	fix: { title: "🐞 Bug Fixes" },
	perf: { title: "🏎 Performance" },
	docs: { title: "📚 Documentation" },
	style: { title: "🎨 Styles" }
};
|
|
530
|
+
/**
 * Validates user-supplied release options and fills in every default,
 * producing the fully-resolved options object used by the rest of the
 * release pipeline (including a ready-made GitHub client).
 *
 * @param {object} options - Raw user options.
 * @returns {object} normalized options with all defaults applied.
 * @throws {Error} when the GitHub token is missing/blank, or when `repo`
 *   is missing or not in "owner/repo" form.
 */
function normalizeReleaseScriptsOptions(options) {
	// NOTE(review): the destructured `prompts` option shadows the imported
	// `prompts` module inside this function — confirm that is intentional.
	const { workspaceRoot = process.cwd(), githubToken = "", repo: fullRepo, packages = true, branch = {}, globalCommitMode = "dependencies", pullRequest = {}, changelog = {}, types, safeguards = true, dryRun = false, npm = {}, prompts = {} } = options;
	const token = githubToken.trim();
	if (!token) throw new Error("GitHub token is required. Pass it in via options.");
	if (!fullRepo || !fullRepo.trim() || !fullRepo.includes("/")) throw new Error("Repository (repo) is required. Specify in 'owner/repo' format (e.g., 'octocat/hello-world').");
	// NOTE(review): "a/b/c" silently yields owner="a", repo="b" — extra
	// segments are dropped rather than rejected.
	const [owner, repo] = fullRepo.split("/");
	if (!owner || !repo) throw new Error(`Invalid repo format: "${fullRepo}". Expected format: "owner/repo" (e.g., "octocat/hello-world").`);
	// `packages` may be `true` (all), an array, or a filter object; only the
	// object form needs its fields defaulted.
	const normalizedPackages = typeof packages === "object" && !Array.isArray(packages) ? {
		exclude: packages.exclude ?? [],
		include: packages.include ?? [],
		excludePrivate: packages.excludePrivate ?? false
	} : packages;
	// Interactive prompts default to ON locally and OFF in CI.
	const isCI = process.env.CI === "true" || process.env.GITHUB_ACTIONS === "true";
	return {
		dryRun,
		workspaceRoot,
		githubToken: token,
		owner,
		repo,
		githubClient: createGitHubClient({
			owner,
			repo,
			githubToken: token
		}),
		packages: normalizedPackages,
		branch: {
			release: branch.release ?? "release/next",
			default: branch.default ?? "main"
		},
		globalCommitMode,
		safeguards,
		pullRequest: {
			title: pullRequest.title ?? "chore: release new version",
			body: pullRequest.body ?? DEFAULT_PR_BODY_TEMPLATE
		},
		changelog: {
			enabled: changelog.enabled ?? true,
			template: changelog.template ?? DEFAULT_CHANGELOG_TEMPLATE,
			emojis: changelog.emojis ?? true,
			combinePrereleaseIntoFirstStable: changelog.combinePrereleaseIntoFirstStable ?? false
		},
		// User-supplied types are merged OVER the defaults, not replacing them.
		types: types ? {
			...DEFAULT_TYPES,
			...types
		} : DEFAULT_TYPES,
		npm: {
			otp: npm.otp,
			provenance: npm.provenance ?? true,
			access: npm.access ?? "public"
		},
		prompts: {
			versions: prompts.versions ?? !isCI,
			packages: prompts.packages ?? !isCI
		}
	};
}
|
|
586
|
+
//#endregion
|
|
587
|
+
//#region src/types.ts
|
|
588
|
+
/**
 * Wraps a value in a successful Result.
 * @param {*} value - The success payload.
 * @returns {{ok: true, value: *}} a success Result.
 */
function ok(value) {
	return { ok: true, value };
}
|
|
594
|
+
/**
 * Wraps an error value in a failed Result.
 * @param {*} error - The failure payload.
 * @returns {{ok: false, error: *}} a failure Result.
 */
function err(error) {
	return { ok: false, error };
}
|
|
600
|
+
//#endregion
|
|
601
|
+
//#region src/core/git.ts
|
|
602
|
+
// Extracts the branch name from `git checkout`'s stderr confirmation line.
const CHECKOUT_BRANCH_RE = /Switched to (?:a new )?branch '(.+)'/;
// A full 40-hex-char git commit SHA (case-insensitive).
const COMMIT_HASH_RE = /^[0-9a-f]{40}$/i;
|
|
604
|
+
/**
 * Converts an unknown thrown value into a structured GitError object.
 * @param {string} operation - Name of the git helper that failed.
 * @param {unknown} error - The thrown value.
 * @returns {{type: "git", operation: string, message: string, stderr: (string|undefined)}}
 */
function toGitError(operation, error) {
	const { message, stderr } = formatUnknownError(error);
	return {
		type: "git",
		operation,
		message,
		stderr
	};
}
|
|
613
|
+
/**
 * Detects git's "no author identity configured" failure by scanning the
 * error message and stderr for git's known identity-related phrases.
 * @param {unknown} error - The thrown value from a git command.
 * @returns {boolean} true when the failure is a missing git identity.
 */
function isMissingGitIdentityError(error) {
	const formatted = formatUnknownError(error);
	const combined = `${formatted.message}\n${formatted.stderr ?? ""}`;
	const identityMarkers = ["Author identity unknown", "empty ident name", "Please tell me who you are"];
	return identityMarkers.some((marker) => combined.includes(marker));
}
|
|
618
|
+
/**
 * Configures a repository-local git user.name/user.email so commits can be
 * created in environments (e.g. CI) where no git identity is set.
 *
 * Name/email are resolved from GIT_AUTHOR_* / GIT_COMMITTER_* env vars,
 * then GITHUB_ACTOR, then a github-actions[bot] fallback.
 *
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @returns {Promise<Result<void, GitError>>} ok on success, err on git failure.
 */
async function ensureLocalGitIdentity(workspaceRoot) {
	try {
		const actor = process.env.GITHUB_ACTOR?.trim();
		// `||` (not `??`) is deliberate here: empty strings fall through too.
		const name = process.env.GIT_AUTHOR_NAME?.trim() || process.env.GIT_COMMITTER_NAME?.trim() || actor || "github-actions[bot]";
		const email = process.env.GIT_AUTHOR_EMAIL?.trim() || process.env.GIT_COMMITTER_EMAIL?.trim() || (actor ? `${actor}@users.noreply.github.com` : "github-actions[bot]@users.noreply.github.com");
		logger.warn("Git author identity missing. Configuring repository-local git identity for this run.");
		// Repository-local (not --global) config; skipped entirely in dry runs.
		await runIfNotDry("git", [
			"config",
			"user.name",
			name
		], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		await runIfNotDry("git", [
			"config",
			"user.email",
			email
		], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		logger.info(`Configured git identity: ${farver.dim(`${name} <${email}>`)}`);
		return ok(void 0);
	} catch (error) {
		return err(toGitError("ensureLocalGitIdentity", error));
	}
}
|
|
646
|
+
/**
 * Runs `git commit -m <message>`, and if it fails because no git identity is
 * configured, configures a local identity and retries exactly once. Any
 * other failure (or a failed retry) is returned as a GitError.
 *
 * @param {string} message - Commit message.
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @param {string} operation - Caller's operation name, used in the GitError.
 * @returns {Promise<Result<void, GitError>>}
 */
async function commitWithRetryOnMissingIdentity(message, workspaceRoot, operation) {
	const runCommit = async () => runIfNotDry("git", [
		"commit",
		"-m",
		message
	], { nodeOptions: {
		cwd: workspaceRoot,
		stdio: "pipe"
	} });
	try {
		await runCommit();
		return ok(void 0);
	} catch (error) {
		// Only the missing-identity failure is retryable; everything else
		// surfaces immediately.
		if (!isMissingGitIdentityError(error)) return err(toGitError(operation, error));
		const configured = await ensureLocalGitIdentity(workspaceRoot);
		if (!configured.ok) return configured;
		try {
			await runCommit();
			return ok(void 0);
		} catch (retryError) {
			return err(toGitError(operation, retryError));
		}
	}
}
|
|
670
|
+
/**
 * Reports whether the git working directory has no pending changes.
 * Uses `git status --porcelain`, which prints nothing when clean.
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @returns {Promise<Result<boolean, GitError>>} ok(true) when clean.
 */
async function isWorkingDirectoryClean(workspaceRoot) {
	try {
		const status = await run("git", ["status", "--porcelain"], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		const isClean = status.stdout.trim().length === 0;
		return ok(isClean);
	} catch (error) {
		return err(toGitError("isWorkingDirectoryClean", error));
	}
}
|
|
680
|
+
/**
* Check if a git branch exists locally (documents `doesBranchExist`, declared
* below `doesRemoteBranchExist`).
* @param {string} branch - The branch name to check
* @param {string} workspaceRoot - The root directory of the workspace
* @returns {Promise<Result<boolean, GitError>>} Promise resolving to ok(true) if the local branch exists, ok(false) otherwise
*/
|
|
686
|
+
/**
 * Check if a remote branch exists on origin.
 * Uses `git ls-remote --exit-code`, which fails when no matching ref exists;
 * that failure is treated as "does not exist" rather than an error.
 * @param {string} branch - The branch name to check
 * @param {string} workspaceRoot - The root directory of the workspace
 * @returns {Promise<Result<boolean, GitError>>} ok(true) if the remote branch exists
 */
async function doesRemoteBranchExist(branch, workspaceRoot) {
	const lsRemoteArgs = [
		"ls-remote",
		"--exit-code",
		"--heads",
		"origin",
		branch
	];
	try {
		await run("git", lsRemoteArgs, { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		return ok(true);
	} catch (error) {
		logger.verbose(`Remote branch "origin/${branch}" does not exist: ${formatUnknownError(error).message}`);
		return ok(false);
	}
}
|
|
710
|
+
/**
 * Check if a git branch exists locally via `git rev-parse --verify`.
 * A verification failure is treated as "does not exist", not as an error.
 * @param {string} branch - The branch name to check
 * @param {string} workspaceRoot - The root directory of the workspace
 * @returns {Promise<Result<boolean, GitError>>} ok(true) if the branch exists locally
 */
async function doesBranchExist(branch, workspaceRoot) {
	const verifyArgs = [
		"rev-parse",
		"--verify",
		branch
	];
	try {
		await run("git", verifyArgs, { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		return ok(true);
	} catch (error) {
		logger.verbose(`Failed to verify branch "${branch}": ${formatUnknownError(error).message}`);
		return ok(false);
	}
}
|
|
726
|
+
/**
 * Retrieves the name of the currently checked-out branch.
 * @param {string} workspaceRoot - The root directory of the workspace
 * @returns {Promise<Result<string, GitError>>} the current branch name
 *   (`git rev-parse --abbrev-ref HEAD`), trimmed.
 */
async function getCurrentBranch(workspaceRoot) {
	const revParseArgs = [
		"rev-parse",
		"--abbrev-ref",
		"HEAD"
	];
	try {
		const result = await run("git", revParseArgs, { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		return ok(result.stdout.trim());
	} catch (error) {
		return err(toGitError("getCurrentBranch", error));
	}
}
|
|
745
|
+
/**
 * Creates a new branch from the specified base branch (no checkout).
 * Skipped in dry runs via runIfNotDry.
 * @param {string} branch - The name of the new branch to create
 * @param {string} base - The base branch to create the new branch from
 * @param {string} workspaceRoot - The root directory of the workspace
 * @returns {Promise<Result<void, GitError>>} resolves once the branch is created
 */
async function createBranch(branch, base, workspaceRoot) {
	try {
		logger.info(`Creating branch: ${farver.green(branch)} from ${farver.cyan(base)}`);
		const branchArgs = [
			"branch",
			branch,
			base
		];
		await runIfNotDry("git", branchArgs, { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		return ok(void 0);
	} catch (error) {
		return err(toGitError("createBranch", error));
	}
}
|
|
768
|
+
/**
 * Checks out the given branch and verifies the switch via git's stderr
 * confirmation line ("Switched to [a new ]branch '<name>'").
 *
 * @param {string} branch - Branch to check out.
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @returns {Promise<Result<boolean, GitError>>} ok(true) when the switch was
 *   confirmed, ok(false) when checkout succeeded but the output was not the
 *   expected confirmation, err when the checkout command itself failed.
 */
async function checkoutBranch(branch, workspaceRoot) {
	try {
		logger.info(`Switching to branch: ${farver.green(branch)}`);
		// git prints the checkout confirmation on stderr, not stdout.
		const output = (await run("git", ["checkout", branch], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} })).stderr.trim();
		const match = output.match(CHECKOUT_BRANCH_RE);
		if (match && match[1] === branch) {
			logger.info(`Successfully switched to branch: ${farver.green(branch)}`);
			return ok(true);
		}
		// Command succeeded but output was unexpected (e.g. already on branch).
		logger.warn(`Unexpected git checkout output: ${output}`);
		return ok(false);
	} catch (error) {
		const gitError = toGitError("checkoutBranch", error);
		logger.error(`Git checkout failed: ${gitError.message}`);
		if (gitError.stderr) logger.error(`Git stderr: ${gitError.stderr}`);
		// Best-effort diagnostics: list branches so logs show what was available.
		try {
			const branchResult = await run("git", ["branch", "-a"], { nodeOptions: {
				cwd: workspaceRoot,
				stdio: "pipe"
			} });
			logger.verbose(`Available branches:\n${branchResult.stdout}`);
		} catch (error) {
			// Inner `error` intentionally shadows the outer checkout error.
			logger.verbose(`Could not list available branches: ${formatUnknownError(error).message}`);
		}
		return err(gitError);
	}
}
|
|
798
|
+
/**
 * Pulls the latest changes for `branch` from origin.
 * @param {string} branch - Branch to pull.
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @returns {Promise<Result<boolean, GitError>>} ok(true) on success.
 */
async function pullLatestChanges(branch, workspaceRoot) {
	const pullArgs = [
		"pull",
		"origin",
		branch
	];
	try {
		await run("git", pullArgs, { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		return ok(true);
	} catch (error) {
		return err(toGitError("pullLatestChanges", error));
	}
}
|
|
813
|
+
/**
 * Rebases the current branch onto `ontoBranch`. Skipped in dry runs.
 * @param {string} ontoBranch - Branch to rebase onto.
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @returns {Promise<Result<void, GitError>>}
 */
async function rebaseBranch(ontoBranch, workspaceRoot) {
	try {
		logger.info(`Rebasing onto: ${farver.cyan(ontoBranch)}`);
		const rebaseArgs = ["rebase", ontoBranch];
		await runIfNotDry("git", rebaseArgs, { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		return ok(void 0);
	} catch (error) {
		return err(toGitError("rebaseBranch", error));
	}
}
|
|
825
|
+
/**
 * Reports whether the local branch has commits the remote lacks, by counting
 * `git rev-list origin/<branch>..<branch>`.
 * On failure (e.g. no remote tracking ref), conservatively returns ok(true)
 * so callers treat the branch as needing a push.
 * @param {string} branch - Branch to compare against its origin counterpart.
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @returns {Promise<Result<boolean, GitError>>}
 */
async function isBranchAheadOfRemote(branch, workspaceRoot) {
	const revListArgs = [
		"rev-list",
		`origin/${branch}..${branch}`,
		"--count"
	];
	try {
		const result = await run("git", revListArgs, { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		const aheadCount = Number.parseInt(result.stdout.trim(), 10);
		return ok(aheadCount > 0);
	} catch (error) {
		logger.verbose(`Failed to compare branch "${branch}" with remote: ${formatUnknownError(error).message}`);
		return ok(true);
	}
}
|
|
841
|
+
/**
 * Stages everything (`git add .`) and commits it with `message`, retrying
 * once with a locally-configured identity if needed.
 *
 * @param {string} message - Commit message.
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @returns {Promise<Result<boolean, GitError>>} ok(true) when a commit was
 *   made, ok(false) when there was nothing to commit, err on git failure.
 */
async function commitChanges(message, workspaceRoot) {
	try {
		await run("git", ["add", "."], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		const isClean = await isWorkingDirectoryClean(workspaceRoot);
		// NOTE(review): a FAILED clean-check (isClean.ok === false) also returns
		// ok(false) here, silently discarding that error — confirm this
		// best-effort behavior is intended.
		if (!isClean.ok || isClean.value) return ok(false);
		logger.info(`Committing changes: ${farver.dim(message)}`);
		const committed = await commitWithRetryOnMissingIdentity(message, workspaceRoot, "commitChanges");
		if (!committed.ok) return committed;
		return ok(true);
	} catch (error) {
		const gitError = toGitError("commitChanges", error);
		logger.error(`Git commit failed: ${gitError.message}`);
		if (gitError.stderr) logger.error(`Git stderr: ${gitError.stderr}`);
		return err(gitError);
	}
}
|
|
860
|
+
/**
 * Stages only the given paths and commits them with `message`, retrying once
 * with a locally-configured identity if needed.
 *
 * @param {string[]} paths - Paths to stage (passed after `--` to git add).
 * @param {string} message - Commit message.
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @returns {Promise<Result<boolean, GitError>>} ok(true) when a commit was
 *   made, ok(false) when `paths` is empty or staging produced no changes,
 *   err on git failure.
 */
async function commitPaths(paths, message, workspaceRoot) {
	try {
		if (paths.length === 0) return ok(false);
		// `--` prevents path names from being parsed as git options.
		await run("git", [
			"add",
			"--",
			...paths
		], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		// Nothing ended up staged → nothing to commit.
		if ((await run("git", [
			"diff",
			"--cached",
			"--name-only"
		], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} })).stdout.trim() === "") return ok(false);
		logger.info(`Committing changes: ${farver.dim(message)}`);
		const committed = await commitWithRetryOnMissingIdentity(message, workspaceRoot, "commitPaths");
		if (!committed.ok) return committed;
		return ok(true);
	} catch (error) {
		const gitError = toGitError("commitPaths", error);
		logger.error(`Git commit failed: ${gitError.message}`);
		if (gitError.stderr) logger.error(`Git stderr: ${gitError.stderr}`);
		return err(gitError);
	}
}
|
|
890
|
+
/**
 * Pushes `branch` to origin, optionally with --force-with-lease or --force.
 *
 * With `forceWithLease`, the remote branch is fetched first so the lease has
 * an up-to-date reference; if the remote ref does not exist yet, the push
 * falls back to a plain push instead of failing.
 *
 * @param {string} branch - Branch to push.
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @param {{forceWithLease?: boolean, force?: boolean}} [options]
 * @returns {Promise<Result<boolean, GitError>>} ok(true) on success.
 */
async function pushBranch(branch, workspaceRoot, options) {
	try {
		const args = [
			"push",
			"origin",
			branch
		];
		if (options?.forceWithLease) try {
			// Refresh the remote-tracking ref so --force-with-lease compares
			// against the remote's current state.
			await run("git", [
				"fetch",
				"origin",
				branch
			], { nodeOptions: {
				cwd: workspaceRoot,
				stdio: "pipe"
			} });
			args.push("--force-with-lease");
			logger.info(`Pushing branch: ${farver.green(branch)} ${farver.dim("(with lease)")}`);
		} catch (error) {
			const fetchError = toGitError("pushBranch.fetch", error);
			// Missing remote ref → first push of this branch; plain push is fine.
			if (fetchError.stderr?.includes("couldn't find remote ref") || fetchError.message.includes("couldn't find remote ref")) logger.verbose(`Remote branch origin/${branch} does not exist yet, falling back to regular push without --force-with-lease.`);
			else return err(fetchError);
		}
		else if (options?.force) {
			args.push("--force");
			logger.info(`Force pushing branch: ${farver.green(branch)}`);
		} else logger.info(`Pushing branch: ${farver.green(branch)}`);
		await runIfNotDry("git", args, { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		return ok(true);
	} catch (error) {
		return err(toGitError("pushBranch", error));
	}
}
|
|
926
|
+
/**
 * Reads a file's content at a given git ref via `git show <ref>:<path>`.
 * Missing files/refs are reported as ok(null) rather than an error.
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @param {string} ref - Any git ref (branch, tag, SHA).
 * @param {string} filePath - Repo-relative path of the file to read.
 * @returns {Promise<Result<string | null, GitError>>}
 */
async function readFileFromGit(workspaceRoot, ref, filePath) {
	try {
		const result = await run("git", ["show", `${ref}:${filePath}`], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		return ok(result.stdout);
	} catch (error) {
		logger.verbose(`Failed to read ${filePath} from ${ref}: ${formatUnknownError(error).message}`);
		return ok(null);
	}
}
|
|
937
|
+
/**
 * Finds the most recent release tag for a package (tags shaped `name@version`).
 *
 * Fix: `git tag --list` returns tags in lexicographic refname order, so the
 * previous `tags.reverse()[0]` mis-ordered multi-digit versions (e.g.
 * `1.10.0` sorts before `1.9.0`). The highest tag is now chosen by semver
 * comparison; list order is only used as a fallback when no tag carries a
 * parseable semver version.
 *
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @param {string} packageName - Package name used as the tag prefix.
 * @returns {Promise<Result<string | undefined, GitError>>} the highest
 *   matching tag, or undefined when no tags exist.
 */
async function getMostRecentPackageTag(workspaceRoot, packageName) {
	try {
		const { stdout } = await run("git", [
			"tag",
			"--list",
			`${packageName}@*`
		], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		const tags = stdout.split("\n").map((tag) => tag.trim()).filter(Boolean);
		if (tags.length === 0) return ok(void 0);
		let bestTag;
		let bestVersion = null;
		for (const tag of tags) {
			const atIndex = tag.lastIndexOf("@");
			if (atIndex === -1) continue;
			const version = tag.slice(atIndex + 1);
			if (!semver.valid(version)) continue;
			if (bestVersion === null || semver.gt(version, bestVersion)) {
				bestTag = tag;
				bestVersion = version;
			}
		}
		// Fallback: previous behavior (last tag in git's list order) when no
		// tag parses as semver.
		return ok(bestTag ?? tags[tags.length - 1]);
	} catch (error) {
		return err(toGitError("getMostRecentPackageTag", error));
	}
}
|
|
954
|
+
/**
 * Finds the most recent STABLE (non-prerelease) release tag for a package.
 *
 * Fix: the previous implementation walked the reversed `git tag --list`
 * output, which is lexicographic refname order and mis-orders multi-digit
 * versions (e.g. `1.10.0` sorts before `1.9.0`). The highest stable tag is
 * now chosen by semver comparison instead of list position.
 *
 * @param {string} workspaceRoot - The git repository root used as cwd.
 * @param {string} packageName - Package name used as the tag prefix.
 * @returns {Promise<Result<string | undefined, GitError>>} the highest
 *   stable tag, or undefined when no stable tag exists.
 */
async function getMostRecentPackageStableTag(workspaceRoot, packageName) {
	try {
		const { stdout } = await run("git", [
			"tag",
			"--list",
			`${packageName}@*`
		], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		const tags = stdout.split("\n").map((tag) => tag.trim()).filter(Boolean);
		let bestTag;
		let bestVersion = null;
		for (const tag of tags) {
			const atIndex = tag.lastIndexOf("@");
			if (atIndex === -1) continue;
			const version = tag.slice(atIndex + 1);
			// Only valid, non-prerelease versions qualify as "stable".
			if (!semver.valid(version) || semver.prerelease(version) != null) continue;
			if (bestVersion === null || semver.gt(version, bestVersion)) {
				bestTag = tag;
				bestVersion = version;
			}
		}
		return ok(bestTag);
	} catch (error) {
		return err(toGitError("getMostRecentPackageStableTag", error));
	}
}
|
|
976
|
+
/**
 * Builds a mapping of commit SHAs to the list of files changed in each commit
 * within a given inclusive range.
 *
 * Internally runs:
 *   git log --name-only --format=%H <from>^..<to>
 *
 * Notes
 * - This includes the commit identified by `from` (via `from^..to`).
 * - Order of commits in the resulting Map follows `git log` output
 *   (reverse chronological, newest first).
 * - On failure (e.g., invalid refs), an `err` Result wrapping a GitError is
 *   returned — not null.
 *
 * @param {string} workspaceRoot Absolute path to the git repository root used as cwd.
 * @param {string} from Starting commit/ref (inclusive).
 * @param {string} to Ending commit/ref (inclusive).
 * @returns {Promise<Result<Map<string, string[]>, GitError>>} Promise resolving to an ok Result
 *   holding a Map where keys are commit SHAs and values are arrays of file paths changed
 *   by that commit, or an err Result on error.
 */
async function getGroupedFilesByCommitSha(workspaceRoot, from, to) {
	const commitsMap = /* @__PURE__ */ new Map();
	try {
		const { stdout } = await run("git", [
			"log",
			"--name-only",
			"--format=%H",
			`${from}^..${to}`
		], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		// Output alternates between a 40-hex-char hash line and the file paths
		// changed by that commit; group each file line under the last hash seen.
		const lines = stdout.trim().split("\n").filter((line) => line.trim() !== "");
		let currentSha = null;
		for (const line of lines) {
			const trimmedLine = line.trim();
			if (COMMIT_HASH_RE.test(trimmedLine)) {
				currentSha = trimmedLine;
				commitsMap.set(currentSha, []);
				continue;
			}
			// Ignore any file lines that appear before the first hash line.
			if (currentSha === null) continue;
			commitsMap.get(currentSha).push(trimmedLine);
		}
		return ok(commitsMap);
	} catch (error) {
		return err(toGitError("getGroupedFilesByCommitSha", error));
	}
}
|
|
1024
|
+
/**
 * Create a git tag for a package release. Skipped in dry runs.
 * @param packageName - The package name (e.g., "@scope/name")
 * @param version - The version to tag (e.g., "1.2.3")
 * @param workspaceRoot - The root directory of the workspace
 * @returns Result indicating success or failure
 */
async function createPackageTag(packageName, version, workspaceRoot) {
	const tagName = `${packageName}@${version}`;
	try {
		logger.info(`Creating tag: ${farver.green(tagName)}`);
		const tagArgs = ["tag", tagName];
		await runIfNotDry("git", tagArgs, { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		return ok(void 0);
	} catch (error) {
		return err(toGitError("createPackageTag", error));
	}
}
|
|
1044
|
+
/**
 * Push a specific tag to the remote repository. Skipped in dry runs.
 * @param tagName - The tag name to push
 * @param workspaceRoot - The root directory of the workspace
 * @returns Result indicating success or failure
 */
async function pushTag(tagName, workspaceRoot) {
	const pushArgs = [
		"push",
		"origin",
		tagName
	];
	try {
		logger.info(`Pushing tag: ${farver.green(tagName)}`);
		await runIfNotDry("git", pushArgs, { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		return ok(void 0);
	} catch (error) {
		return err(toGitError("pushTag", error));
	}
}
|
|
1066
|
+
/**
 * Create and push a package tag in one operation. Stops and returns the
 * creation error if tagging fails; otherwise returns the push result.
 * @param packageName - The package name
 * @param version - The version to tag
 * @param workspaceRoot - The root directory of the workspace
 * @returns Result indicating success or failure
 */
async function createAndPushPackageTag(packageName, version, workspaceRoot) {
	const created = await createPackageTag(packageName, version, workspaceRoot);
	if (!created.ok) return created;
	const tagName = `${packageName}@${version}`;
	return pushTag(tagName, workspaceRoot);
}
|
|
1078
|
+
//#endregion
|
|
1079
|
+
//#region src/core/changelog.ts
|
|
1080
|
+
// Extracts the version token from a changelog "## " heading, tolerating
// optional <small> wrappers and markdown link brackets.
const CHANGELOG_VERSION_RE = /##\s+(?:<small>)?\[?([^\](\s<]+)/;
// Author-name patterns excluded from changelog credits (bot accounts).
const excludeAuthors = [
	/\[bot\]/i,
	/dependabot/i,
	/\(bot\)/i
];
|
|
1086
|
+
/**
 * Renders a single changelog entry for one package version using the Eta
 * template engine.
 *
 * @param {object} options
 * @param {string} options.packageName - Package the entry is for.
 * @param {string} options.version - New version being released.
 * @param {string|undefined} options.previousVersion - Previous version; when
 *   present, a GitHub compare URL is included in the template data.
 * @param {string} options.date - Release date string inserted verbatim.
 * @param {Array} options.commits - Parsed commits for this release.
 * @param {string} options.owner - GitHub owner (for compare URL).
 * @param {string} options.repo - GitHub repo (for compare URL).
 * @param {object} options.types - Commit-type → section-title mapping.
 * @param {string|undefined} options.template - Custom Eta template; falls
 *   back to DEFAULT_CHANGELOG_TEMPLATE.
 * @param {object} options.githubClient - Used to resolve author GitHub logins.
 * @returns {Promise<string>} the rendered, trimmed changelog entry.
 */
async function generateChangelogEntry(options) {
	const { packageName, version, previousVersion, date, commits, owner, repo, types, template, githubClient } = options;
	const templateData = {
		packageName,
		version,
		previousVersion,
		date,
		// Compare URL uses `pkg@prev...pkg@next` tag names; omitted for the
		// first release (no previous version).
		compareUrl: previousVersion ? `https://github.com/${owner}/${repo}/compare/${packageName}@${previousVersion}...${packageName}@${version}` : void 0,
		owner,
		repo,
		// buildTemplateGroups is defined elsewhere in this file; presumably it
		// buckets commits by type into { title, commits } groups — TODO confirm.
		groups: buildTemplateGroups({
			commits,
			owner,
			repo,
			types,
			commitAuthors: await resolveCommitAuthors(commits, githubClient)
		})
	};
	const eta = new Eta();
	const templateToUse = template || DEFAULT_CHANGELOG_TEMPLATE;
	return eta.renderString(templateToUse, templateData).trim();
}
|
|
1108
|
+
/**
 * Writes (or rewrites) the package's CHANGELOG.md with a new version entry.
 *
 * The existing changelog is read from git at the DEFAULT branch (not the
 * working tree) so repeated release runs don't stack entries. If an entry
 * for `version` already exists it is replaced in place; otherwise the new
 * entry is inserted right after the "# <package>" header.
 *
 * @param {object} options - { version, previousVersion, commits, date,
 *   normalizedOptions, workspacePackage, githubClient }.
 * @returns {Promise<void>} resolves after CHANGELOG.md is written.
 */
async function updateChangelog(options) {
	const { version, previousVersion, commits, date, normalizedOptions, workspacePackage, githubClient } = options;
	const changelogPath = join(workspacePackage.path, "CHANGELOG.md");
	const changelogRelativePath = relative(normalizedOptions.workspaceRoot, join(workspacePackage.path, "CHANGELOG.md"));
	// Baseline content comes from git (default branch), not the filesystem.
	const existingContent = await readFileFromGit(normalizedOptions.workspaceRoot, normalizedOptions.branch.default, changelogRelativePath);
	logger.verbose("Existing content found: ", existingContent.ok && Boolean(existingContent.value));
	const newEntry = await generateChangelogEntry({
		packageName: workspacePackage.name,
		version,
		previousVersion,
		date,
		commits,
		owner: normalizedOptions.owner,
		repo: normalizedOptions.repo,
		types: normalizedOptions.types,
		template: normalizedOptions.changelog?.template,
		githubClient
	});
	let updatedContent;
	// No changelog yet (or unreadable): start a fresh file with a header.
	if (!existingContent.ok || !existingContent.value) {
		updatedContent = `# ${workspacePackage.name}\n\n${newEntry}\n`;
		await writeFile(changelogPath, updatedContent, "utf-8");
		return;
	}
	const parsed = parseChangelog(existingContent.value);
	const lines = existingContent.value.split("\n");
	const existingVersionIndex = parsed.versions.findIndex((v) => v.version === version);
	if (existingVersionIndex !== -1) {
		// Replace the existing entry for this version in place.
		const existingVersion = parsed.versions[existingVersionIndex];
		const before = lines.slice(0, existingVersion.lineStart);
		const after = lines.slice(existingVersion.lineEnd + 1);
		updatedContent = [
			...before,
			newEntry,
			...after
		].join("\n");
	} else {
		// Insert the new entry immediately after the "# <package>" header,
		// ensuring a blank line separates it from the header.
		const insertAt = parsed.headerLineEnd + 1;
		const before = lines.slice(0, insertAt);
		const after = lines.slice(insertAt);
		if (before.length > 0 && before.at(-1) !== "") before.push("");
		updatedContent = [
			...before,
			newEntry,
			"",
			...after
		].join("\n");
	}
	await writeFile(changelogPath, updatedContent, "utf-8");
}
|
|
1158
|
+
/**
 * Builds a commit-hash → author-info[] map for changelog credit lines,
 * deduplicating authors by email and resolving each author's GitHub login
 * via the GitHub client (mutating the shared info objects in place).
 *
 * Bot authors (see excludeAuthors) and authors missing a name or email are
 * skipped entirely.
 *
 * @param {Array} commits - Parsed commits, each with `hash`, `shortHash`,
 *   and an `authors` array of { name, email }.
 * @param {object} githubClient - Client exposing resolveAuthorInfo(info).
 * @returns {Promise<Map<string, Array>>} map keyed by commit hash; values are
 *   arrays of shared author-info objects ({ commits, name, email, login? }).
 */
async function resolveCommitAuthors(commits, githubClient) {
	const authorMap = /* @__PURE__ */ new Map();
	const commitAuthors = /* @__PURE__ */ new Map();
	for (const commit of commits) {
		const authorsForCommit = [];
		commit.authors.forEach((author, idx) => {
			if (!author.email || !author.name) return;
			if (excludeAuthors.some((re) => re.test(author.name))) return;
			// One shared info object per unique email; commits accumulate on it.
			if (!authorMap.has(author.email)) authorMap.set(author.email, {
				commits: [],
				name: author.name,
				email: author.email
			});
			const info = authorMap.get(author.email);
			// Only the primary (first-listed) author gets the commit credited.
			if (idx === 0) info.commits.push(commit.shortHash);
			authorsForCommit.push(info);
		});
		commitAuthors.set(commit.hash, authorsForCommit);
	}
	const authors = [...authorMap.values()];
	// Resolve GitHub logins concurrently; resolveAuthorInfo mutates each info.
	await Promise.all(authors.map((info) => githubClient.resolveAuthorInfo(info)));
	return commitAuthors;
}
|
|
1181
|
+
/**
 * Parses a CHANGELOG.md string into its "# <package>" header position and
 * the line ranges of every "## <version>" section.
 *
 * Line indices are 0-based into `content.split("\n")` and inclusive; each
 * version section runs until the line before the next "## " heading (or the
 * end of the file).
 *
 * @param {string} content - Full changelog file content.
 * @returns {{packageName: (string|null), versions: Array<{version: string,
 *   lineStart: number, lineEnd: number, content: string}>,
 *   headerLineEnd: number}} headerLineEnd is -1 when no "# " header exists.
 */
function parseChangelog(content) {
	const lines = content.split("\n");
	let packageName = null;
	let headerLineEnd = -1;
	const versions = [];
	// Locate the first "# " header; everything above/at it is the file header.
	for (let i = 0; i < lines.length; i++) {
		const line = lines[i].trim();
		if (line.startsWith("# ")) {
			packageName = line.slice(2).trim();
			headerLineEnd = i;
			break;
		}
	}
	// Scan the remainder for "## " version headings. When no header was
	// found, headerLineEnd is -1 so this scans from line 0.
	for (let i = headerLineEnd + 1; i < lines.length; i++) {
		const line = lines[i].trim();
		if (line.startsWith("## ")) {
			const versionMatch = line.match(CHANGELOG_VERSION_RE);
			if (versionMatch) {
				const version = versionMatch[1];
				const lineStart = i;
				// Section extends to the line before the next "## " heading,
				// defaulting to the last line of the file.
				let lineEnd = lines.length - 1;
				for (let j = i + 1; j < lines.length; j++) if (lines[j].trim().startsWith("## ")) {
					lineEnd = j - 1;
					break;
				}
				const versionContent = lines.slice(lineStart, lineEnd + 1).join("\n");
				versions.push({
					version,
					lineStart,
					lineEnd,
					content: versionContent
				});
			}
		}
	}
	return {
		packageName,
		versions,
		headerLineEnd
	};
}
|
|
1222
|
+
//#endregion
|
|
1223
|
+
//#region src/operations/semver.ts
|
|
1224
|
+
/**
 * Reports whether `version` is a valid semver string.
 * @param {string} version - Version string to validate.
 * @returns {boolean}
 */
function isValidSemver(version) {
	return semver.valid(version) !== null;
}
|
|
1227
|
+
/**
 * Computes the next version for a given bump type.
 * @param {string} currentVersion - Current semver version.
 * @param {string} bump - semver release type, or "none" to keep the version.
 * @returns {string} the bumped version (or `currentVersion` for "none").
 * @throws {Error} when the current version is invalid or the bump fails.
 */
function getNextVersion(currentVersion, bump) {
	if (bump === "none") return currentVersion;
	if (!isValidSemver(currentVersion)) throw new Error(`Cannot bump version for invalid semver: ${currentVersion}`);
	const bumped = semver.inc(currentVersion, bump);
	if (bumped === null) throw new Error(`Failed to bump version ${currentVersion} with bump ${bump}`);
	return bumped;
}
|
|
1234
|
+
/**
 * Derive the bump category ("major" | "minor" | "patch" | "none") that takes
 * `oldVersion` to `newVersion`, collapsing semver's pre* diff kinds into the
 * corresponding stable category.
 *
 * @param {string} oldVersion - The previous version.
 * @param {string} newVersion - The new version.
 * @returns {"major"|"minor"|"patch"|"none"} The bump category.
 * @throws {Error} When either version is not valid semver.
 */
function calculateBumpType(oldVersion, newVersion) {
	if (!isValidSemver(oldVersion) || !isValidSemver(newVersion)) {
		throw new Error(`Cannot calculate bump type for invalid semver: ${oldVersion} or ${newVersion}`);
	}
	const diff = semver.diff(oldVersion, newVersion);
	// Identical versions produce no diff.
	if (!diff) {
		return "none";
	}
	switch (diff) {
		case "major":
		case "premajor":
			return "major";
		case "minor":
		case "preminor":
			return "minor";
		case "patch":
		case "prepatch":
		case "prerelease":
			return "patch";
		default:
			// Any other forward movement is treated as a patch; backwards is "none".
			return semver.gt(newVersion, oldVersion) ? "patch" : "none";
	}
}
|
|
1244
|
+
/**
 * Extract the textual prerelease identifier from a version string.
 *
 * @param {string} version - A semver string, e.g. "1.2.0-beta.3".
 * @returns {string | undefined} The identifier (e.g. "beta"), or undefined for
 *   stable versions, unparseable input, or numeric-only prerelease segments.
 */
function getPrereleaseIdentifier(version) {
	const parsed = semver.parse(version);
	// Stable or invalid versions carry no prerelease identifier.
	if (!parsed || parsed.prerelease.length === 0) {
		return void 0;
	}
	const [identifier] = parsed.prerelease;
	// Numeric first segments (e.g. "1.0.0-0") are not named identifiers.
	return typeof identifier === "string" ? identifier : void 0;
}
|
|
1250
|
+
/**
 * Compute the next prerelease version for a package.
 *
 * @param {string} currentVersion - Version to bump.
 * @param {"next"|"prepatch"|"preminor"|"premajor"} mode - "next" maps to
 *   semver's generic "prerelease" increment; the others pass through.
 * @param {string} [identifier] - Optional prerelease identifier ("beta", "alpha").
 * @returns {string} The bumped prerelease version.
 * @throws {Error} For invalid input or when semver cannot compute a result.
 */
function getNextPrereleaseVersion(currentVersion, mode, identifier) {
	if (!isValidSemver(currentVersion)) {
		throw new Error(`Cannot bump prerelease for invalid semver: ${currentVersion}`);
	}
	const releaseType = mode === "next" ? "prerelease" : mode;
	// Only pass the identifier through when one was actually provided (truthy),
	// matching semver.inc's optional-argument behavior.
	const bumped = identifier
		? semver.inc(currentVersion, releaseType, identifier)
		: semver.inc(currentVersion, releaseType);
	if (!bumped) {
		throw new Error(`Failed to compute prerelease version for ${currentVersion}`);
	}
	return bumped;
}
|
|
1257
|
+
//#endregion
|
|
1258
|
+
//#region src/core/prompts.ts
|
|
1259
|
+
/**
 * Interactively ask the user which workspace packages to release.
 *
 * @param {Array<{name: string, version: string}>} packages - Candidate packages.
 * @returns {Promise<string[]>} The selected package names; an empty array when
 *   the prompt was cancelled or nothing was selected.
 */
async function selectPackagePrompt(packages) {
	const response = await prompts({
		type: "multiselect",
		name: "selectedPackages",
		message: "Select packages to release",
		// Every package starts pre-selected; the user deselects to opt out.
		choices: packages.map((pkg) => ({
			title: `${pkg.name} (${farver.bold(pkg.version)})`,
			value: pkg.name,
			selected: true
		})),
		min: 1,
		hint: "Space to select/deselect. Return to submit.",
		instructions: false
	});
	// prompts returns an empty object on cancellation (Ctrl+C), so guard both cases.
	if (!response.selectedPackages || response.selectedPackages.length === 0) return [];
	return response.selectedPackages;
}
|
|
459
|
-
async function
|
|
460
|
-
const
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
1276
|
+
/**
 * Interactively choose the next version for a single package.
 *
 * Presents skip / suggested / as-is / patch / minor / major / prerelease /
 * custom choices, with a secondary prompt for prerelease strategy and a free
 * text prompt for custom versions.
 *
 * @param {string} workspaceRoot - Workspace root (currently unused here — NOTE(review): kept for signature compatibility; confirm against callers).
 * @param {{name: string, version: string}} pkg - The package being versioned.
 * @param {string} currentVersion - The package's current version.
 * @param {string} suggestedVersion - The version suggested by commit analysis.
 * @param {{defaultChoice?: string, suggestedHint?: string}} [options] - Prompt tuning.
 * @returns {Promise<string | null>} The chosen version, or null when skipped/cancelled.
 */
async function selectVersionPrompt(workspaceRoot, pkg, currentVersion, suggestedVersion, options) {
	const defaultChoice = options?.defaultChoice ?? "auto";
	const suggestedSuffix = options?.suggestedHint ? farver.dim(` (${options.suggestedHint})`) : "";
	const prereleaseIdentifier = getPrereleaseIdentifier(currentVersion);
	// Prefer the current identifier when it is alpha/beta; otherwise default to beta.
	const nextDefaultPrerelease = getNextPrereleaseVersion(currentVersion, "next", prereleaseIdentifier === "alpha" || prereleaseIdentifier === "beta" ? prereleaseIdentifier : "beta");
	// Pre-compute every prerelease variant so titles can show the concrete result.
	const nextBeta = getNextPrereleaseVersion(currentVersion, "next", "beta");
	const nextAlpha = getNextPrereleaseVersion(currentVersion, "next", "alpha");
	const prePatchBeta = getNextPrereleaseVersion(currentVersion, "prepatch", "beta");
	const preMinorBeta = getNextPrereleaseVersion(currentVersion, "preminor", "beta");
	const preMajorBeta = getNextPrereleaseVersion(currentVersion, "premajor", "beta");
	const prePatchAlpha = getNextPrereleaseVersion(currentVersion, "prepatch", "alpha");
	const preMinorAlpha = getNextPrereleaseVersion(currentVersion, "preminor", "alpha");
	const preMajorAlpha = getNextPrereleaseVersion(currentVersion, "premajor", "alpha");
	const isCurrentPrerelease = prereleaseIdentifier != null;
	const choices = [
		{
			value: "skip",
			title: `skip ${farver.dim("(no change)")}`
		},
		{
			value: "suggested",
			title: `suggested ${farver.bold(suggestedVersion)}${suggestedSuffix}`
		},
		{
			value: "as-is",
			title: `as-is ${farver.dim("(keep current version)")}`
		},
		// Only offer a one-shot "next prerelease" shortcut when already on a prerelease.
		...isCurrentPrerelease ? [{
			value: "next-prerelease",
			title: `next prerelease ${farver.bold(nextDefaultPrerelease)}`
		}] : [],
		{
			value: "patch",
			title: `patch ${farver.bold(getNextVersion(pkg.version, "patch"))}`
		},
		{
			value: "minor",
			title: `minor ${farver.bold(getNextVersion(pkg.version, "minor"))}`
		},
		{
			value: "major",
			title: `major ${farver.bold(getNextVersion(pkg.version, "major"))}`
		},
		{
			value: "prerelease",
			title: `prerelease ${farver.dim("(choose strategy)")}`
		},
		{
			value: "custom",
			title: "custom"
		}
	];
	// "auto" preselects "skip" when there is nothing to release, else "suggested".
	const initialValue = defaultChoice === "auto" ? suggestedVersion === currentVersion ? "skip" : "suggested" : defaultChoice;
	const initial = Math.max(0, choices.findIndex((choice) => choice.value === initialValue));
	// Lookup table shared by both the top-level and the strategy sub-prompt.
	const prereleaseVersionByChoice = {
		"next-prerelease": nextDefaultPrerelease,
		"next": nextDefaultPrerelease,
		"next-beta": nextBeta,
		"next-alpha": nextAlpha,
		"prepatch-beta": prePatchBeta,
		"preminor-beta": preMinorBeta,
		"premajor-beta": preMajorBeta,
		"prepatch-alpha": prePatchAlpha,
		"preminor-alpha": preMinorAlpha,
		"premajor-alpha": preMajorAlpha
	};
	const answers = await prompts({
		type: "autocomplete",
		name: "version",
		message: `${pkg.name}: ${farver.green(pkg.version)}`,
		choices,
		limit: choices.length,
		initial
	});
	// Cancellation (empty answer) and an explicit "skip" both mean "no change".
	if (!answers.version) return null;
	if (answers.version === "skip") return null;
	else if (answers.version === "suggested") return suggestedVersion;
	else if (answers.version === "custom") {
		const customAnswer = await prompts({
			type: "text",
			name: "custom",
			message: "Enter the new version number:",
			initial: suggestedVersion,
			validate: (custom) => {
				if (isValidSemver(custom)) return true;
				return "That's not a valid version number";
			}
		});
		if (!customAnswer.custom) return null;
		return customAnswer.custom;
	} else if (answers.version === "as-is") return currentVersion;
	else if (answers.version === "prerelease") {
		// Secondary prompt: pick a concrete prerelease strategy.
		const prereleaseChoices = [
			{
				value: "next",
				title: `next ${farver.bold(nextDefaultPrerelease)}`
			},
			{
				value: "next-beta",
				title: `next beta ${farver.bold(nextBeta)}`
			},
			{
				value: "next-alpha",
				title: `next alpha ${farver.bold(nextAlpha)}`
			},
			{
				value: "prepatch-beta",
				title: `pre-patch (beta) ${farver.bold(prePatchBeta)}`
			},
			{
				value: "prepatch-alpha",
				title: `pre-patch (alpha) ${farver.bold(prePatchAlpha)}`
			},
			{
				value: "preminor-beta",
				title: `pre-minor (beta) ${farver.bold(preMinorBeta)}`
			},
			{
				value: "preminor-alpha",
				title: `pre-minor (alpha) ${farver.bold(preMinorAlpha)}`
			},
			{
				value: "premajor-beta",
				title: `pre-major (beta) ${farver.bold(preMajorBeta)}`
			},
			{
				value: "premajor-alpha",
				title: `pre-major (alpha) ${farver.bold(preMajorAlpha)}`
			}
		];
		const prereleaseAnswer = await prompts({
			type: "autocomplete",
			name: "prerelease",
			message: `${pkg.name}: select prerelease strategy`,
			choices: prereleaseChoices,
			limit: prereleaseChoices.length,
			initial: 0
		});
		if (!prereleaseAnswer.prerelease) return null;
		return prereleaseVersionByChoice[prereleaseAnswer.prerelease];
	}
	// "next-prerelease" resolves through the lookup table; anything left is a
	// plain patch/minor/major bump.
	const prereleaseVersion = prereleaseVersionByChoice[answers.version];
	if (prereleaseVersion) return prereleaseVersion;
	return getNextVersion(pkg.version, answers.version);
}
|
|
1421
|
+
/**
 * Ask whether to use a version pinned by a release override file.
 *
 * @param {{name: string}} pkg - The package the override applies to.
 * @param {string} overrideVersion - The version found in the overrides file.
 * @returns {Promise<"use" | "pick" | null>} "use" to accept the override,
 *   "pick" to choose another version, or null when the prompt was cancelled.
 */
async function confirmOverridePrompt(pkg, overrideVersion) {
	const response = await prompts({
		type: "select",
		name: "choice",
		message: `${pkg.name}: use override version ${farver.bold(overrideVersion)}?`,
		choices: [{
			title: "use override",
			value: "use"
		}, {
			title: "pick another version",
			value: "pick"
		}],
		initial: 0
	});
	// prompts yields no "choice" key on cancellation.
	if (!response.choice) return null;
	return response.choice;
}
|
|
1438
|
+
//#endregion
|
|
1439
|
+
//#region src/core/workspace.ts
|
|
1440
|
+
/**
 * Normalize any thrown value into a structured workspace error record.
 *
 * @param {string} operation - Name of the operation that failed.
 * @param {unknown} error - The caught value (Error or anything else).
 * @returns {{type: "workspace", operation: string, message: string}}
 */
function toWorkspaceError(operation, error) {
	// Prefer the Error's message; stringify anything else.
	const message = error instanceof Error ? error.message : String(error);
	return {
		type: "workspace",
		operation,
		message
	};
}
|
|
1447
|
+
/**
 * Discover the workspace packages to operate on, applying config filtering and
 * (outside CI) an interactive selection prompt.
 *
 * `options.packages` may be: null/true (all packages), an array of names
 * (explicit allow-list), or a filter options object.
 *
 * @param {string} workspaceRoot - Root directory of the workspace.
 * @param {object} options - Release configuration (packages filter, prompt gating).
 * @returns {Promise<Result>} ok(packages) or err(workspace error).
 */
async function discoverWorkspacePackages(workspaceRoot, options) {
	let workspaceOptions;
	let explicitPackages;
	// Normalize the three accepted shapes of options.packages into filter options.
	if (options.packages == null || options.packages === true) workspaceOptions = { excludePrivate: false };
	else if (Array.isArray(options.packages)) {
		workspaceOptions = {
			excludePrivate: false,
			include: options.packages
		};
		explicitPackages = options.packages;
	} else {
		workspaceOptions = options.packages;
		if (options.packages.include) explicitPackages = options.packages.include;
	}
	let workspacePackages;
	try {
		workspacePackages = await findWorkspacePackages(workspaceRoot, workspaceOptions);
	} catch (error) {
		return err(toWorkspaceError("discoverWorkspacePackages", error));
	}
	// Explicitly named packages must all exist; fail with a helpful message otherwise.
	if (explicitPackages) {
		const foundNames = new Set(workspacePackages.map((p) => p.name));
		const missing = explicitPackages.filter((p) => !foundNames.has(p));
		if (missing.length > 0) return err(toWorkspaceError("discoverWorkspacePackages", `Package${missing.length > 1 ? "s" : ""} not found in workspace: ${missing.join(", ")}. Check your package names or run 'pnpm ls' to see available packages`));
	}
	const isPackagePromptEnabled = options.prompts?.packages !== false;
	logger.verbose("Package prompt gating", {
		isCI: getIsCI(),
		isPackagePromptEnabled,
		hasExplicitPackages: Boolean(explicitPackages),
		include: workspaceOptions.include ?? [],
		exclude: workspaceOptions.exclude ?? [],
		excludePrivate: workspaceOptions.excludePrivate ?? false
	});
	// Only prompt interactively when not in CI, prompting is enabled, and no
	// explicit package list was given.
	if (!getIsCI() && isPackagePromptEnabled && !explicitPackages) {
		const selectedNames = await selectPackagePrompt(workspacePackages);
		workspacePackages = workspacePackages.filter((pkg) => selectedNames.includes(pkg.name));
	}
	return ok(workspacePackages);
}
|
|
1487
|
+
/**
 * List workspace packages via `pnpm -r ls --json`, enrich each entry with its
 * parsed package.json and its in-workspace (dev)dependencies, and apply the
 * include/exclude/private filters.
 *
 * @param {string} workspaceRoot - Root directory to run pnpm in.
 * @param {{excludePrivate?: boolean, include?: string[], exclude?: string[]}} [options] - Filters.
 * @returns {Promise<Array<object>>} The included workspace packages.
 * @throws Re-throws any pnpm/parse/filesystem error after logging it.
 */
async function findWorkspacePackages(workspaceRoot, options) {
	try {
		const result = await run("pnpm", [
			"-r",
			"ls",
			"--json"
		], { nodeOptions: {
			cwd: workspaceRoot,
			stdio: "pipe"
		} });
		const rawProjects = JSON.parse(result.stdout);
		// Used to distinguish workspace-internal deps from registry deps.
		const allPackageNames = new Set(rawProjects.map((p) => p.name));
		const excludedPackages = /* @__PURE__ */ new Set();
		const promises = rawProjects.map(async (rawProject) => {
			const content = await readFile(join(rawProject.path, "package.json"), "utf-8");
			const packageJson = JSON.parse(content);
			if (!shouldIncludePackage(packageJson, options)) {
				excludedPackages.add(rawProject.name);
				return null;
			}
			return {
				name: rawProject.name,
				version: rawProject.version,
				path: rawProject.path,
				packageJson,
				// Fix: fall back to an empty OBJECT (not array) for the dependency records.
				workspaceDependencies: Object.keys(rawProject.dependencies || {}).filter((dep) => {
					return allPackageNames.has(dep);
				}),
				workspaceDevDependencies: Object.keys(rawProject.devDependencies || {}).filter((dep) => {
					return allPackageNames.has(dep);
				})
			};
		});
		const packages = await Promise.all(promises);
		if (excludedPackages.size > 0) logger.info(`Excluded packages: ${farver.green([...excludedPackages].join(", "))}`);
		return packages.filter((pkg) => pkg !== null);
	} catch (error) {
		// Renamed catch variable from `err` so it no longer shadows the module's
		// `err()` Result helper.
		logger.error("Error discovering workspace packages:", error);
		throw error;
	}
}
|
|
1528
|
+
/**
 * Decide whether a package passes the workspace filter options.
 *
 * @param {{name: string, private?: boolean}} pkg - Parsed package.json.
 * @param {{excludePrivate?: boolean, include?: string[], exclude?: string[]}} [options] - Filters.
 * @returns {boolean} true when the package should be processed.
 */
function shouldIncludePackage(pkg, options) {
	// Without options there is no filtering at all.
	if (!options) return true;
	if (options.excludePrivate && pkg.private) return false;
	const includeList = options.include;
	// A non-empty include list acts as an allow-list.
	if (includeList && includeList.length > 0 && !includeList.includes(pkg.name)) {
		return false;
	}
	// The exclude list is always honored, even for explicitly included names.
	const isExcluded = options.exclude?.includes(pkg.name) ?? false;
	return !isExcluded;
}
|
|
1537
|
+
//#endregion
|
|
1538
|
+
//#region src/operations/branch.ts
|
|
1539
|
+
/**
 * Ensure the release branch exists, is checked out, is up to date with its
 * remote, and is rebased onto the default branch.
 *
 * Result-style pipeline: each git helper returns {ok, value}; the first
 * failing step's Result is returned unchanged.
 *
 * @param {{workspaceRoot: string, releaseBranch: string, defaultBranch: string}} options
 * @returns {Promise<Result>} ok(undefined) on success, or the first error Result.
 */
async function prepareReleaseBranch(options) {
	const { workspaceRoot, releaseBranch, defaultBranch } = options;
	const currentBranch = await getCurrentBranch(workspaceRoot);
	if (!currentBranch.ok) return currentBranch;
	// Releases must start from the default branch to get a clean rebase base.
	if (currentBranch.value !== defaultBranch) return err({
		type: "git",
		operation: "validateBranch",
		message: `Current branch is '${currentBranch.value}'. Please switch to '${defaultBranch}'.`
	});
	const branchExists = await doesBranchExist(releaseBranch, workspaceRoot);
	if (!branchExists.ok) return branchExists;
	// Create the release branch from the default branch if it doesn't exist yet.
	if (!branchExists.value) {
		const created = await createBranch(releaseBranch, defaultBranch, workspaceRoot);
		if (!created.ok) return created;
	}
	const checkedOut = await checkoutBranch(releaseBranch, workspaceRoot);
	if (!checkedOut.ok) return checkedOut;
	// For a pre-existing local branch, sync from its remote counterpart if one exists.
	if (branchExists.value) {
		const remoteExists = await doesRemoteBranchExist(releaseBranch, workspaceRoot);
		if (!remoteExists.ok) return remoteExists;
		if (remoteExists.value) {
			const pulled = await pullLatestChanges(releaseBranch, workspaceRoot);
			if (!pulled.ok) return pulled;
			// A pull failure here is non-fatal (e.g. diverged history) — warn and continue.
			if (!pulled.value) logger.warn("Failed to pull latest changes, continuing anyway.");
		} else logger.info(`Remote branch "origin/${releaseBranch}" does not exist yet, skipping pull.`);
	}
	// Always rebase onto the default branch so the release includes its latest commits.
	const rebased = await rebaseBranch(defaultBranch, workspaceRoot);
	if (!rebased.ok) return rebased;
	return ok(void 0);
}
|
|
1569
|
+
/**
 * Commit pending release changes (if any) and push the release branch.
 *
 * @param {{workspaceRoot: string, releaseBranch: string, commitMessage: string, hasChanges: boolean}} options
 * @returns {Promise<Result<boolean>>} ok(true) when something was pushed,
 *   ok(false) when there was nothing to sync, or the first error Result.
 */
async function syncReleaseChanges(options) {
	const { workspaceRoot, releaseBranch, commitMessage, hasChanges } = options;
	const committed = hasChanges ? await commitChanges(commitMessage, workspaceRoot) : ok(false);
	if (!committed.ok) return committed;
	const isAhead = await isBranchAheadOfRemote(releaseBranch, workspaceRoot);
	if (!isAhead.ok) return isAhead;
	// Nothing new locally and nothing unpushed: no sync required.
	if (!committed.value && !isAhead.value) return ok(false);
	// force-with-lease: the branch may have been rebased onto the default branch.
	const pushed = await pushBranch(releaseBranch, workspaceRoot, { forceWithLease: true });
	if (!pushed.ok) return pushed;
	return ok(true);
}
|
|
498
|
-
|
|
499
1580
|
//#endregion
|
|
500
|
-
//#region src/
|
|
501
|
-
|
|
502
|
-
|
|
1581
|
+
//#region src/versioning/commits.ts
|
|
1582
|
+
/**
 * Get commits grouped by workspace package.
 * For each package, retrieves all commits since its last release tag that affect that package.
 *
 * All packages are queried in parallel.
 *
 * @param {string} workspaceRoot - The root directory of the workspace
 * @param {WorkspacePackage[]} packages - Array of workspace packages to analyze
 * @returns {Promise<Map<string, GitCommit[]>>} A map of package names to their commits since their last release
 */
async function getWorkspacePackageGroupedCommits(workspaceRoot, packages) {
	const changedPackages = /* @__PURE__ */ new Map();
	const promises = packages.map(async (pkg) => {
		const lastTagResult = await getMostRecentPackageTag(workspaceRoot, pkg.name);
		// No tag (first release) → undefined "from", i.e. full history.
		const lastTag = lastTagResult.ok ? lastTagResult.value : void 0;
		const allCommits = await getCommits({
			from: lastTag,
			to: "HEAD",
			cwd: workspaceRoot,
			folder: pkg.path
		});
		logger.verbose(`Found ${farver.cyan(allCommits.length)} commits for package ${farver.bold(pkg.name)} since tag ${farver.cyan(lastTag ?? "N/A")}`);
		return {
			pkgName: pkg.name,
			commits: allCommits
		};
	});
	const results = await Promise.all(promises);
	for (const { pkgName, commits } of results) changedPackages.set(pkgName, commits);
	return changedPackages;
}
|
|
504
|
-
function
|
|
505
|
-
|
|
1611
|
+
/**
 * Fetch all commits for a single package's folder since a given tag.
 *
 * @param {string} workspaceRoot - The root directory of the workspace.
 * @param {{name: string, path: string}} pkg - The package to inspect.
 * @param {string | undefined} fromTag - Tag to start from; undefined means full history.
 * @returns {Promise<GitCommit[]>} The commits touching the package folder.
 */
async function getPackageCommitsSinceTag(workspaceRoot, pkg, fromTag) {
	const allCommits = await getCommits({
		from: fromTag,
		to: "HEAD",
		cwd: workspaceRoot,
		folder: pkg.path
	});
	logger.verbose(`Found ${farver.cyan(allCommits.length)} commits for package ${farver.bold(pkg.name)} since ${farver.cyan(fromTag ?? "start")}`);
	return allCommits;
}
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
1621
|
+
/**
 * Check if a file path touches any package folder.
 *
 * @param {string} file - The file path to check (repo-relative, may start with "./").
 * @param {Iterable<string>} packagePaths - Package paths (absolute or repo-relative).
 * @param {string} workspaceRoot - The workspace root for path normalization.
 * @returns {boolean} true if the file is inside (or is) a package folder.
 */
function fileMatchesPackageFolder(file, packagePaths, workspaceRoot) {
	// Strip a leading "./" so all comparisons are repo-relative.
	const candidate = file.startsWith("./") ? file.slice(2) : file;
	for (const pkgPath of packagePaths) {
		// Absolute package paths are cut down to repo-relative ones.
		const relativePkgPath = pkgPath.startsWith(workspaceRoot)
			? pkgPath.slice(workspaceRoot.length + 1)
			: pkgPath;
		const insideFolder = candidate.startsWith(`${relativePkgPath}/`);
		if (insideFolder || candidate === relativePkgPath) {
			return true;
		}
	}
	return false;
}
|
|
1636
|
+
/**
 * Check if a commit is a "global" commit (doesn't touch any package folder).
 *
 * @param {string} workspaceRoot - The workspace root.
 * @param {string[] | undefined} files - Files changed in the commit.
 * @param {Set<string>} packagePaths - Normalized package paths.
 * @returns {boolean} true if this is a global commit.
 */
function isGlobalCommit(workspaceRoot, files, packagePaths) {
	// A commit with no recorded file list cannot be classified as global.
	if (!files || files.length === 0) {
		return false;
	}
	// Global means every changed file lives outside all package folders.
	return files.every((file) => !fileMatchesPackageFolder(file, packagePaths, workspaceRoot));
}
|
|
1647
|
+
// Root-level files whose modification signals a dependency change.
// Consulted by filterGlobalCommits when mode === "dependencies".
const DEPENDENCY_FILES = [
	"package.json",
	"pnpm-lock.yaml",
	"pnpm-workspace.yaml",
	"yarn.lock",
	"package-lock.json"
];
|
|
1654
|
+
/**
 * Find the oldest and newest commits across all packages.
 *
 * Commit lists are assumed to be ordered newest-first: index 0 is the most
 * recent commit, the last entry the oldest.
 *
 * @param {Map<string, Array<{shortHash: string}>>} packageCommits - Map of package commits.
 * @returns {{oldest: string, newest: string} | null} Commit SHAs, or null if no commits.
 */
function findCommitRange(packageCommits) {
	let oldest = null;
	let newest = null;
	for (const commits of packageCommits.values()) {
		if (commits.length === 0) {
			continue;
		}
		// Lock in the first non-empty list's head as the newest commit.
		if (newest === null) {
			newest = commits[0].shortHash;
		}
		// Keep overwriting so the last non-empty list's tail wins as oldest.
		oldest = commits[commits.length - 1].shortHash;
	}
	if (!oldest || !newest) {
		return null;
	}
	return {
		oldest,
		newest
	};
}
|
|
1675
|
+
/**
 * Filters commits to find global commits (those not touching any package folder),
 * optionally further filtered to only dependency-related files.
 *
 * @param {Array<{shortHash: string}>} commits - Commits to filter.
 * @param {Map<string, string[]>} commitFilesMap - Changed files keyed by short hash.
 * @param {Set<string>} packagePaths - Normalized package folder paths.
 * @param {string} workspaceRoot - Workspace root for path normalization.
 * @param {"all"|"dependencies"} mode - "all" keeps every global commit,
 *   "dependencies" keeps only those touching a dependency manifest/lockfile.
 * @returns {Array<{shortHash: string}>} The filtered commits.
 */
function filterGlobalCommits(commits, commitFilesMap, packagePaths, workspaceRoot, mode) {
	const globalCommits = commits.filter((commit) => {
		const files = commitFilesMap.get(commit.shortHash);
		// Commits without a file entry cannot be proven global.
		if (!files) {
			return false;
		}
		return isGlobalCommit(workspaceRoot, files, packagePaths);
	});
	if (mode === "all") {
		return globalCommits;
	}
	// "dependencies" mode: additionally require a known dependency file change.
	return globalCommits.filter((commit) => {
		const files = commitFilesMap.get(commit.shortHash);
		if (!files) {
			return false;
		}
		return files.some((file) => {
			const normalized = file.startsWith("./") ? file.slice(2) : file;
			return DEPENDENCY_FILES.includes(normalized);
		});
	});
}
|
|
532
1691
|
/**
 * Get global commits for each package based on their individual commit timelines.
 * This solves the problem where packages with different release histories need different global commits.
 *
 * A "global commit" is a commit that doesn't touch any package folder but may affect all packages
 * (e.g., root package.json, CI config, README).
 *
 * Performance: Makes ONE batched git call to get files for all commits across all packages.
 *
 * @param {string} workspaceRoot - The root directory of the workspace
 * @param {Map} packageCommits - Map of package name to their commits (from getWorkspacePackageCommits)
 * @param {Array} allPackages - All workspace packages (used to identify package folders)
 * @param {false|"all"|"dependencies"} mode - Filter mode: false (disabled), "all" (all global commits), or "dependencies" (only dependency-related)
 * @returns {Promise<Map>} Map of package name to their global commits
 */
async function getGlobalCommitsPerPackage(workspaceRoot, packageCommits, allPackages, mode) {
	const result = /* @__PURE__ */ new Map();
	// Disabled: return an empty map so callers treat every package as having no global commits.
	if (!mode) {
		logger.verbose("Global commits mode disabled");
		return result;
	}
	logger.verbose(`Computing global commits per-package (mode: ${farver.cyan(mode)})`);
	const commitRange = findCommitRange(packageCommits);
	if (!commitRange) {
		logger.verbose("No commits found across packages");
		return result;
	}
	logger.verbose("Fetching files for commits range", `${farver.cyan(commitRange.oldest)}..${farver.cyan(commitRange.newest)}`);
	// Single batched git call covering the whole oldest..newest range.
	const commitFilesMap = await getGroupedFilesByCommitSha(workspaceRoot, commitRange.oldest, commitRange.newest);
	// Best-effort: a failed file lookup degrades to "no global commits" rather than erroring.
	if (!commitFilesMap.ok) {
		logger.warn("Failed to get commit file list, returning empty global commits");
		return result;
	}
	logger.verbose("Got file lists for commits", `${farver.cyan(commitFilesMap.value.size)} commits in ONE git call`);
	const packagePaths = new Set(allPackages.map((p) => p.path));
	// Filter each package's own commit timeline against the shared file map.
	for (const [pkgName, commits] of packageCommits) {
		logger.verbose("Filtering global commits for package", `${farver.bold(pkgName)} from ${farver.cyan(commits.length)} commits`);
		const filtered = filterGlobalCommits(commits, commitFilesMap.value, packagePaths, workspaceRoot, mode);
		logger.verbose("Package global commits found", `${farver.bold(pkgName)}: ${farver.cyan(filtered.length)} global commits`);
		result.set(pkgName, filtered);
	}
	return result;
}
|
|
1734
|
+
//#endregion
|
|
1735
|
+
//#region src/operations/version.ts
|
|
1736
|
+
/**
 * Determine the strongest bump implied by a set of commits.
 *
 * @param {Array<object>} commits - Commits to inspect.
 * @returns {"major"|"minor"|"patch"|"none"} The highest bump across all commits.
 */
function determineHighestBump(commits) {
	if (commits.length === 0) {
		return "none";
	}
	let highest = "none";
	for (const commit of commits) {
		switch (determineBumpType(commit)) {
			case "major":
				// Nothing outranks a major bump — short-circuit immediately.
				return "major";
			case "minor":
				highest = "minor";
				break;
			case "patch":
				// patch only upgrades from "none"; it never downgrades "minor".
				if (highest === "none") {
					highest = "patch";
				}
				break;
			default:
				break;
		}
	}
	return highest;
}
|
|
535
1747
|
function createVersionUpdate(pkg, bump, hasDirectChanges) {
|
|
536
1748
|
const newVersion = getNextVersion(pkg.version, bump);
|
|
537
1749
|
return {
|
|
@@ -539,61 +1751,402 @@ function createVersionUpdate(pkg, bump, hasDirectChanges) {
|
|
|
539
1751
|
currentVersion: pkg.version,
|
|
540
1752
|
newVersion,
|
|
541
1753
|
bumpType: bump,
|
|
542
|
-
hasDirectChanges
|
|
1754
|
+
hasDirectChanges,
|
|
1755
|
+
changeKind: "dependent"
|
|
1756
|
+
};
|
|
1757
|
+
}
|
|
1758
|
+
/**
 * Map a single conventional commit onto a semver bump type.
 *
 * @param {{isConventional: boolean, isBreaking?: boolean, type?: string}} commit
 * @returns {"major"|"minor"|"patch"|"none"} The bump implied by the commit.
 */
function determineBumpType(commit) {
	// Non-conventional commits never trigger a release on their own.
	if (!commit.isConventional) {
		return "none";
	}
	if (commit.isBreaking) {
		return "major";
	}
	switch (commit.type) {
		case "feat":
			return "minor";
		case "fix":
		case "perf":
			return "patch";
		default:
			return "none";
	}
}
|
|
1765
|
+
//#endregion
|
|
1766
|
+
//#region src/versioning/package.ts
|
|
1767
|
+
/**
 * Build a dependency graph from workspace packages.
 *
 * Produces a bidirectional view:
 * - packages: Map of package name → WorkspacePackage
 * - dependents: Map of package name → Set of names that depend on it
 *
 * @param {Array<object>} packages - All workspace packages.
 * @returns {{packages: Map, dependents: Map}} The dependency graph.
 */
function buildPackageDependencyGraph(packages) {
	const byName = /* @__PURE__ */ new Map();
	const dependents = /* @__PURE__ */ new Map();
	// Seed both maps so every package has an (initially empty) dependent set.
	for (const pkg of packages) {
		byName.set(pkg.name, pkg);
		dependents.set(pkg.name, /* @__PURE__ */ new Set());
	}
	// Invert dependency edges: for each dep, record who depends on it.
	for (const pkg of packages) {
		const combinedDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
		for (const dep of combinedDeps) {
			// Dependencies outside the workspace have no entry and are skipped.
			dependents.get(dep)?.add(pkg.name);
		}
	}
	return {
		packages: byName,
		dependents
	};
}
|
|
545
1796
|
/**
 * Get all packages affected by changes (including transitive dependents).
 *
 * Depth-first traversal over the dependents graph: starts from each directly
 * changed package and follows "who depends on me" edges.
 *
 * @param {{dependents: Map<string, Set<string>>}} graph - Dependency graph.
 * @param {Iterable<string>} changedPackages - Package names with direct changes.
 * @returns {Set<string>} All package names that need updates.
 */
function getAllAffectedPackages(graph, changedPackages) {
	const affected = /* @__PURE__ */ new Set();
	// Recursive DFS; the Set doubles as the visited marker, preserving the
	// original depth-first insertion order.
	const walk = (name) => {
		if (affected.has(name)) {
			return;
		}
		affected.add(name);
		for (const dependent of graph.dependents.get(name) ?? []) {
			walk(dependent);
		}
	};
	for (const name of changedPackages) {
		walk(name);
	}
	return affected;
}
|
|
1818
|
+
/**
 * Calculate the order in which packages should be published.
 *
 * Performs topological sorting to ensure dependencies are published before dependents.
 * Assigns a "level" to each package based on its depth in the dependency tree.
 *
 * This is used by the publish command to publish packages in the correct order.
 *
 * @param {{packages: Map, dependents: Map}} graph - Dependency graph.
 * @param {Iterable<string>} packagesToPublish - Set of package names to publish.
 * @returns {Array<{package: object, level: number}>} Packages in publish order with their dependency level.
 */
function getPackagePublishOrder(graph, packagesToPublish) {
	const result = [];
	const visited = /* @__PURE__ */ new Set();
	const toUpdate = new Set(packagesToPublish);
	// NOTE(review): packagesToProcess is populated but never read afterwards —
	// looks vestigial; confirm before removing.
	const packagesToProcess = new Set(packagesToPublish);
	// Expand the publish set with direct dependents of each requested package.
	for (const pkg of packagesToPublish) {
		const deps = graph.dependents.get(pkg);
		if (deps) for (const dep of deps) {
			packagesToProcess.add(dep);
			toUpdate.add(dep);
		}
	}
	// Post-order DFS: visit a package's in-set dependencies first, then assign
	// it a level one greater than its deepest already-placed dependency.
	function visit(pkgName, level) {
		if (visited.has(pkgName)) return;
		visited.add(pkgName);
		const pkg = graph.packages.get(pkgName);
		if (!pkg) return;
		const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
		let maxDepLevel = level;
		for (const dep of allDeps) if (toUpdate.has(dep)) {
			visit(dep, level);
			// Dependencies already pushed to result carry their final level.
			const depResult = result.find((r) => r.package.name === dep);
			if (depResult && depResult.level >= maxDepLevel) maxDepLevel = depResult.level + 1;
		}
		result.push({
			package: pkg,
			level: maxDepLevel
		});
	}
	for (const pkg of toUpdate) visit(pkg, 0);
	// Stable sort keeps insertion order within the same level.
	result.sort((a, b) => a.level - b.level);
	return result;
}
|
|
1863
|
+
/**
|
|
1864
|
+
* Create version updates for all packages affected by dependency changes
|
|
547
1865
|
*
|
|
1866
|
+
* When a package is updated, all packages that depend on it should also be updated.
|
|
1867
|
+
* This function calculates which additional packages need patch bumps due to dependency changes.
|
|
1868
|
+
*
|
|
1869
|
+
* @param graph - Dependency graph
|
|
548
1870
|
* @param workspacePackages - All workspace packages
|
|
549
|
-
* @param
|
|
550
|
-
* @
|
|
551
|
-
* @param showPrompt - Whether to show prompts for version overrides
|
|
552
|
-
* @returns Version updates for packages with changes
|
|
1871
|
+
* @param directUpdates - Packages with direct code changes
|
|
1872
|
+
* @returns All updates including dependent packages that need patch bumps
|
|
553
1873
|
*/
|
|
554
|
-
|
|
555
|
-
const
|
|
556
|
-
|
|
557
|
-
|
|
1874
|
+
function createDependentUpdates(graph, workspacePackages, directUpdates, excludedPackages = /* @__PURE__ */ new Set()) {
|
|
1875
|
+
const allUpdates = [...directUpdates];
|
|
1876
|
+
const directUpdateMap = new Map(directUpdates.map((u) => [u.package.name, u]));
|
|
1877
|
+
const affectedPackages = getAllAffectedPackages(graph, new Set(directUpdates.map((u) => u.package.name)));
|
|
1878
|
+
for (const pkgName of affectedPackages) {
|
|
1879
|
+
logger.verbose(`Processing affected package: ${pkgName}`);
|
|
1880
|
+
if (excludedPackages.has(pkgName)) {
|
|
1881
|
+
logger.verbose(`Skipping ${pkgName}, explicitly excluded from dependent bumps`);
|
|
1882
|
+
continue;
|
|
1883
|
+
}
|
|
1884
|
+
if (directUpdateMap.has(pkgName)) {
|
|
1885
|
+
logger.verbose(`Skipping ${pkgName}, already has a direct update`);
|
|
1886
|
+
continue;
|
|
1887
|
+
}
|
|
558
1888
|
const pkg = workspacePackages.find((p) => p.name === pkgName);
|
|
559
1889
|
if (!pkg) continue;
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
1890
|
+
allUpdates.push(createVersionUpdate(pkg, "patch", false));
|
|
1891
|
+
}
|
|
1892
|
+
return allUpdates;
|
|
1893
|
+
}
|
|
1894
|
+
//#endregion
|
|
1895
|
+
//#region src/versioning/version.ts
|
|
1896
|
+
const messageColorMap = {
|
|
1897
|
+
feat: farver.green,
|
|
1898
|
+
feature: farver.green,
|
|
1899
|
+
refactor: farver.cyan,
|
|
1900
|
+
style: farver.cyan,
|
|
1901
|
+
docs: farver.blue,
|
|
1902
|
+
doc: farver.blue,
|
|
1903
|
+
types: farver.blue,
|
|
1904
|
+
type: farver.blue,
|
|
1905
|
+
chore: farver.gray,
|
|
1906
|
+
ci: farver.gray,
|
|
1907
|
+
build: farver.gray,
|
|
1908
|
+
deps: farver.gray,
|
|
1909
|
+
dev: farver.gray,
|
|
1910
|
+
fix: farver.yellow,
|
|
1911
|
+
test: farver.yellow,
|
|
1912
|
+
perf: farver.magenta,
|
|
1913
|
+
revert: farver.red,
|
|
1914
|
+
breaking: farver.red
|
|
1915
|
+
};
|
|
1916
|
+
function formatCommitsForDisplay(commits) {
|
|
1917
|
+
if (commits.length === 0) return farver.dim("No commits found");
|
|
1918
|
+
const maxCommitsToShow = 10;
|
|
1919
|
+
const commitsToShow = commits.slice(0, maxCommitsToShow);
|
|
1920
|
+
const hasMore = commits.length > maxCommitsToShow;
|
|
1921
|
+
const typeLength = commits.map(({ type }) => type.length).reduce((a, b) => Math.max(a, b), 0);
|
|
1922
|
+
const scopeLength = commits.map(({ scope }) => scope?.length).reduce((a, b) => Math.max(a || 0, b || 0), 0) || 0;
|
|
1923
|
+
const formattedCommits = commitsToShow.map((commit) => {
|
|
1924
|
+
let color = messageColorMap[commit.type] || ((c) => c);
|
|
1925
|
+
if (commit.isBreaking) color = (s) => farver.inverse.red(s);
|
|
1926
|
+
const paddedType = commit.type.padStart(typeLength + 1, " ");
|
|
1927
|
+
const paddedScope = !commit.scope ? " ".repeat(scopeLength ? scopeLength + 2 : 0) : farver.dim("(") + commit.scope + farver.dim(")") + " ".repeat(scopeLength - commit.scope.length);
|
|
1928
|
+
return [
|
|
1929
|
+
farver.dim(commit.shortHash),
|
|
1930
|
+
" ",
|
|
1931
|
+
color === farver.gray ? color(paddedType) : farver.bold(color(paddedType)),
|
|
1932
|
+
" ",
|
|
1933
|
+
paddedScope,
|
|
1934
|
+
farver.dim(":"),
|
|
1935
|
+
" ",
|
|
1936
|
+
color === farver.gray ? color(commit.description) : commit.description
|
|
1937
|
+
].join("");
|
|
1938
|
+
}).join("\n");
|
|
1939
|
+
if (hasMore) return `${formattedCommits}\n ${farver.dim(`... and ${commits.length - maxCommitsToShow} more commits`)}`;
|
|
1940
|
+
return formattedCommits;
|
|
1941
|
+
}
|
|
1942
|
+
/**
|
|
1943
|
+
* Pure function that resolves version bump from commits and overrides.
|
|
1944
|
+
* No IO, no prompts - fully testable in isolation.
|
|
1945
|
+
*/
|
|
1946
|
+
function resolveAutoVersion(pkg, packageCommits, globalCommits, override) {
|
|
1947
|
+
const determinedBump = determineHighestBump([...packageCommits, ...globalCommits]);
|
|
1948
|
+
const effectiveBump = override?.type || determinedBump;
|
|
1949
|
+
const autoVersion = getNextVersion(pkg.version, determinedBump);
|
|
1950
|
+
return {
|
|
1951
|
+
determinedBump,
|
|
1952
|
+
effectiveBump,
|
|
1953
|
+
autoVersion,
|
|
1954
|
+
resolvedVersion: override?.version || autoVersion
|
|
1955
|
+
};
|
|
1956
|
+
}
|
|
1957
|
+
/**
|
|
1958
|
+
* Pure function that computes the new dependency range.
|
|
1959
|
+
* Returns null if the dependency should not be updated (e.g. workspace:*).
|
|
1960
|
+
*/
|
|
1961
|
+
function computeDependencyRange(currentRange, newVersion, isPeerDependency) {
|
|
1962
|
+
if (currentRange === "workspace:*") return null;
|
|
1963
|
+
if (isPeerDependency) {
|
|
1964
|
+
const majorVersion = newVersion.split(".")[0];
|
|
1965
|
+
return `>=${newVersion} <${Number(majorVersion) + 1}.0.0`;
|
|
1966
|
+
}
|
|
1967
|
+
return `^${newVersion}`;
|
|
1968
|
+
}
|
|
1969
|
+
async function calculateVersionUpdates({ workspacePackages, packageCommits, workspaceRoot, showPrompt, globalCommitsPerPackage, overrides: initialOverrides = {} }) {
|
|
1970
|
+
const versionUpdates = [];
|
|
1971
|
+
const processedPackages = /* @__PURE__ */ new Set();
|
|
1972
|
+
const newOverrides = { ...initialOverrides };
|
|
1973
|
+
const excludedPackages = /* @__PURE__ */ new Set();
|
|
1974
|
+
logger.verbose(`Starting version inference for ${packageCommits.size} packages with commits`);
|
|
1975
|
+
for (const [pkgName, pkgCommits] of packageCommits) {
|
|
1976
|
+
const pkg = workspacePackages.find((p) => p.name === pkgName);
|
|
1977
|
+
if (!pkg) {
|
|
1978
|
+
logger.error(`Package ${pkgName} not found in workspace packages, skipping`);
|
|
1979
|
+
continue;
|
|
1980
|
+
}
|
|
1981
|
+
processedPackages.add(pkgName);
|
|
1982
|
+
const globalCommits = globalCommitsPerPackage.get(pkgName) || [];
|
|
1983
|
+
const allCommitsForPackage = [...pkgCommits, ...globalCommits];
|
|
1984
|
+
const override = newOverrides[pkgName];
|
|
1985
|
+
const { determinedBump, effectiveBump, autoVersion, resolvedVersion } = resolveAutoVersion(pkg, pkgCommits, globalCommits, override);
|
|
1986
|
+
const canPrompt = !getIsCI() && showPrompt;
|
|
1987
|
+
if (effectiveBump === "none" && !canPrompt) continue;
|
|
1988
|
+
let newVersion = resolvedVersion;
|
|
1989
|
+
let finalBumpType = effectiveBump;
|
|
1990
|
+
if (canPrompt) {
|
|
1991
|
+
logger.clearScreen();
|
|
1992
|
+
logger.section(`📝 Commits for ${farver.cyan(pkg.name)}`);
|
|
1993
|
+
formatCommitsForDisplay(allCommitsForPackage).split("\n").forEach((line) => logger.item(line));
|
|
1994
|
+
logger.item(farver.dim(`Auto bump: ${determinedBump} → ${autoVersion}`));
|
|
1995
|
+
logger.emptyLine();
|
|
1996
|
+
if (override) {
|
|
1997
|
+
const overrideChoice = await confirmOverridePrompt(pkg, override.version);
|
|
1998
|
+
if (overrideChoice === null) continue;
|
|
1999
|
+
if (overrideChoice === "use") {
|
|
2000
|
+
newOverrides[pkgName] = {
|
|
2001
|
+
type: override.type,
|
|
2002
|
+
version: override.version
|
|
2003
|
+
};
|
|
2004
|
+
if (override.version === pkg.version) excludedPackages.add(pkgName);
|
|
2005
|
+
versionUpdates.push({
|
|
2006
|
+
package: pkg,
|
|
2007
|
+
currentVersion: pkg.version,
|
|
2008
|
+
newVersion: override.version,
|
|
2009
|
+
bumpType: override.type,
|
|
2010
|
+
hasDirectChanges: allCommitsForPackage.length > 0,
|
|
2011
|
+
changeKind: override.version === pkg.version ? "as-is" : "manual"
|
|
2012
|
+
});
|
|
2013
|
+
continue;
|
|
2014
|
+
}
|
|
2015
|
+
newVersion = autoVersion;
|
|
2016
|
+
}
|
|
2017
|
+
const selectedVersion = await selectVersionPrompt(workspaceRoot, pkg, pkg.version, newVersion, {
|
|
2018
|
+
defaultChoice: override ? "suggested" : "auto",
|
|
2019
|
+
suggestedHint: `auto: ${determinedBump} → ${autoVersion}`
|
|
2020
|
+
});
|
|
2021
|
+
if (selectedVersion === null) continue;
|
|
2022
|
+
const userBump = calculateBumpType(pkg.version, selectedVersion);
|
|
2023
|
+
finalBumpType = userBump;
|
|
2024
|
+
if (selectedVersion === pkg.version) {
|
|
2025
|
+
excludedPackages.add(pkgName);
|
|
2026
|
+
newOverrides[pkgName] = {
|
|
2027
|
+
type: "none",
|
|
2028
|
+
version: pkg.version
|
|
2029
|
+
};
|
|
2030
|
+
logger.info(`Override set for ${pkgName}: manual as-is (${pkg.version})`);
|
|
2031
|
+
versionUpdates.push({
|
|
2032
|
+
package: pkg,
|
|
2033
|
+
currentVersion: pkg.version,
|
|
2034
|
+
newVersion: pkg.version,
|
|
2035
|
+
bumpType: "none",
|
|
2036
|
+
hasDirectChanges: allCommitsForPackage.length > 0,
|
|
2037
|
+
changeKind: "as-is"
|
|
2038
|
+
});
|
|
2039
|
+
continue;
|
|
2040
|
+
}
|
|
2041
|
+
newOverrides[pkgName] = {
|
|
2042
|
+
type: userBump,
|
|
2043
|
+
version: selectedVersion
|
|
2044
|
+
};
|
|
2045
|
+
logger.info(`Override set for ${pkgName}: manual ${userBump} (${selectedVersion})`);
|
|
2046
|
+
newVersion = selectedVersion;
|
|
2047
|
+
}
|
|
2048
|
+
versionUpdates.push({
|
|
2049
|
+
package: pkg,
|
|
2050
|
+
currentVersion: pkg.version,
|
|
2051
|
+
newVersion,
|
|
2052
|
+
bumpType: finalBumpType,
|
|
2053
|
+
hasDirectChanges: allCommitsForPackage.length > 0,
|
|
2054
|
+
changeKind: canPrompt ? "manual" : "auto"
|
|
2055
|
+
});
|
|
2056
|
+
}
|
|
2057
|
+
if (!getIsCI() && showPrompt) for (const pkg of workspacePackages) {
|
|
2058
|
+
if (processedPackages.has(pkg.name)) continue;
|
|
2059
|
+
logger.clearScreen();
|
|
2060
|
+
logger.section(`📦 Package: ${pkg.name}`);
|
|
2061
|
+
logger.item("No direct commits found");
|
|
2062
|
+
logger.item(farver.dim(`Auto bump: none → ${pkg.version}`));
|
|
2063
|
+
const newVersion = await selectVersionPrompt(workspaceRoot, pkg, pkg.version, pkg.version, {
|
|
2064
|
+
defaultChoice: "auto",
|
|
2065
|
+
suggestedHint: `auto: none → ${pkg.version}`
|
|
2066
|
+
});
|
|
2067
|
+
if (newVersion === null) break;
|
|
2068
|
+
if (newVersion === pkg.version) {
|
|
2069
|
+
excludedPackages.add(pkg.name);
|
|
2070
|
+
newOverrides[pkg.name] = {
|
|
2071
|
+
type: "none",
|
|
2072
|
+
version: pkg.version
|
|
2073
|
+
};
|
|
2074
|
+
logger.info(`Override set for ${pkg.name}: manual as-is (${pkg.version})`);
|
|
563
2075
|
continue;
|
|
564
2076
|
}
|
|
565
|
-
|
|
566
|
-
|
|
2077
|
+
const bumpType = calculateBumpType(pkg.version, newVersion);
|
|
2078
|
+
newOverrides[pkg.name] = {
|
|
2079
|
+
type: bumpType,
|
|
2080
|
+
version: newVersion
|
|
2081
|
+
};
|
|
2082
|
+
logger.info(`Override set for ${pkg.name}: manual ${bumpType} (${newVersion})`);
|
|
567
2083
|
versionUpdates.push({
|
|
568
2084
|
package: pkg,
|
|
569
2085
|
currentVersion: pkg.version,
|
|
570
2086
|
newVersion,
|
|
571
|
-
bumpType
|
|
572
|
-
hasDirectChanges:
|
|
2087
|
+
bumpType,
|
|
2088
|
+
hasDirectChanges: false,
|
|
2089
|
+
changeKind: "manual"
|
|
573
2090
|
});
|
|
574
2091
|
}
|
|
575
|
-
return
|
|
2092
|
+
return {
|
|
2093
|
+
updates: versionUpdates,
|
|
2094
|
+
overrides: newOverrides,
|
|
2095
|
+
excludedPackages
|
|
2096
|
+
};
|
|
2097
|
+
}
|
|
2098
|
+
/**
|
|
2099
|
+
* Calculate version updates and prepare dependent updates
|
|
2100
|
+
* Returns both the updates and a function to apply them
|
|
2101
|
+
*/
|
|
2102
|
+
async function calculateAndPrepareVersionUpdates({ workspacePackages, packageCommits, workspaceRoot, showPrompt, globalCommitsPerPackage, overrides }) {
|
|
2103
|
+
const { updates: directUpdates, overrides: newOverrides, excludedPackages: promptExcludedPackages } = await calculateVersionUpdates({
|
|
2104
|
+
workspacePackages,
|
|
2105
|
+
packageCommits,
|
|
2106
|
+
workspaceRoot,
|
|
2107
|
+
showPrompt,
|
|
2108
|
+
globalCommitsPerPackage,
|
|
2109
|
+
overrides
|
|
2110
|
+
});
|
|
2111
|
+
const graph = buildPackageDependencyGraph(workspacePackages);
|
|
2112
|
+
const overrideExcludedPackages = new Set(Object.entries(newOverrides).filter(([, override]) => override.type === "none").map(([pkgName]) => pkgName));
|
|
2113
|
+
const allUpdates = createDependentUpdates(graph, workspacePackages, directUpdates, new Set([...overrideExcludedPackages, ...promptExcludedPackages]));
|
|
2114
|
+
const applyUpdates = async () => {
|
|
2115
|
+
await Promise.all(allUpdates.map(async (update) => {
|
|
2116
|
+
const depUpdates = getDependencyUpdates(update.package, allUpdates);
|
|
2117
|
+
await updatePackageJson(update.package, update.newVersion, depUpdates);
|
|
2118
|
+
}));
|
|
2119
|
+
};
|
|
2120
|
+
return {
|
|
2121
|
+
allUpdates,
|
|
2122
|
+
applyUpdates,
|
|
2123
|
+
overrides: newOverrides
|
|
2124
|
+
};
|
|
576
2125
|
}
|
|
577
2126
|
async function updatePackageJson(pkg, newVersion, dependencyUpdates) {
|
|
578
2127
|
const packageJsonPath = join(pkg.path, "package.json");
|
|
579
2128
|
const content = await readFile(packageJsonPath, "utf-8");
|
|
580
2129
|
const packageJson = JSON.parse(content);
|
|
581
|
-
packageJson.version = newVersion;
|
|
582
|
-
|
|
583
|
-
if (
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
if (
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
}
|
|
591
|
-
if (packageJson.peerDependencies?.[depName]) {
|
|
592
|
-
if (packageJson.peerDependencies[depName] === "workspace:*") continue;
|
|
593
|
-
packageJson.peerDependencies[depName] = `^${depVersion}`;
|
|
2130
|
+
packageJson.version = newVersion;
|
|
2131
|
+
function updateDependency(deps, depName, depVersion, isPeerDependency = false) {
|
|
2132
|
+
if (!deps) return;
|
|
2133
|
+
const oldVersion = deps[depName];
|
|
2134
|
+
if (!oldVersion) return;
|
|
2135
|
+
const newRange = computeDependencyRange(oldVersion, depVersion, isPeerDependency);
|
|
2136
|
+
if (newRange === null) {
|
|
2137
|
+
logger.verbose(` - Skipping workspace:* dependency: ${depName}`);
|
|
2138
|
+
return;
|
|
594
2139
|
}
|
|
2140
|
+
deps[depName] = newRange;
|
|
2141
|
+
logger.verbose(` - Updated dependency ${depName}: ${oldVersion} → ${newRange}`);
|
|
2142
|
+
}
|
|
2143
|
+
for (const [depName, depVersion] of dependencyUpdates) {
|
|
2144
|
+
updateDependency(packageJson.dependencies, depName, depVersion);
|
|
2145
|
+
updateDependency(packageJson.devDependencies, depName, depVersion);
|
|
2146
|
+
updateDependency(packageJson.peerDependencies, depName, depVersion, true);
|
|
595
2147
|
}
|
|
596
2148
|
await writeFile(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf-8");
|
|
2149
|
+
logger.verbose(` - Successfully wrote updated package.json`);
|
|
597
2150
|
}
|
|
598
2151
|
/**
|
|
599
2152
|
* Get all dependency updates needed for a package
|
|
@@ -603,261 +2156,706 @@ function getDependencyUpdates(pkg, allUpdates) {
|
|
|
603
2156
|
const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
|
|
604
2157
|
for (const dep of allDeps) {
|
|
605
2158
|
const update = allUpdates.find((u) => u.package.name === dep);
|
|
606
|
-
if (update)
|
|
2159
|
+
if (update) {
|
|
2160
|
+
logger.verbose(` - Dependency ${dep} will be updated: ${update.currentVersion} → ${update.newVersion} (${update.bumpType})`);
|
|
2161
|
+
updates.set(dep, update.newVersion);
|
|
2162
|
+
}
|
|
607
2163
|
}
|
|
2164
|
+
if (updates.size === 0) logger.verbose(` - No dependency updates needed`);
|
|
608
2165
|
return updates;
|
|
609
2166
|
}
|
|
610
|
-
|
|
611
2167
|
//#endregion
|
|
612
|
-
//#region src/
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
2168
|
+
//#region src/operations/calculate.ts
|
|
2169
|
+
async function calculateUpdates(options) {
|
|
2170
|
+
const { workspacePackages, workspaceRoot, showPrompt, overrides, globalCommitMode } = options;
|
|
2171
|
+
try {
|
|
2172
|
+
const grouped = await getWorkspacePackageGroupedCommits(workspaceRoot, workspacePackages);
|
|
2173
|
+
return ok(await calculateAndPrepareVersionUpdates({
|
|
2174
|
+
workspacePackages,
|
|
2175
|
+
packageCommits: grouped,
|
|
2176
|
+
workspaceRoot,
|
|
2177
|
+
showPrompt,
|
|
2178
|
+
globalCommitsPerPackage: await getGlobalCommitsPerPackage(workspaceRoot, grouped, workspacePackages, globalCommitMode),
|
|
2179
|
+
overrides
|
|
2180
|
+
}));
|
|
2181
|
+
} catch (error) {
|
|
2182
|
+
const formatted = formatUnknownError(error);
|
|
2183
|
+
return err({
|
|
2184
|
+
type: "git",
|
|
2185
|
+
operation: "calculateUpdates",
|
|
2186
|
+
message: formatted.message,
|
|
2187
|
+
stderr: formatted.stderr
|
|
2188
|
+
});
|
|
629
2189
|
}
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
2190
|
+
}
|
|
2191
|
+
function ensureHasPackages(packages) {
|
|
2192
|
+
if (packages.length === 0) return err({
|
|
2193
|
+
type: "git",
|
|
2194
|
+
operation: "discoverWorkspacePackages",
|
|
2195
|
+
message: "No packages found to release"
|
|
2196
|
+
});
|
|
2197
|
+
return ok(packages);
|
|
2198
|
+
}
|
|
2199
|
+
//#endregion
|
|
2200
|
+
//#region src/operations/pr.ts
|
|
2201
|
+
async function syncPullRequest(options) {
|
|
2202
|
+
const { github, releaseBranch, defaultBranch, pullRequestTitle, pullRequestBody, updates } = options;
|
|
2203
|
+
let existing = null;
|
|
2204
|
+
try {
|
|
2205
|
+
existing = await github.getExistingPullRequest(releaseBranch);
|
|
2206
|
+
} catch (error) {
|
|
2207
|
+
return {
|
|
2208
|
+
ok: false,
|
|
2209
|
+
error: toGitHubError("getExistingPullRequest", error)
|
|
2210
|
+
};
|
|
2211
|
+
}
|
|
2212
|
+
const doesExist = !!existing;
|
|
2213
|
+
const title = existing?.title || pullRequestTitle || "chore: update package versions";
|
|
2214
|
+
const body = generatePullRequestBody(updates, pullRequestBody);
|
|
2215
|
+
let pr = null;
|
|
2216
|
+
try {
|
|
2217
|
+
pr = await github.upsertPullRequest({
|
|
2218
|
+
pullNumber: existing?.number,
|
|
2219
|
+
title,
|
|
2220
|
+
body,
|
|
2221
|
+
head: releaseBranch,
|
|
2222
|
+
base: defaultBranch
|
|
2223
|
+
});
|
|
2224
|
+
} catch (error) {
|
|
2225
|
+
return {
|
|
2226
|
+
ok: false,
|
|
2227
|
+
error: toGitHubError("upsertPullRequest", error)
|
|
2228
|
+
};
|
|
2229
|
+
}
|
|
2230
|
+
return ok({
|
|
2231
|
+
pullRequest: pr,
|
|
2232
|
+
created: !doesExist
|
|
2233
|
+
});
|
|
2234
|
+
}
|
|
2235
|
+
//#endregion
|
|
2236
|
+
//#region src/workflows/prepare.ts
|
|
2237
|
+
async function prepareWorkflow(options) {
|
|
2238
|
+
if (options.safeguards) {
|
|
2239
|
+
const clean = await isWorkingDirectoryClean(options.workspaceRoot);
|
|
2240
|
+
if (!clean.ok) exitWithError("Failed to verify working directory state.", "Ensure this is a valid git repository and try again.", clean.error);
|
|
2241
|
+
if (!clean.value) exitWithError("Working directory is not clean. Please commit or stash your changes before proceeding.");
|
|
2242
|
+
}
|
|
2243
|
+
const discovered = await discoverWorkspacePackages(options.workspaceRoot, options);
|
|
2244
|
+
if (!discovered.ok) exitWithError("Failed to discover packages.", void 0, discovered.error);
|
|
2245
|
+
const ensured = ensureHasPackages(discovered.value);
|
|
2246
|
+
if (!ensured.ok) {
|
|
2247
|
+
logger.warn(ensured.error.message);
|
|
2248
|
+
return null;
|
|
2249
|
+
}
|
|
2250
|
+
const workspacePackages = ensured.value;
|
|
2251
|
+
logger.section("📦 Workspace Packages");
|
|
2252
|
+
logger.item(`Found ${workspacePackages.length} packages`);
|
|
2253
|
+
for (const pkg of workspacePackages) {
|
|
2254
|
+
logger.item(`${farver.cyan(pkg.name)} (${farver.bold(pkg.version)})`);
|
|
2255
|
+
logger.item(` ${farver.gray("→")} ${farver.gray(pkg.path)}`);
|
|
2256
|
+
}
|
|
2257
|
+
logger.emptyLine();
|
|
2258
|
+
const prepareBranchResult = await prepareReleaseBranch({
|
|
2259
|
+
workspaceRoot: options.workspaceRoot,
|
|
2260
|
+
releaseBranch: options.branch.release,
|
|
2261
|
+
defaultBranch: options.branch.default
|
|
2262
|
+
});
|
|
2263
|
+
if (!prepareBranchResult.ok) exitWithError("Failed to prepare release branch.", void 0, prepareBranchResult.error);
|
|
2264
|
+
const overridesPath = join(options.workspaceRoot, ucdjsReleaseOverridesPath);
|
|
2265
|
+
let existingOverrides = {};
|
|
2266
|
+
try {
|
|
2267
|
+
const overridesContent = await readFile(overridesPath, "utf-8");
|
|
2268
|
+
existingOverrides = JSON.parse(overridesContent);
|
|
2269
|
+
logger.info("Found existing version overrides file.");
|
|
2270
|
+
} catch (error) {
|
|
2271
|
+
logger.info("No existing version overrides file found. Continuing...");
|
|
2272
|
+
logger.verbose(`Reading overrides file failed: ${formatUnknownError(error).message}`);
|
|
2273
|
+
}
|
|
2274
|
+
if (Object.keys(existingOverrides).length > 0) {
|
|
2275
|
+
const packageNames = new Set(workspacePackages.map((p) => p.name));
|
|
2276
|
+
const staleEntries = [];
|
|
2277
|
+
for (const [pkgName, override] of Object.entries(existingOverrides)) {
|
|
2278
|
+
if (!packageNames.has(pkgName)) {
|
|
2279
|
+
staleEntries.push(pkgName);
|
|
2280
|
+
delete existingOverrides[pkgName];
|
|
2281
|
+
continue;
|
|
2282
|
+
}
|
|
2283
|
+
const pkg = workspacePackages.find((p) => p.name === pkgName);
|
|
2284
|
+
if (pkg && semver.valid(override.version) && semver.gte(pkg.version, override.version)) {
|
|
2285
|
+
staleEntries.push(pkgName);
|
|
2286
|
+
delete existingOverrides[pkgName];
|
|
2287
|
+
}
|
|
2288
|
+
}
|
|
2289
|
+
if (staleEntries.length > 0) logger.info(`Removed ${staleEntries.length} stale override(s): ${staleEntries.join(", ")}`);
|
|
2290
|
+
}
|
|
2291
|
+
const updatesResult = await calculateUpdates({
|
|
2292
|
+
workspacePackages,
|
|
2293
|
+
workspaceRoot: options.workspaceRoot,
|
|
2294
|
+
showPrompt: options.prompts?.versions !== false,
|
|
2295
|
+
globalCommitMode: options.globalCommitMode === "none" ? false : options.globalCommitMode,
|
|
2296
|
+
overrides: existingOverrides
|
|
2297
|
+
});
|
|
2298
|
+
if (!updatesResult.ok) exitWithError("Failed to calculate package updates.", void 0, updatesResult.error);
|
|
2299
|
+
const { allUpdates, applyUpdates, overrides: newOverrides } = updatesResult.value;
|
|
2300
|
+
const hasOverrideChanges = JSON.stringify(existingOverrides) !== JSON.stringify(newOverrides);
|
|
2301
|
+
if (Object.keys(newOverrides).length > 0 && hasOverrideChanges) {
|
|
2302
|
+
logger.step("Writing version overrides file...");
|
|
2303
|
+
try {
|
|
2304
|
+
await mkdir(join(options.workspaceRoot, ".github"), { recursive: true });
|
|
2305
|
+
await writeFile(overridesPath, JSON.stringify(newOverrides, null, 2), "utf-8");
|
|
2306
|
+
logger.success("Successfully wrote version overrides file.");
|
|
2307
|
+
} catch (e) {
|
|
2308
|
+
logger.error("Failed to write version overrides file:", e);
|
|
2309
|
+
}
|
|
2310
|
+
} else if (Object.keys(newOverrides).length > 0) logger.step("Version overrides unchanged. Skipping write.");
|
|
2311
|
+
if (Object.keys(newOverrides).length === 0 && hasOverrideChanges) {
|
|
2312
|
+
logger.info("Removing obsolete version overrides file...");
|
|
2313
|
+
try {
|
|
2314
|
+
await rm(overridesPath);
|
|
2315
|
+
logger.success("Successfully removed obsolete version overrides file.");
|
|
2316
|
+
} catch (e) {
|
|
2317
|
+
if (formatUnknownError(e).code !== "ENOENT") logger.error("Failed to remove obsolete version overrides file:", e);
|
|
2318
|
+
}
|
|
2319
|
+
}
|
|
2320
|
+
if (allUpdates.filter((u) => u.hasDirectChanges).length === 0) logger.warn("No packages have changes requiring a release");
|
|
2321
|
+
logger.section("🔄 Version Updates");
|
|
2322
|
+
logger.item(`Updating ${allUpdates.length} packages (including dependents)`);
|
|
2323
|
+
for (const update of allUpdates) {
|
|
2324
|
+
const suffix = update.changeKind === "as-is" ? farver.dim(" (as-is)") : "";
|
|
2325
|
+
logger.item(`${update.package.name}: ${update.currentVersion} → ${update.newVersion}${suffix}`);
|
|
2326
|
+
}
|
|
2327
|
+
await applyUpdates();
|
|
2328
|
+
if (options.changelog?.enabled) {
|
|
2329
|
+
logger.step("Updating changelogs");
|
|
2330
|
+
const groupedPackageCommits = await getWorkspacePackageGroupedCommits(options.workspaceRoot, workspacePackages);
|
|
2331
|
+
const globalCommitsPerPackage = await getGlobalCommitsPerPackage(options.workspaceRoot, groupedPackageCommits, workspacePackages, options.globalCommitMode === "none" ? false : options.globalCommitMode);
|
|
2332
|
+
const changelogPromises = allUpdates.map((update) => {
|
|
2333
|
+
return (async () => {
|
|
2334
|
+
let pkgCommits = groupedPackageCommits.get(update.package.name) || [];
|
|
2335
|
+
let globalCommits = globalCommitsPerPackage.get(update.package.name) || [];
|
|
2336
|
+
let previousVersionForChangelog = update.currentVersion !== "0.0.0" ? update.currentVersion : void 0;
|
|
2337
|
+
if (options.changelog.combinePrereleaseIntoFirstStable && semver.prerelease(update.currentVersion) != null && semver.prerelease(update.newVersion) == null) {
|
|
2338
|
+
const stableTagResult = await getMostRecentPackageStableTag(options.workspaceRoot, update.package.name);
|
|
2339
|
+
if (!stableTagResult.ok) logger.warn(`Failed to resolve stable tag for ${update.package.name}: ${stableTagResult.error.message}`);
|
|
2340
|
+
else {
|
|
2341
|
+
const stableTag = stableTagResult.value;
|
|
2342
|
+
if (stableTag) {
|
|
2343
|
+
logger.verbose(`Combining prerelease changelog entries into stable release for ${update.package.name} using base tag ${stableTag}`);
|
|
2344
|
+
const stableBaseCommits = await getPackageCommitsSinceTag(options.workspaceRoot, update.package, stableTag);
|
|
2345
|
+
pkgCommits = stableBaseCommits;
|
|
2346
|
+
globalCommits = (await getGlobalCommitsPerPackage(options.workspaceRoot, new Map([[update.package.name, stableBaseCommits]]), workspacePackages, options.globalCommitMode === "none" ? false : options.globalCommitMode)).get(update.package.name) || [];
|
|
2347
|
+
const atIndex = stableTag.lastIndexOf("@");
|
|
2348
|
+
if (atIndex !== -1) previousVersionForChangelog = stableTag.slice(atIndex + 1);
|
|
2349
|
+
}
|
|
2350
|
+
}
|
|
2351
|
+
}
|
|
2352
|
+
const allCommits = [...pkgCommits, ...globalCommits];
|
|
2353
|
+
if (allCommits.length === 0) logger.verbose(`No commits for ${update.package.name}, writing changelog entry with no-significant-commits note`);
|
|
2354
|
+
logger.verbose(`Updating changelog for ${farver.cyan(update.package.name)}`);
|
|
2355
|
+
await updateChangelog({
|
|
2356
|
+
normalizedOptions: {
|
|
2357
|
+
...options,
|
|
2358
|
+
workspaceRoot: options.workspaceRoot
|
|
2359
|
+
},
|
|
2360
|
+
githubClient: options.githubClient,
|
|
2361
|
+
workspacePackage: update.package,
|
|
2362
|
+
version: update.newVersion,
|
|
2363
|
+
previousVersion: previousVersionForChangelog,
|
|
2364
|
+
commits: allCommits,
|
|
2365
|
+
date: (/* @__PURE__ */ new Date()).toISOString().split("T")[0]
|
|
2366
|
+
});
|
|
2367
|
+
})();
|
|
2368
|
+
}).filter((p) => p != null);
|
|
2369
|
+
const updates = await Promise.all(changelogPromises);
|
|
2370
|
+
logger.success(`Updated ${updates.length} changelog(s)`);
|
|
2371
|
+
}
|
|
2372
|
+
const hasChangesToPush = await syncReleaseChanges({
|
|
2373
|
+
workspaceRoot: options.workspaceRoot,
|
|
2374
|
+
releaseBranch: options.branch.release,
|
|
2375
|
+
commitMessage: "chore: update release versions",
|
|
2376
|
+
hasChanges: true
|
|
2377
|
+
});
|
|
2378
|
+
if (!hasChangesToPush.ok) exitWithError("Failed to sync release changes.", void 0, hasChangesToPush.error);
|
|
2379
|
+
if (!hasChangesToPush.value) {
|
|
2380
|
+
const prResult = await syncPullRequest({
|
|
2381
|
+
github: options.githubClient,
|
|
2382
|
+
releaseBranch: options.branch.release,
|
|
2383
|
+
defaultBranch: options.branch.default,
|
|
2384
|
+
pullRequestTitle: options.pullRequest?.title,
|
|
2385
|
+
pullRequestBody: options.pullRequest?.body,
|
|
2386
|
+
updates: allUpdates
|
|
2387
|
+
});
|
|
2388
|
+
if (!prResult.ok) exitWithError("Failed to sync release pull request.", void 0, prResult.error);
|
|
2389
|
+
if (prResult.value.pullRequest) {
|
|
2390
|
+
logger.item("No updates needed, PR is already up to date");
|
|
2391
|
+
const checkoutResult = await checkoutBranch(options.branch.default, options.workspaceRoot);
|
|
2392
|
+
if (!checkoutResult.ok) exitWithError(`Failed to checkout branch: ${options.branch.default}`, void 0, checkoutResult.error);
|
|
2393
|
+
return {
|
|
2394
|
+
updates: allUpdates,
|
|
2395
|
+
prUrl: prResult.value.pullRequest.html_url,
|
|
2396
|
+
created: prResult.value.created
|
|
2397
|
+
};
|
|
635
2398
|
}
|
|
2399
|
+
logger.error("No changes to commit, and no existing PR. Nothing to do.");
|
|
2400
|
+
return null;
|
|
2401
|
+
}
|
|
2402
|
+
const prResult = await syncPullRequest({
|
|
2403
|
+
github: options.githubClient,
|
|
2404
|
+
releaseBranch: options.branch.release,
|
|
2405
|
+
defaultBranch: options.branch.default,
|
|
2406
|
+
pullRequestTitle: options.pullRequest?.title,
|
|
2407
|
+
pullRequestBody: options.pullRequest?.body,
|
|
2408
|
+
updates: allUpdates
|
|
2409
|
+
});
|
|
2410
|
+
if (!prResult.ok) exitWithError("Failed to sync release pull request.", void 0, prResult.error);
|
|
2411
|
+
if (prResult.value.pullRequest?.html_url) {
|
|
2412
|
+
logger.section("🚀 Pull Request");
|
|
2413
|
+
logger.success(`Pull request ${prResult.value.created ? "created" : "updated"}: ${prResult.value.pullRequest.html_url}`);
|
|
636
2414
|
}
|
|
2415
|
+
const returnToDefault = await checkoutBranch(options.branch.default, options.workspaceRoot);
|
|
2416
|
+
if (!returnToDefault.ok) exitWithError(`Failed to checkout branch: ${options.branch.default}`, void 0, returnToDefault.error);
|
|
2417
|
+
if (!returnToDefault.value) exitWithError(`Failed to checkout branch: ${options.branch.default}`);
|
|
637
2418
|
return {
|
|
638
|
-
|
|
639
|
-
|
|
2419
|
+
updates: allUpdates,
|
|
2420
|
+
prUrl: prResult.value.pullRequest?.html_url,
|
|
2421
|
+
created: prResult.value.created
|
|
2422
|
+
};
|
|
2423
|
+
}
|
|
2424
|
+
//#endregion
|
|
2425
|
+
//#region src/core/npm.ts
|
|
2426
|
+
function toNPMError(operation, error, code) {
|
|
2427
|
+
const formatted = formatUnknownError(error);
|
|
2428
|
+
return {
|
|
2429
|
+
type: "npm",
|
|
2430
|
+
operation,
|
|
2431
|
+
message: formatted.message,
|
|
2432
|
+
code: code || formatted.code,
|
|
2433
|
+
stderr: formatted.stderr,
|
|
2434
|
+
status: formatted.status
|
|
640
2435
|
};
|
|
641
2436
|
}
|
|
2437
|
+
function classifyPublishErrorCode(error) {
|
|
2438
|
+
const formatted = formatUnknownError(error);
|
|
2439
|
+
const combined = [formatted.message, formatted.stderr].filter(Boolean).join("\n");
|
|
2440
|
+
if (combined.includes("E403") || combined.toLowerCase().includes("access token expired or revoked")) return "E403";
|
|
2441
|
+
if (combined.includes("EPUBLISHCONFLICT") || combined.includes("E409") || combined.includes("409 Conflict") || combined.includes("Failed to save packument")) return "EPUBLISHCONFLICT";
|
|
2442
|
+
if (combined.includes("EOTP")) return "EOTP";
|
|
2443
|
+
}
|
|
2444
|
+
function wait(ms) {
|
|
2445
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
2446
|
+
}
|
|
642
2447
|
/**
|
|
643
|
-
* Get
|
|
644
|
-
*
|
|
645
|
-
* Uses graph traversal to find all packages that need updates:
|
|
646
|
-
* - Packages with direct changes
|
|
647
|
-
* - All packages that depend on changed packages (transitively)
|
|
648
|
-
*
|
|
649
|
-
* @param graph - Dependency graph
|
|
650
|
-
* @param changedPackages - Set of package names with direct changes
|
|
651
|
-
* @returns Set of all package names that need updates
|
|
2448
|
+
* Get the NPM registry URL
|
|
2449
|
+
* Respects NPM_CONFIG_REGISTRY environment variable, defaults to npmjs.org
|
|
652
2450
|
*/
|
|
653
|
-
function
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
|
|
2451
|
+
function getRegistryURL() {
|
|
2452
|
+
return process.env.NPM_CONFIG_REGISTRY || "https://registry.npmjs.org";
|
|
2453
|
+
}
|
|
2454
|
+
/**
|
|
2455
|
+
* Fetch package metadata from NPM registry
|
|
2456
|
+
* @param packageName - The package name (e.g., "lodash" or "@scope/name")
|
|
2457
|
+
* @returns Result with package metadata or error
|
|
2458
|
+
*/
|
|
2459
|
+
async function getPackageMetadata(packageName) {
|
|
2460
|
+
try {
|
|
2461
|
+
const registry = getRegistryURL();
|
|
2462
|
+
const encodedName = packageName.startsWith("@") ? `@${encodeURIComponent(packageName.slice(1))}` : encodeURIComponent(packageName);
|
|
2463
|
+
const response = await fetch(`${registry}/${encodedName}`, { headers: { Accept: "application/json" } });
|
|
2464
|
+
if (!response.ok) {
|
|
2465
|
+
if (response.status === 404) return err(toNPMError("getPackageMetadata", `Package not found: ${packageName}`, "E404"));
|
|
2466
|
+
return err(toNPMError("getPackageMetadata", `HTTP ${response.status}: ${response.statusText}`));
|
|
2467
|
+
}
|
|
2468
|
+
return ok(await response.json());
|
|
2469
|
+
} catch (error) {
|
|
2470
|
+
return err(toNPMError("getPackageMetadata", error, "ENETWORK"));
|
|
660
2471
|
}
|
|
661
|
-
for (const pkg of changedPackages) visitDependents(pkg);
|
|
662
|
-
return affected;
|
|
663
2472
|
}
|
|
664
2473
|
/**
|
|
665
|
-
*
|
|
666
|
-
*
|
|
667
|
-
*
|
|
668
|
-
*
|
|
669
|
-
*
|
|
670
|
-
* @param graph - Dependency graph
|
|
671
|
-
* @param workspacePackages - All workspace packages
|
|
672
|
-
* @param directUpdates - Packages with direct code changes
|
|
673
|
-
* @returns All updates including dependent packages that need patch bumps
|
|
2474
|
+
* Check if a specific package version exists on NPM
|
|
2475
|
+
* @param packageName - The package name
|
|
2476
|
+
* @param version - The version to check (e.g., "1.2.3")
|
|
2477
|
+
* @returns Result with boolean (true if version exists) or error
|
|
674
2478
|
*/
|
|
675
|
-
function
|
|
676
|
-
const
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
if (directUpdateMap.has(pkgName)) continue;
|
|
681
|
-
const pkg = workspacePackages.find((p) => p.name === pkgName);
|
|
682
|
-
if (!pkg) continue;
|
|
683
|
-
allUpdates.push(createVersionUpdate(pkg, "patch", false));
|
|
2479
|
+
async function checkVersionExists(packageName, version) {
|
|
2480
|
+
const metadataResult = await getPackageMetadata(packageName);
|
|
2481
|
+
if (!metadataResult.ok) {
|
|
2482
|
+
if (metadataResult.error.code === "E404") return ok(false);
|
|
2483
|
+
return err(metadataResult.error);
|
|
684
2484
|
}
|
|
685
|
-
return
|
|
2485
|
+
return ok(version in metadataResult.value.versions);
|
|
686
2486
|
}
|
|
687
2487
|
/**
|
|
688
|
-
*
|
|
689
|
-
*
|
|
690
|
-
*
|
|
691
|
-
*
|
|
692
|
-
* @param
|
|
2488
|
+
* Publish a package to NPM
|
|
2489
|
+
* Uses pnpm to handle workspace protocol and catalog: resolution automatically
|
|
2490
|
+
* @param packageName - The package name to publish
|
|
2491
|
+
* @param version - The package version to publish
|
|
2492
|
+
* @param workspaceRoot - Path to the workspace root
|
|
2493
|
+
* @param options - Normalized release scripts options
|
|
2494
|
+
* @returns Result indicating success or failure
|
|
693
2495
|
*/
|
|
694
|
-
async function
|
|
695
|
-
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
|
|
2496
|
+
async function publishPackage(packageName, version, workspaceRoot, options) {
|
|
2497
|
+
const args = [
|
|
2498
|
+
"--filter",
|
|
2499
|
+
packageName,
|
|
2500
|
+
"publish",
|
|
2501
|
+
"--access",
|
|
2502
|
+
options.npm.access,
|
|
2503
|
+
"--no-git-checks"
|
|
2504
|
+
];
|
|
2505
|
+
if (options.npm.otp) args.push("--otp", options.npm.otp);
|
|
2506
|
+
const explicitTag = process.env.NPM_CONFIG_TAG;
|
|
2507
|
+
const prereleaseTag = (() => {
|
|
2508
|
+
const prerelease = semver.prerelease(version);
|
|
2509
|
+
if (!prerelease || prerelease.length === 0) return;
|
|
2510
|
+
const identifier = prerelease[0];
|
|
2511
|
+
if (identifier === "alpha" || identifier === "beta") return identifier;
|
|
2512
|
+
return "next";
|
|
2513
|
+
})();
|
|
2514
|
+
const publishTag = explicitTag || prereleaseTag;
|
|
2515
|
+
if (publishTag) args.push("--tag", publishTag);
|
|
2516
|
+
const env = { ...process.env };
|
|
2517
|
+
if (options.npm.provenance) env.NPM_CONFIG_PROVENANCE = "true";
|
|
2518
|
+
const maxAttempts = 4;
|
|
2519
|
+
const backoffMs = [
|
|
2520
|
+
3e3,
|
|
2521
|
+
8e3,
|
|
2522
|
+
15e3
|
|
2523
|
+
];
|
|
2524
|
+
for (let attempt = 1; attempt <= maxAttempts; attempt++) try {
|
|
2525
|
+
const result = await runIfNotDry("pnpm", args, { nodeOptions: {
|
|
2526
|
+
cwd: workspaceRoot,
|
|
2527
|
+
stdio: "pipe",
|
|
2528
|
+
env
|
|
2529
|
+
} });
|
|
2530
|
+
if (result?.stdout && result.stdout.trim()) logger.verbose(result.stdout.trim());
|
|
2531
|
+
if (result?.stderr && result.stderr.trim()) logger.verbose(result.stderr.trim());
|
|
2532
|
+
return ok(void 0);
|
|
2533
|
+
} catch (error) {
|
|
2534
|
+
const code = classifyPublishErrorCode(error);
|
|
2535
|
+
if (code === "EPUBLISHCONFLICT" && attempt < maxAttempts) {
|
|
2536
|
+
const delay = backoffMs[attempt - 1] ?? backoffMs.at(-1);
|
|
2537
|
+
logger.warn(`Publish conflict for ${packageName}@${version} (attempt ${attempt}/${maxAttempts}). Retrying in ${Math.ceil(delay / 1e3)}s...`);
|
|
2538
|
+
await wait(delay);
|
|
2539
|
+
continue;
|
|
2540
|
+
}
|
|
2541
|
+
return err(toNPMError("publishPackage", error, code));
|
|
2542
|
+
}
|
|
2543
|
+
return err(toNPMError("publishPackage", /* @__PURE__ */ new Error(`Failed to publish ${packageName}@${version} after ${maxAttempts} attempts`), "EPUBLISHCONFLICT"));
|
|
699
2544
|
}
|
|
700
|
-
|
|
701
2545
|
//#endregion
|
|
702
|
-
//#region src/
|
|
703
|
-
async function
|
|
704
|
-
|
|
705
|
-
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
|
|
709
|
-
|
|
710
|
-
|
|
711
|
-
|
|
712
|
-
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
if (
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
workspacePackages = workspacePackages.filter((pkg) => selectedNames.includes(pkg.name));
|
|
2546
|
+
//#region src/workflows/publish.ts
|
|
2547
|
+
async function getReleaseBodyFromChangelog(workspaceRoot, packageName, packagePath, version) {
|
|
2548
|
+
const changelogPath = join(packagePath, "CHANGELOG.md");
|
|
2549
|
+
try {
|
|
2550
|
+
const entry = parseChangelog(await readFile(changelogPath, "utf-8")).versions.find((v) => v.version === version);
|
|
2551
|
+
if (!entry) return [
|
|
2552
|
+
`## ${packageName}@${version}`,
|
|
2553
|
+
"",
|
|
2554
|
+
"⚠️ Could not find a matching changelog entry for this version.",
|
|
2555
|
+
"",
|
|
2556
|
+
`Expected version ${version} in ${changelogPath}.`
|
|
2557
|
+
].join("\n");
|
|
2558
|
+
const lines = entry.content.trim().split("\n");
|
|
2559
|
+
if (lines[0]?.trim().startsWith("## ")) return lines.slice(1).join("\n").trim();
|
|
2560
|
+
return entry.content.trim();
|
|
2561
|
+
} catch {
|
|
2562
|
+
logger.verbose(`Could not read changelog entry for ${version} at ${changelogPath}`);
|
|
2563
|
+
return [
|
|
2564
|
+
`## ${packageName}@${version}`,
|
|
2565
|
+
"",
|
|
2566
|
+
"⚠️ Could not read package changelog while creating this release.",
|
|
2567
|
+
"",
|
|
2568
|
+
`Expected changelog file: ${changelogPath}`
|
|
2569
|
+
].join("\n");
|
|
727
2570
|
}
|
|
728
|
-
return workspacePackages;
|
|
729
2571
|
}
|
|
730
|
-
async function
|
|
2572
|
+
async function cleanupPublishedOverrides(options, workspacePackages, publishedPackageNames) {
|
|
2573
|
+
if (publishedPackageNames.length === 0) return false;
|
|
2574
|
+
if (options.dryRun) {
|
|
2575
|
+
logger.verbose("Dry-run: skipping override cleanup");
|
|
2576
|
+
return false;
|
|
2577
|
+
}
|
|
2578
|
+
const overridesPath = join(options.workspaceRoot, ucdjsReleaseOverridesPath);
|
|
2579
|
+
let overrides;
|
|
731
2580
|
try {
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
"--json"
|
|
736
|
-
], { nodeOptions: {
|
|
737
|
-
cwd: workspaceRoot,
|
|
738
|
-
stdio: "pipe"
|
|
739
|
-
} });
|
|
740
|
-
const rawProjects = JSON.parse(result.stdout);
|
|
741
|
-
const allPackageNames = new Set(rawProjects.map((p) => p.name));
|
|
742
|
-
const excludedPackages = /* @__PURE__ */ new Set();
|
|
743
|
-
const promises = rawProjects.map(async (rawProject) => {
|
|
744
|
-
const content = await readFile(join(rawProject.path, "package.json"), "utf-8");
|
|
745
|
-
const packageJson = JSON.parse(content);
|
|
746
|
-
if (!shouldIncludePackage(packageJson, options)) {
|
|
747
|
-
excludedPackages.add(rawProject.name);
|
|
748
|
-
return null;
|
|
749
|
-
}
|
|
750
|
-
return {
|
|
751
|
-
name: rawProject.name,
|
|
752
|
-
version: rawProject.version,
|
|
753
|
-
path: rawProject.path,
|
|
754
|
-
packageJson,
|
|
755
|
-
workspaceDependencies: Object.keys(rawProject.dependencies || []).filter((dep) => {
|
|
756
|
-
return allPackageNames.has(dep);
|
|
757
|
-
}),
|
|
758
|
-
workspaceDevDependencies: Object.keys(rawProject.devDependencies || []).filter((dep) => {
|
|
759
|
-
return allPackageNames.has(dep);
|
|
760
|
-
})
|
|
761
|
-
};
|
|
762
|
-
});
|
|
763
|
-
const packages = await Promise.all(promises);
|
|
764
|
-
if (excludedPackages.size > 0) logger.info(`Excluded packages: ${farver.green(Array.from(excludedPackages).join(", "))}`);
|
|
765
|
-
return packages.filter((pkg) => pkg !== null);
|
|
766
|
-
} catch (err) {
|
|
767
|
-
logger.error("Error discovering workspace packages:", err);
|
|
768
|
-
throw err;
|
|
2581
|
+
overrides = JSON.parse(await readFile(overridesPath, "utf-8"));
|
|
2582
|
+
} catch {
|
|
2583
|
+
return false;
|
|
769
2584
|
}
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
|
|
775
|
-
|
|
2585
|
+
const versionsByPackage = new Map(workspacePackages.map((pkg) => [pkg.name, pkg.version]));
|
|
2586
|
+
const publishedSet = new Set(publishedPackageNames);
|
|
2587
|
+
const removed = [];
|
|
2588
|
+
for (const [pkgName, override] of Object.entries(overrides)) {
|
|
2589
|
+
if (!publishedSet.has(pkgName)) continue;
|
|
2590
|
+
const currentVersion = versionsByPackage.get(pkgName);
|
|
2591
|
+
const current = currentVersion ? semver.valid(currentVersion) : null;
|
|
2592
|
+
const target = semver.valid(override.version);
|
|
2593
|
+
if (current && target && semver.gte(current, target)) {
|
|
2594
|
+
delete overrides[pkgName];
|
|
2595
|
+
removed.push(pkgName);
|
|
2596
|
+
}
|
|
776
2597
|
}
|
|
777
|
-
if (
|
|
2598
|
+
if (removed.length === 0) return false;
|
|
2599
|
+
logger.step(`Cleaning up satisfied overrides (${removed.length})...`);
|
|
2600
|
+
if (Object.keys(overrides).length === 0) {
|
|
2601
|
+
await rm(overridesPath, { force: true });
|
|
2602
|
+
logger.success("Removed release override file (all entries satisfied)");
|
|
2603
|
+
return true;
|
|
2604
|
+
}
|
|
2605
|
+
await writeFile(overridesPath, JSON.stringify(overrides, null, 2), "utf-8");
|
|
2606
|
+
logger.success(`Removed satisfied overrides: ${removed.join(", ")}`);
|
|
778
2607
|
return true;
|
|
779
2608
|
}
|
|
780
|
-
|
|
2609
|
+
async function publishWorkflow(options) {
|
|
2610
|
+
logger.section("📦 Publishing Packages");
|
|
2611
|
+
const discovered = await discoverWorkspacePackages(options.workspaceRoot, options);
|
|
2612
|
+
if (!discovered.ok) exitWithError("Failed to discover packages.", void 0, discovered.error);
|
|
2613
|
+
const workspacePackages = discovered.value;
|
|
2614
|
+
logger.item(`Found ${workspacePackages.length} packages in workspace`);
|
|
2615
|
+
const graph = buildPackageDependencyGraph(workspacePackages);
|
|
2616
|
+
const publicPackages = workspacePackages.filter((pkg) => !pkg.packageJson.private);
|
|
2617
|
+
logger.item(`Publishing ${publicPackages.length} public packages (private packages excluded)`);
|
|
2618
|
+
if (publicPackages.length === 0) {
|
|
2619
|
+
logger.warn("No public packages to publish");
|
|
2620
|
+
return;
|
|
2621
|
+
}
|
|
2622
|
+
const publishOrder = getPackagePublishOrder(graph, new Set(publicPackages.map((p) => p.name)));
|
|
2623
|
+
const status = {
|
|
2624
|
+
published: [],
|
|
2625
|
+
skipped: [],
|
|
2626
|
+
failed: []
|
|
2627
|
+
};
|
|
2628
|
+
for (const order of publishOrder) {
|
|
2629
|
+
const pkg = order.package;
|
|
2630
|
+
const version = pkg.version;
|
|
2631
|
+
const packageName = pkg.name;
|
|
2632
|
+
logger.section(`📦 ${farver.cyan(packageName)} ${farver.gray(`(level ${order.level})`)}`);
|
|
2633
|
+
logger.step(`Checking if ${farver.cyan(`${packageName}@${version}`)} exists on NPM...`);
|
|
2634
|
+
const existsResult = await checkVersionExists(packageName, version);
|
|
2635
|
+
if (!existsResult.ok) {
|
|
2636
|
+
logger.error(`Failed to check version: ${existsResult.error.message}`);
|
|
2637
|
+
status.failed.push(packageName);
|
|
2638
|
+
exitWithError(`Publishing failed for ${packageName}.`, "Check your network connection and NPM registry access", existsResult.error);
|
|
2639
|
+
}
|
|
2640
|
+
if (existsResult.value) {
|
|
2641
|
+
logger.info(`Version ${farver.cyan(version)} already exists on NPM, skipping`);
|
|
2642
|
+
status.skipped.push(packageName);
|
|
2643
|
+
continue;
|
|
2644
|
+
}
|
|
2645
|
+
logger.step(`Publishing ${farver.cyan(`${packageName}@${version}`)} to NPM...`);
|
|
2646
|
+
const publishResult = await publishPackage(packageName, version, options.workspaceRoot, options);
|
|
2647
|
+
if (!publishResult.ok) {
|
|
2648
|
+
logger.error(`Failed to publish: ${publishResult.error.message}`);
|
|
2649
|
+
status.failed.push(packageName);
|
|
2650
|
+
let hint;
|
|
2651
|
+
if (publishResult.error.code === "E403") hint = "Authentication failed. Ensure your NPM token or OIDC configuration is correct";
|
|
2652
|
+
else if (publishResult.error.code === "EPUBLISHCONFLICT") hint = "Version conflict. The version may have been published recently";
|
|
2653
|
+
else if (publishResult.error.code === "EOTP") hint = "2FA/OTP required. Provide the otp option or use OIDC authentication";
|
|
2654
|
+
exitWithError(`Publishing failed for ${packageName}`, hint, publishResult.error);
|
|
2655
|
+
}
|
|
2656
|
+
logger.success(`Published ${farver.cyan(`${packageName}@${version}`)}`);
|
|
2657
|
+
status.published.push(packageName);
|
|
2658
|
+
logger.step(`Creating git tag ${farver.cyan(`${packageName}@${version}`)}...`);
|
|
2659
|
+
const tagResult = await createAndPushPackageTag(packageName, version, options.workspaceRoot);
|
|
2660
|
+
const tagName = `${packageName}@${version}`;
|
|
2661
|
+
if (!tagResult.ok) {
|
|
2662
|
+
logger.error(`Failed to create/push tag: ${tagResult.error.message}`);
|
|
2663
|
+
status.failed.push(packageName);
|
|
2664
|
+
exitWithError(`Publishing failed for ${packageName}: could not create git tag`, "Ensure the workflow token can push tags (contents: write) and git credentials are configured", tagResult.error);
|
|
2665
|
+
}
|
|
2666
|
+
logger.success(`Created and pushed tag ${farver.cyan(tagName)}`);
|
|
2667
|
+
logger.step(`Creating GitHub release for ${farver.cyan(tagName)}...`);
|
|
2668
|
+
try {
|
|
2669
|
+
const releaseBody = await getReleaseBodyFromChangelog(options.workspaceRoot, packageName, pkg.path, version);
|
|
2670
|
+
const releaseResult = await options.githubClient.upsertReleaseByTag({
|
|
2671
|
+
tagName,
|
|
2672
|
+
name: tagName,
|
|
2673
|
+
body: releaseBody,
|
|
2674
|
+
prerelease: Boolean(semver.prerelease(version))
|
|
2675
|
+
});
|
|
2676
|
+
if (releaseResult.release.htmlUrl) logger.success(`${releaseResult.created ? "Created" : "Updated"} GitHub release: ${releaseResult.release.htmlUrl}`);
|
|
2677
|
+
else logger.success(`${releaseResult.created ? "Created" : "Updated"} GitHub release for ${farver.cyan(tagName)}`);
|
|
2678
|
+
} catch (error) {
|
|
2679
|
+
status.failed.push(packageName);
|
|
2680
|
+
exitWithError(`Publishing failed for ${packageName}: could not create GitHub release`, "Ensure the workflow token can write repository contents and releases", error);
|
|
2681
|
+
}
|
|
2682
|
+
}
|
|
2683
|
+
logger.section("📊 Publishing Summary");
|
|
2684
|
+
logger.item(`${farver.green("✓")} Published: ${status.published.length} package(s)`);
|
|
2685
|
+
if (status.published.length > 0) for (const pkg of status.published) logger.item(` ${farver.green("•")} ${pkg}`);
|
|
2686
|
+
if (status.skipped.length > 0) {
|
|
2687
|
+
logger.item(`${farver.yellow("⚠")} Skipped (already exists): ${status.skipped.length} package(s)`);
|
|
2688
|
+
for (const pkg of status.skipped) logger.item(` ${farver.yellow("•")} ${pkg}`);
|
|
2689
|
+
}
|
|
2690
|
+
if (status.failed.length > 0) {
|
|
2691
|
+
logger.item(`${farver.red("✖")} Failed: ${status.failed.length} package(s)`);
|
|
2692
|
+
for (const pkg of status.failed) logger.item(` ${farver.red("•")} ${pkg}`);
|
|
2693
|
+
}
|
|
2694
|
+
if (status.failed.length > 0) exitWithError(`Publishing completed with ${status.failed.length} failure(s)`);
|
|
2695
|
+
if (await cleanupPublishedOverrides(options, workspacePackages, status.published) && !options.dryRun) {
|
|
2696
|
+
logger.step("Committing override cleanup...");
|
|
2697
|
+
const commitResult = await commitPaths([ucdjsReleaseOverridesPath], "chore: cleanup release overrides", options.workspaceRoot);
|
|
2698
|
+
if (!commitResult.ok) exitWithError("Failed to commit override cleanup.", void 0, commitResult.error);
|
|
2699
|
+
if (commitResult.value) {
|
|
2700
|
+
const currentBranch = await getCurrentBranch(options.workspaceRoot);
|
|
2701
|
+
if (!currentBranch.ok) exitWithError("Failed to detect current branch for override cleanup push.", void 0, currentBranch.error);
|
|
2702
|
+
const pushResult = await pushBranch(currentBranch.value, options.workspaceRoot);
|
|
2703
|
+
if (!pushResult.ok) exitWithError("Failed to push override cleanup commit.", void 0, pushResult.error);
|
|
2704
|
+
logger.success(`Pushed override cleanup commit to ${farver.cyan(currentBranch.value)}`);
|
|
2705
|
+
}
|
|
2706
|
+
}
|
|
2707
|
+
logger.success("All packages published successfully!");
|
|
2708
|
+
}
|
|
781
2709
|
//#endregion
|
|
782
|
-
//#region src/
|
|
783
|
-
async function
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
globalOptions.dryRun = normalizedOptions.dryRun;
|
|
789
|
-
const workspaceRoot = normalizedOptions.workspaceRoot;
|
|
790
|
-
if (normalizedOptions.safeguards && !await isWorkingDirectoryClean(workspaceRoot)) exitWithError("Working directory is not clean. Please commit or stash your changes before proceeding.");
|
|
791
|
-
const workspacePackages = await discoverWorkspacePackages(workspaceRoot, options);
|
|
792
|
-
if (workspacePackages.length === 0) {
|
|
793
|
-
logger.log("No packages found to analyze for release.");
|
|
794
|
-
return null;
|
|
2710
|
+
//#region src/workflows/verify.ts
|
|
2711
|
+
async function verifyWorkflow(options) {
|
|
2712
|
+
if (options.safeguards) {
|
|
2713
|
+
const clean = await isWorkingDirectoryClean(options.workspaceRoot);
|
|
2714
|
+
if (!clean.ok) exitWithError("Failed to verify working directory state.", "Ensure this is a valid git repository and try again.", clean.error);
|
|
2715
|
+
if (!clean.value) exitWithError("Working directory is not clean. Please commit or stash your changes before proceeding.");
|
|
795
2716
|
}
|
|
796
|
-
const
|
|
797
|
-
|
|
798
|
-
const
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
|
|
802
|
-
|
|
803
|
-
|
|
804
|
-
|
|
2717
|
+
const releaseBranch = options.branch.release;
|
|
2718
|
+
const defaultBranch = options.branch.default;
|
|
2719
|
+
const releasePr = await options.githubClient.getExistingPullRequest(releaseBranch);
|
|
2720
|
+
if (!releasePr || !releasePr.head) {
|
|
2721
|
+
logger.warn(`No open release pull request found for branch "${releaseBranch}". Nothing to verify.`);
|
|
2722
|
+
return;
|
|
2723
|
+
}
|
|
2724
|
+
logger.info(`Found release PR #${releasePr.number}. Verifying against default branch "${defaultBranch}"...`);
|
|
2725
|
+
const originalBranch = await getCurrentBranch(options.workspaceRoot);
|
|
2726
|
+
if (!originalBranch.ok) exitWithError("Failed to detect current branch.", void 0, originalBranch.error);
|
|
2727
|
+
if (originalBranch.value !== defaultBranch) {
|
|
2728
|
+
const checkout = await checkoutBranch(defaultBranch, options.workspaceRoot);
|
|
2729
|
+
if (!checkout.ok) exitWithError(`Failed to checkout branch: ${defaultBranch}`, void 0, checkout.error);
|
|
2730
|
+
if (!checkout.value) exitWithError(`Failed to checkout branch: ${defaultBranch}`);
|
|
2731
|
+
}
|
|
2732
|
+
let existingOverrides = {};
|
|
2733
|
+
try {
|
|
2734
|
+
const overridesContent = await readFileFromGit(options.workspaceRoot, releasePr.head.sha, ucdjsReleaseOverridesPath);
|
|
2735
|
+
if (overridesContent.ok && overridesContent.value) {
|
|
2736
|
+
existingOverrides = JSON.parse(overridesContent.value);
|
|
2737
|
+
logger.info("Found existing version overrides file on release branch.");
|
|
2738
|
+
}
|
|
2739
|
+
} catch (error) {
|
|
2740
|
+
logger.info("No version overrides file found on release branch. Continuing...");
|
|
2741
|
+
logger.verbose(`Reading release overrides failed: ${formatUnknownError(error).message}`);
|
|
2742
|
+
}
|
|
2743
|
+
const discovered = await discoverWorkspacePackages(options.workspaceRoot, options);
|
|
2744
|
+
if (!discovered.ok) exitWithError("Failed to discover packages.", void 0, discovered.error);
|
|
2745
|
+
const ensured = ensureHasPackages(discovered.value);
|
|
2746
|
+
if (!ensured.ok) {
|
|
2747
|
+
logger.warn(ensured.error.message);
|
|
2748
|
+
return;
|
|
2749
|
+
}
|
|
2750
|
+
const mainPackages = ensured.value;
|
|
2751
|
+
const updatesResult = await calculateUpdates({
|
|
2752
|
+
workspacePackages: mainPackages,
|
|
2753
|
+
workspaceRoot: options.workspaceRoot,
|
|
2754
|
+
showPrompt: false,
|
|
2755
|
+
globalCommitMode: options.globalCommitMode === "none" ? false : options.globalCommitMode,
|
|
2756
|
+
overrides: existingOverrides
|
|
805
2757
|
});
|
|
806
|
-
|
|
807
|
-
|
|
808
|
-
|
|
809
|
-
const
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
await
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
|
|
816
|
-
logger.log("Pulling latest changes from remote");
|
|
817
|
-
if (!await pullLatestChanges(normalizedOptions.releaseBranch, workspaceRoot)) logger.log("Warning: Failed to pull latest changes, continuing anyway");
|
|
818
|
-
}
|
|
819
|
-
logger.log("Rebasing release branch onto", currentBranch);
|
|
820
|
-
await rebaseBranch(currentBranch, workspaceRoot);
|
|
821
|
-
await updateAllPackageJsonFiles(allUpdates);
|
|
822
|
-
const hasCommitted = await commitChanges("chore: update release versions", workspaceRoot);
|
|
823
|
-
const isBranchAhead = await isBranchAheadOfRemote(normalizedOptions.releaseBranch, workspaceRoot);
|
|
824
|
-
if (!hasCommitted && !isBranchAhead) {
|
|
825
|
-
logger.log("No changes to commit and branch is in sync with remote");
|
|
826
|
-
await checkoutBranch(currentBranch, workspaceRoot);
|
|
827
|
-
if (prExists) {
|
|
828
|
-
logger.log("No updates needed, PR is already up to date");
|
|
829
|
-
return {
|
|
830
|
-
updates: allUpdates,
|
|
831
|
-
prUrl: existingPullRequest.html_url,
|
|
832
|
-
created: false
|
|
833
|
-
};
|
|
834
|
-
} else {
|
|
835
|
-
logger.error("No changes to commit, and no existing PR. Nothing to do.");
|
|
836
|
-
return null;
|
|
2758
|
+
if (!updatesResult.ok) exitWithError("Failed to calculate expected package updates.", void 0, updatesResult.error);
|
|
2759
|
+
const expectedUpdates = updatesResult.value.allUpdates;
|
|
2760
|
+
const expectedVersionMap = new Map(expectedUpdates.map((u) => [u.package.name, u.newVersion]));
|
|
2761
|
+
const prVersionMap = /* @__PURE__ */ new Map();
|
|
2762
|
+
for (const pkg of mainPackages) {
|
|
2763
|
+
const pkgJsonPath = relative(options.workspaceRoot, join(pkg.path, "package.json"));
|
|
2764
|
+
const pkgJsonContent = await readFileFromGit(options.workspaceRoot, releasePr.head.sha, pkgJsonPath);
|
|
2765
|
+
if (pkgJsonContent.ok && pkgJsonContent.value) {
|
|
2766
|
+
const pkgJson = JSON.parse(pkgJsonContent.value);
|
|
2767
|
+
prVersionMap.set(pkg.name, pkgJson.version);
|
|
837
2768
|
}
|
|
838
2769
|
}
|
|
839
|
-
|
|
840
|
-
|
|
841
|
-
const
|
|
842
|
-
|
|
843
|
-
|
|
844
|
-
|
|
845
|
-
|
|
846
|
-
|
|
847
|
-
|
|
848
|
-
|
|
849
|
-
|
|
850
|
-
|
|
851
|
-
|
|
2770
|
+
if (originalBranch.value !== defaultBranch) await checkoutBranch(originalBranch.value, options.workspaceRoot);
|
|
2771
|
+
let isOutOfSync = false;
|
|
2772
|
+
for (const [pkgName, expectedVersion] of expectedVersionMap.entries()) {
|
|
2773
|
+
const prVersion = prVersionMap.get(pkgName);
|
|
2774
|
+
if (!prVersion) {
|
|
2775
|
+
logger.warn(`Package "${pkgName}" found in default branch but not in release branch. Skipping.`);
|
|
2776
|
+
continue;
|
|
2777
|
+
}
|
|
2778
|
+
if (gt(expectedVersion, prVersion)) {
|
|
2779
|
+
logger.error(`Package "${pkgName}" is out of sync. Expected version >= ${expectedVersion}, but PR has ${prVersion}.`);
|
|
2780
|
+
isOutOfSync = true;
|
|
2781
|
+
} else logger.success(`Package "${pkgName}" is up to date (PR version: ${prVersion}, Expected: ${expectedVersion})`);
|
|
2782
|
+
}
|
|
2783
|
+
const statusContext = "ucdjs/release-verify";
|
|
2784
|
+
if (isOutOfSync) {
|
|
2785
|
+
await options.githubClient.setCommitStatus({
|
|
2786
|
+
sha: releasePr.head.sha,
|
|
2787
|
+
state: "failure",
|
|
2788
|
+
context: statusContext,
|
|
2789
|
+
description: "Release PR is out of sync with the default branch. Please re-run the release process."
|
|
2790
|
+
});
|
|
2791
|
+
logger.error("Verification failed. Commit status set to 'failure'.");
|
|
2792
|
+
} else {
|
|
2793
|
+
await options.githubClient.setCommitStatus({
|
|
2794
|
+
sha: releasePr.head.sha,
|
|
2795
|
+
state: "success",
|
|
2796
|
+
context: statusContext,
|
|
2797
|
+
description: "Release PR is up to date.",
|
|
2798
|
+
targetUrl: `https://github.com/${options.owner}/${options.repo}/pull/${releasePr.number}`
|
|
2799
|
+
});
|
|
2800
|
+
logger.success("Verification successful. Commit status set to 'success'.");
|
|
2801
|
+
}
|
|
2802
|
+
}
|
|
2803
|
+
//#endregion
|
|
2804
|
+
//#region src/index.ts
|
|
2805
|
+
function withErrorBoundary(fn) {
|
|
2806
|
+
return fn().catch((e) => {
|
|
2807
|
+
if (e instanceof ReleaseError) {
|
|
2808
|
+
printReleaseError(e);
|
|
2809
|
+
process.exit(1);
|
|
2810
|
+
}
|
|
2811
|
+
throw e;
|
|
2812
|
+
});
|
|
2813
|
+
}
|
|
2814
|
+
async function createReleaseScripts(options) {
|
|
2815
|
+
const normalizedOptions = normalizeReleaseScriptsOptions(options);
|
|
2816
|
+
logger.verbose("Release scripts config", {
|
|
2817
|
+
repo: `${normalizedOptions.owner}/${normalizedOptions.repo}`,
|
|
2818
|
+
workspaceRoot: normalizedOptions.workspaceRoot,
|
|
2819
|
+
dryRun: normalizedOptions.dryRun,
|
|
2820
|
+
safeguards: normalizedOptions.safeguards,
|
|
2821
|
+
branch: normalizedOptions.branch,
|
|
2822
|
+
globalCommitMode: normalizedOptions.globalCommitMode,
|
|
2823
|
+
prompts: normalizedOptions.prompts,
|
|
2824
|
+
packages: normalizedOptions.packages,
|
|
2825
|
+
npm: {
|
|
2826
|
+
access: normalizedOptions.npm.access,
|
|
2827
|
+
provenance: normalizedOptions.npm.provenance,
|
|
2828
|
+
otp: normalizedOptions.npm.otp ? "set" : "unset"
|
|
2829
|
+
},
|
|
2830
|
+
changelog: normalizedOptions.changelog
|
|
852
2831
|
});
|
|
853
|
-
logger.log(prExists ? "Updated pull request:" : "Created pull request:", pullRequest?.html_url);
|
|
854
|
-
await checkoutBranch(currentBranch, workspaceRoot);
|
|
855
2832
|
return {
|
|
856
|
-
|
|
857
|
-
|
|
858
|
-
|
|
2833
|
+
async verify() {
|
|
2834
|
+
return withErrorBoundary(() => verifyWorkflow(normalizedOptions));
|
|
2835
|
+
},
|
|
2836
|
+
async prepare() {
|
|
2837
|
+
return withErrorBoundary(() => prepareWorkflow(normalizedOptions));
|
|
2838
|
+
},
|
|
2839
|
+
async publish() {
|
|
2840
|
+
return withErrorBoundary(() => publishWorkflow(normalizedOptions));
|
|
2841
|
+
},
|
|
2842
|
+
packages: {
|
|
2843
|
+
async list() {
|
|
2844
|
+
return withErrorBoundary(async () => {
|
|
2845
|
+
const result = await discoverWorkspacePackages(normalizedOptions.workspaceRoot, normalizedOptions);
|
|
2846
|
+
if (!result.ok) throw new Error(result.error.message);
|
|
2847
|
+
return result.value;
|
|
2848
|
+
});
|
|
2849
|
+
},
|
|
2850
|
+
async get(packageName) {
|
|
2851
|
+
return withErrorBoundary(async () => {
|
|
2852
|
+
const result = await discoverWorkspacePackages(normalizedOptions.workspaceRoot, normalizedOptions);
|
|
2853
|
+
if (!result.ok) throw new Error(result.error.message);
|
|
2854
|
+
return result.value.find((p) => p.name === packageName);
|
|
2855
|
+
});
|
|
2856
|
+
}
|
|
2857
|
+
}
|
|
859
2858
|
};
|
|
860
2859
|
}
|
|
861
|
-
|
|
862
2860
|
//#endregion
|
|
863
|
-
export {
|
|
2861
|
+
export { createReleaseScripts };
|