@ucdjs/release-scripts 0.1.0-beta.23 → 0.1.0-beta.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{eta-j5TFRbI4.mjs → eta-BV8TCRDW.mjs} +17 -13
- package/dist/index.d.mts +48 -170
- package/dist/index.mjs +1391 -1653
- package/package.json +15 -17
package/dist/index.mjs
CHANGED
|
@@ -1,1766 +1,1504 @@
|
|
|
1
|
-
import { t as Eta } from "./eta-
|
|
2
|
-
import {
|
|
3
|
-
import
|
|
1
|
+
import { t as Eta } from "./eta-BV8TCRDW.mjs";
|
|
2
|
+
import { Console, Context, Data, Effect, Layer, Schema } from "effect";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { Command, CommandExecutor } from "@effect/platform";
|
|
5
|
+
import { NodeCommandExecutor, NodeFileSystem } from "@effect/platform-node";
|
|
6
|
+
import * as CommitParser from "commit-parser";
|
|
4
7
|
import process from "node:process";
|
|
5
|
-
import
|
|
6
|
-
import
|
|
7
|
-
import mri from "mri";
|
|
8
|
-
import { exec } from "tinyexec";
|
|
9
|
-
import { dedent } from "@luxass/utils";
|
|
10
|
-
import { getCommits, groupByType } from "commit-parser";
|
|
11
|
-
import prompts from "prompts";
|
|
12
|
-
import { compare, gt } from "semver";
|
|
8
|
+
import semver from "semver";
|
|
9
|
+
import fs from "node:fs/promises";
|
|
13
10
|
|
|
14
|
-
//#region src/
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
//#endregion
|
|
18
|
-
//#region src/shared/utils.ts
|
|
19
|
-
const args = mri(process.argv.slice(2));
|
|
20
|
-
const isDryRun = !!args.dry;
|
|
21
|
-
const isVerbose = !!args.verbose;
|
|
22
|
-
const isForce = !!args.force;
|
|
23
|
-
const ucdjsReleaseOverridesPath = ".github/ucdjs-release.overrides.json";
|
|
24
|
-
const isCI = typeof process.env.CI === "string" && process.env.CI !== "" && process.env.CI.toLowerCase() !== "false";
|
|
25
|
-
const logger = {
|
|
26
|
-
info: (...args$1) => {
|
|
27
|
-
console.info(...args$1);
|
|
28
|
-
},
|
|
29
|
-
warn: (...args$1) => {
|
|
30
|
-
console.warn(` ${farver.yellow("⚠")}`, ...args$1);
|
|
31
|
-
},
|
|
32
|
-
error: (...args$1) => {
|
|
33
|
-
console.error(` ${farver.red("✖")}`, ...args$1);
|
|
34
|
-
},
|
|
35
|
-
verbose: (...args$1) => {
|
|
36
|
-
if (!isVerbose) return;
|
|
37
|
-
if (args$1.length === 0) {
|
|
38
|
-
console.log();
|
|
39
|
-
return;
|
|
40
|
-
}
|
|
41
|
-
if (args$1.length > 1 && typeof args$1[0] === "string") {
|
|
42
|
-
console.log(farver.dim(args$1[0]), ...args$1.slice(1));
|
|
43
|
-
return;
|
|
44
|
-
}
|
|
45
|
-
console.log(...args$1);
|
|
46
|
-
},
|
|
47
|
-
section: (title) => {
|
|
48
|
-
console.log();
|
|
49
|
-
console.log(` ${farver.bold(title)}`);
|
|
50
|
-
console.log(` ${farver.gray("─".repeat(title.length + 2))}`);
|
|
51
|
-
},
|
|
52
|
-
emptyLine: () => {
|
|
53
|
-
console.log();
|
|
54
|
-
},
|
|
55
|
-
item: (message, ...args$1) => {
|
|
56
|
-
console.log(` ${message}`, ...args$1);
|
|
57
|
-
},
|
|
58
|
-
step: (message) => {
|
|
59
|
-
console.log(` ${farver.blue("→")} ${message}`);
|
|
60
|
-
},
|
|
61
|
-
success: (message) => {
|
|
62
|
-
console.log(` ${farver.green("✓")} ${message}`);
|
|
63
|
-
},
|
|
64
|
-
clearScreen: () => {
|
|
65
|
-
const repeatCount = process.stdout.rows - 2;
|
|
66
|
-
const blank = repeatCount > 0 ? "\n".repeat(repeatCount) : "";
|
|
67
|
-
console.log(blank);
|
|
68
|
-
readline.cursorTo(process.stdout, 0, 0);
|
|
69
|
-
readline.clearScreenDown(process.stdout);
|
|
70
|
-
}
|
|
71
|
-
};
|
|
72
|
-
async function run(bin, args$1, opts = {}) {
|
|
73
|
-
return exec(bin, args$1, {
|
|
74
|
-
throwOnError: true,
|
|
75
|
-
...opts,
|
|
76
|
-
nodeOptions: {
|
|
77
|
-
stdio: "inherit",
|
|
78
|
-
...opts.nodeOptions
|
|
79
|
-
}
|
|
80
|
-
});
|
|
81
|
-
}
|
|
82
|
-
async function dryRun(bin, args$1, opts) {
|
|
83
|
-
return logger.verbose(farver.blue(`[dryrun] ${bin} ${args$1.join(" ")}`), opts || "");
|
|
84
|
-
}
|
|
85
|
-
const runIfNotDry = isDryRun ? dryRun : run;
|
|
86
|
-
function exitWithError(message, hint) {
|
|
87
|
-
logger.error(farver.bold(message));
|
|
88
|
-
if (hint) console.error(farver.gray(` ${hint}`));
|
|
89
|
-
process.exit(1);
|
|
90
|
-
}
|
|
91
|
-
if (isDryRun || isVerbose || isForce) {
|
|
92
|
-
logger.verbose(farver.inverse(farver.yellow(" Running with special flags ")));
|
|
93
|
-
logger.verbose({
|
|
94
|
-
isDryRun,
|
|
95
|
-
isVerbose,
|
|
96
|
-
isForce
|
|
97
|
-
});
|
|
98
|
-
logger.verbose();
|
|
99
|
-
}
|
|
100
|
-
|
|
101
|
-
//#endregion
|
|
102
|
-
//#region src/core/git.ts
|
|
11
|
+
//#region src/utils/changelog-formatters.ts
|
|
12
|
+
const eta$1 = new Eta();
|
|
103
13
|
/**
|
|
104
|
-
*
|
|
105
|
-
* @param {string} workspaceRoot - The root directory of the workspace
|
|
106
|
-
* @returns {Promise<boolean>} A Promise resolving to true if clean, false otherwise
|
|
14
|
+
* Pure function to parse commits into changelog entries
|
|
107
15
|
*/
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
16
|
+
function parseCommits(commits) {
|
|
17
|
+
return commits.filter((commit) => commit.isConventional).map((commit) => ({
|
|
18
|
+
type: commit.type || "other",
|
|
19
|
+
scope: commit.scope,
|
|
20
|
+
description: commit.description,
|
|
21
|
+
breaking: commit.isBreaking || false,
|
|
22
|
+
hash: commit.hash,
|
|
23
|
+
shortHash: commit.shortHash,
|
|
24
|
+
references: commit.references.map((ref) => ({
|
|
25
|
+
type: ref.type,
|
|
26
|
+
value: ref.value
|
|
27
|
+
}))
|
|
28
|
+
}));
|
|
119
29
|
}
|
|
120
30
|
/**
|
|
121
|
-
*
|
|
122
|
-
* @param {string} branch - The branch name to check
|
|
123
|
-
* @param {string} workspaceRoot - The root directory of the workspace
|
|
124
|
-
* @returns {Promise<boolean>} Promise resolving to true if branch exists, false otherwise
|
|
31
|
+
* Pure function to group changelog entries by type
|
|
125
32
|
*/
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
stdio: "pipe"
|
|
135
|
-
} });
|
|
136
|
-
return true;
|
|
137
|
-
} catch {
|
|
138
|
-
return false;
|
|
139
|
-
}
|
|
140
|
-
}
|
|
141
|
-
/**
|
|
142
|
-
* Retrieves the default branch name from the remote repository.
|
|
143
|
-
* Falls back to "main" if the default branch cannot be determined.
|
|
144
|
-
* @returns {Promise<string>} A Promise resolving to the default branch name as a string.
|
|
145
|
-
*/
|
|
146
|
-
async function getDefaultBranch(workspaceRoot) {
|
|
147
|
-
try {
|
|
148
|
-
const match = (await run("git", ["symbolic-ref", "refs/remotes/origin/HEAD"], { nodeOptions: {
|
|
149
|
-
cwd: workspaceRoot,
|
|
150
|
-
stdio: "pipe"
|
|
151
|
-
} })).stdout.trim().match(/^refs\/remotes\/origin\/(.+)$/);
|
|
152
|
-
if (match && match[1]) return match[1];
|
|
153
|
-
return "main";
|
|
154
|
-
} catch {
|
|
155
|
-
return "main";
|
|
156
|
-
}
|
|
157
|
-
}
|
|
158
|
-
/**
|
|
159
|
-
* Retrieves the name of the current branch in the repository.
|
|
160
|
-
* @param {string} workspaceRoot - The root directory of the workspace
|
|
161
|
-
* @returns {Promise<string>} A Promise resolving to the current branch name as a string
|
|
162
|
-
*/
|
|
163
|
-
async function getCurrentBranch(workspaceRoot) {
|
|
164
|
-
try {
|
|
165
|
-
return (await run("git", [
|
|
166
|
-
"rev-parse",
|
|
167
|
-
"--abbrev-ref",
|
|
168
|
-
"HEAD"
|
|
169
|
-
], { nodeOptions: {
|
|
170
|
-
cwd: workspaceRoot,
|
|
171
|
-
stdio: "pipe"
|
|
172
|
-
} })).stdout.trim();
|
|
173
|
-
} catch (err) {
|
|
174
|
-
logger.error("Error getting current branch:", err);
|
|
175
|
-
throw err;
|
|
176
|
-
}
|
|
177
|
-
}
|
|
178
|
-
/**
|
|
179
|
-
* Retrieves the list of available branches in the repository.
|
|
180
|
-
* @param {string} workspaceRoot - The root directory of the workspace
|
|
181
|
-
* @returns {Promise<string[]>} A Promise resolving to an array of branch names
|
|
182
|
-
*/
|
|
183
|
-
async function getAvailableBranches(workspaceRoot) {
|
|
184
|
-
try {
|
|
185
|
-
return (await run("git", ["branch", "--list"], { nodeOptions: {
|
|
186
|
-
cwd: workspaceRoot,
|
|
187
|
-
stdio: "pipe"
|
|
188
|
-
} })).stdout.split("\n").map((line) => line.replace("*", "").trim()).filter((line) => line.length > 0);
|
|
189
|
-
} catch (err) {
|
|
190
|
-
logger.error("Error getting available branches:", err);
|
|
191
|
-
throw err;
|
|
192
|
-
}
|
|
193
|
-
}
|
|
194
|
-
/**
|
|
195
|
-
* Creates a new branch from the specified base branch.
|
|
196
|
-
* @param {string} branch - The name of the new branch to create
|
|
197
|
-
* @param {string} base - The base branch to create the new branch from
|
|
198
|
-
* @param {string} workspaceRoot - The root directory of the workspace
|
|
199
|
-
* @returns {Promise<void>} A Promise that resolves when the branch is created
|
|
200
|
-
*/
|
|
201
|
-
async function createBranch(branch, base, workspaceRoot) {
|
|
202
|
-
try {
|
|
203
|
-
logger.info(`Creating branch: ${farver.green(branch)} from ${farver.cyan(base)}`);
|
|
204
|
-
await runIfNotDry("git", [
|
|
205
|
-
"branch",
|
|
206
|
-
branch,
|
|
207
|
-
base
|
|
208
|
-
], { nodeOptions: {
|
|
209
|
-
cwd: workspaceRoot,
|
|
210
|
-
stdio: "pipe"
|
|
211
|
-
} });
|
|
212
|
-
} catch {
|
|
213
|
-
exitWithError(`Failed to create branch: ${branch}`, `Make sure the branch doesn't already exist and you have a clean working directory`);
|
|
214
|
-
}
|
|
215
|
-
}
|
|
216
|
-
async function checkoutBranch(branch, workspaceRoot) {
|
|
217
|
-
try {
|
|
218
|
-
logger.info(`Switching to branch: ${farver.green(branch)}`);
|
|
219
|
-
const match = (await run("git", ["checkout", branch], { nodeOptions: {
|
|
220
|
-
cwd: workspaceRoot,
|
|
221
|
-
stdio: "pipe"
|
|
222
|
-
} })).stderr.trim().match(/Switched to branch '(.+)'/);
|
|
223
|
-
if (match && match[1] === branch) {
|
|
224
|
-
logger.info(`Successfully switched to branch: ${farver.green(branch)}`);
|
|
225
|
-
return true;
|
|
226
|
-
}
|
|
227
|
-
return false;
|
|
228
|
-
} catch {
|
|
229
|
-
return false;
|
|
230
|
-
}
|
|
231
|
-
}
|
|
232
|
-
async function pullLatestChanges(branch, workspaceRoot) {
|
|
233
|
-
try {
|
|
234
|
-
await run("git", [
|
|
235
|
-
"pull",
|
|
236
|
-
"origin",
|
|
237
|
-
branch
|
|
238
|
-
], { nodeOptions: {
|
|
239
|
-
cwd: workspaceRoot,
|
|
240
|
-
stdio: "pipe"
|
|
241
|
-
} });
|
|
242
|
-
return true;
|
|
243
|
-
} catch {
|
|
244
|
-
return false;
|
|
245
|
-
}
|
|
246
|
-
}
|
|
247
|
-
async function rebaseBranch(ontoBranch, workspaceRoot) {
|
|
248
|
-
try {
|
|
249
|
-
logger.info(`Rebasing onto: ${farver.cyan(ontoBranch)}`);
|
|
250
|
-
await runIfNotDry("git", ["rebase", ontoBranch], { nodeOptions: {
|
|
251
|
-
cwd: workspaceRoot,
|
|
252
|
-
stdio: "pipe"
|
|
253
|
-
} });
|
|
254
|
-
return true;
|
|
255
|
-
} catch {
|
|
256
|
-
exitWithError(`Failed to rebase onto: ${ontoBranch}`, `You may have merge conflicts. Run 'git rebase --abort' to undo the rebase`);
|
|
257
|
-
}
|
|
258
|
-
}
|
|
259
|
-
async function isBranchAheadOfRemote(branch, workspaceRoot) {
|
|
260
|
-
try {
|
|
261
|
-
const result = await run("git", [
|
|
262
|
-
"rev-list",
|
|
263
|
-
`origin/${branch}..${branch}`,
|
|
264
|
-
"--count"
|
|
265
|
-
], { nodeOptions: {
|
|
266
|
-
cwd: workspaceRoot,
|
|
267
|
-
stdio: "pipe"
|
|
268
|
-
} });
|
|
269
|
-
return Number.parseInt(result.stdout.trim(), 10) > 0;
|
|
270
|
-
} catch {
|
|
271
|
-
return true;
|
|
272
|
-
}
|
|
273
|
-
}
|
|
274
|
-
async function commitChanges(message, workspaceRoot) {
|
|
275
|
-
try {
|
|
276
|
-
await run("git", ["add", "."], { nodeOptions: {
|
|
277
|
-
cwd: workspaceRoot,
|
|
278
|
-
stdio: "pipe"
|
|
279
|
-
} });
|
|
280
|
-
if (await isWorkingDirectoryClean(workspaceRoot)) return false;
|
|
281
|
-
logger.info(`Committing changes: ${farver.dim(message)}`);
|
|
282
|
-
await runIfNotDry("git", [
|
|
283
|
-
"commit",
|
|
284
|
-
"-m",
|
|
285
|
-
message
|
|
286
|
-
], { nodeOptions: {
|
|
287
|
-
cwd: workspaceRoot,
|
|
288
|
-
stdio: "pipe"
|
|
289
|
-
} });
|
|
290
|
-
return true;
|
|
291
|
-
} catch {
|
|
292
|
-
exitWithError(`Failed to commit changes`, `Make sure you have git configured properly with user.name and user.email`);
|
|
293
|
-
}
|
|
294
|
-
}
|
|
295
|
-
async function pushBranch(branch, workspaceRoot, options) {
|
|
296
|
-
try {
|
|
297
|
-
const args$1 = [
|
|
298
|
-
"push",
|
|
299
|
-
"origin",
|
|
300
|
-
branch
|
|
301
|
-
];
|
|
302
|
-
if (options?.forceWithLease) {
|
|
303
|
-
args$1.push("--force-with-lease");
|
|
304
|
-
logger.info(`Pushing branch: ${farver.green(branch)} ${farver.dim("(with lease)")}`);
|
|
305
|
-
} else if (options?.force) {
|
|
306
|
-
args$1.push("--force");
|
|
307
|
-
logger.info(`Force pushing branch: ${farver.green(branch)}`);
|
|
308
|
-
} else logger.info(`Pushing branch: ${farver.green(branch)}`);
|
|
309
|
-
await runIfNotDry("git", args$1, { nodeOptions: {
|
|
310
|
-
cwd: workspaceRoot,
|
|
311
|
-
stdio: "pipe"
|
|
312
|
-
} });
|
|
313
|
-
return true;
|
|
314
|
-
} catch {
|
|
315
|
-
exitWithError(`Failed to push branch: ${branch}`, `Make sure you have permission to push to the remote repository`);
|
|
316
|
-
}
|
|
317
|
-
}
|
|
318
|
-
async function readFileFromGit(workspaceRoot, ref, filePath) {
|
|
319
|
-
try {
|
|
320
|
-
return (await run("git", ["show", `${ref}:${filePath}`], { nodeOptions: {
|
|
321
|
-
cwd: workspaceRoot,
|
|
322
|
-
stdio: "pipe"
|
|
323
|
-
} })).stdout;
|
|
324
|
-
} catch {
|
|
325
|
-
return null;
|
|
326
|
-
}
|
|
327
|
-
}
|
|
328
|
-
async function getMostRecentPackageTag(workspaceRoot, packageName) {
|
|
329
|
-
try {
|
|
330
|
-
const { stdout } = await run("git", [
|
|
331
|
-
"tag",
|
|
332
|
-
"--list",
|
|
333
|
-
`${packageName}@*`
|
|
334
|
-
], { nodeOptions: {
|
|
335
|
-
cwd: workspaceRoot,
|
|
336
|
-
stdio: "pipe"
|
|
337
|
-
} });
|
|
338
|
-
const tags = stdout.split("\n").map((tag) => tag.trim()).filter(Boolean);
|
|
339
|
-
if (tags.length === 0) return;
|
|
340
|
-
return tags.reverse()[0];
|
|
341
|
-
} catch (err) {
|
|
342
|
-
logger.warn(`Failed to get tags for package ${packageName}: ${err.message}`);
|
|
343
|
-
return;
|
|
344
|
-
}
|
|
33
|
+
function groupByType(entries) {
|
|
34
|
+
const groups = /* @__PURE__ */ new Map();
|
|
35
|
+
for (const entry of entries) {
|
|
36
|
+
const type = entry.breaking ? "breaking" : entry.type;
|
|
37
|
+
if (!groups.has(type)) groups.set(type, []);
|
|
38
|
+
groups.get(type).push(entry);
|
|
39
|
+
}
|
|
40
|
+
return groups;
|
|
345
41
|
}
|
|
346
42
|
/**
|
|
347
|
-
*
|
|
348
|
-
* within a given inclusive range.
|
|
349
|
-
*
|
|
350
|
-
* Internally runs:
|
|
351
|
-
* git log --name-only --format=%H <from>^..<to>
|
|
352
|
-
*
|
|
353
|
-
* Notes
|
|
354
|
-
* - This includes the commit identified by `from` (via `from^..to`).
|
|
355
|
-
* - Order of commits in the resulting Map follows `git log` output
|
|
356
|
-
* (reverse chronological, newest first).
|
|
357
|
-
* - On failure (e.g., invalid refs), the function returns null.
|
|
358
|
-
*
|
|
359
|
-
* @param {string} workspaceRoot Absolute path to the git repository root used as cwd.
|
|
360
|
-
* @param {string} from Starting commit/ref (inclusive).
|
|
361
|
-
* @param {string} to Ending commit/ref (inclusive).
|
|
362
|
-
* @returns {Promise<Map<string, string[]> | null>} Promise resolving to a Map where keys are commit SHAs and values are
|
|
363
|
-
* arrays of file paths changed by that commit, or null on error.
|
|
43
|
+
* Changelog template for Eta rendering
|
|
364
44
|
*/
|
|
365
|
-
|
|
366
|
-
const commitsMap = /* @__PURE__ */ new Map();
|
|
367
|
-
try {
|
|
368
|
-
const { stdout } = await run("git", [
|
|
369
|
-
"log",
|
|
370
|
-
"--name-only",
|
|
371
|
-
"--format=%H",
|
|
372
|
-
`${from}^..${to}`
|
|
373
|
-
], { nodeOptions: {
|
|
374
|
-
cwd: workspaceRoot,
|
|
375
|
-
stdio: "pipe"
|
|
376
|
-
} });
|
|
377
|
-
const lines = stdout.trim().split("\n").filter((line) => line.trim() !== "");
|
|
378
|
-
let currentSha = null;
|
|
379
|
-
const HASH_REGEX = /^[0-9a-f]{40}$/i;
|
|
380
|
-
for (const line of lines) {
|
|
381
|
-
const trimmedLine = line.trim();
|
|
382
|
-
if (HASH_REGEX.test(trimmedLine)) {
|
|
383
|
-
currentSha = trimmedLine;
|
|
384
|
-
commitsMap.set(currentSha, []);
|
|
385
|
-
continue;
|
|
386
|
-
}
|
|
387
|
-
if (currentSha === null) continue;
|
|
388
|
-
commitsMap.get(currentSha).push(trimmedLine);
|
|
389
|
-
}
|
|
390
|
-
return commitsMap;
|
|
391
|
-
} catch {
|
|
392
|
-
return null;
|
|
393
|
-
}
|
|
394
|
-
}
|
|
45
|
+
const CHANGELOG_TEMPLATE = `# <%= it.packageName %> v<%= it.version %>
|
|
395
46
|
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
const globalAuthorCache = /* @__PURE__ */ new Map();
|
|
399
|
-
const DEFAULT_CHANGELOG_TEMPLATE = dedent`
|
|
400
|
-
<% if (it.previousVersion) { -%>
|
|
401
|
-
## [<%= it.version %>](<%= it.compareUrl %>) (<%= it.date %>)
|
|
402
|
-
<% } else { -%>
|
|
403
|
-
## <%= it.version %> (<%= it.date %>)
|
|
404
|
-
<% } %>
|
|
47
|
+
**Previous version**: \`<%= it.previousVersion %>\`
|
|
48
|
+
**New version**: \`<%= it.version %>\`
|
|
405
49
|
|
|
406
|
-
|
|
407
|
-
|
|
50
|
+
<% if (it.entries.length === 0) { %>
|
|
51
|
+
*No conventional commits found.*
|
|
52
|
+
<% } else { %>
|
|
53
|
+
<% const groups = it.groupedEntries; %>
|
|
54
|
+
<% const typeOrder = ["breaking", "feat", "fix", "perf", "docs", "style", "refactor", "test", "build", "ci", "chore"]; %>
|
|
55
|
+
<% const typeLabels = {
|
|
56
|
+
breaking: "💥 Breaking Changes",
|
|
57
|
+
feat: "✨ Features",
|
|
58
|
+
fix: "🐛 Bug Fixes",
|
|
59
|
+
perf: "⚡ Performance",
|
|
60
|
+
docs: "📝 Documentation",
|
|
61
|
+
style: "💄 Styling",
|
|
62
|
+
refactor: "♻️ Refactoring",
|
|
63
|
+
test: "✅ Tests",
|
|
64
|
+
build: "📦 Build",
|
|
65
|
+
ci: "👷 CI",
|
|
66
|
+
chore: "🔧 Chores"
|
|
67
|
+
}; %>
|
|
408
68
|
|
|
409
|
-
|
|
410
|
-
|
|
69
|
+
<% for (const type of typeOrder) { %>
|
|
70
|
+
<% const entries = groups.get(type); %>
|
|
71
|
+
<% if (entries && entries.length > 0) { %>
|
|
72
|
+
## <%= typeLabels[type] || type.charAt(0).toUpperCase() + type.slice(1) %>
|
|
411
73
|
|
|
412
|
-
|
|
413
|
-
|
|
74
|
+
<% for (const entry of entries) { %>
|
|
75
|
+
- <% if (entry.scope) { %>**<%= entry.scope %>**: <% } %><%= entry.description %><% if (entry.references.length > 0) { %> (<%= entry.references.map(r => "#" + r.value).join(", ") %>)<% } %> (\`<%= entry.shortHash %>\`)
|
|
76
|
+
<% } %>
|
|
414
77
|
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
78
|
+
<% } %>
|
|
79
|
+
<% } %>
|
|
80
|
+
|
|
81
|
+
<% for (const [type, entries] of groups) { %>
|
|
82
|
+
<% if (!typeOrder.includes(type)) { %>
|
|
83
|
+
## <%= type.charAt(0).toUpperCase() + type.slice(1) %>
|
|
84
|
+
|
|
85
|
+
<% for (const entry of entries) { %>
|
|
86
|
+
- <% if (entry.scope) { %>**<%= entry.scope %>**: <% } %><%= entry.description %> (\`<%= entry.shortHash %>\`)
|
|
87
|
+
<% } %>
|
|
88
|
+
|
|
89
|
+
<% } %>
|
|
90
|
+
<% } %>
|
|
91
|
+
<% } %>`;
|
|
92
|
+
/**
|
|
93
|
+
* Pure function to format changelog as markdown
|
|
94
|
+
*/
|
|
95
|
+
function formatChangelogMarkdown(changelog) {
|
|
96
|
+
const groups = groupByType(changelog.entries);
|
|
97
|
+
return eta$1.renderString(CHANGELOG_TEMPLATE, {
|
|
98
|
+
packageName: changelog.packageName,
|
|
99
|
+
version: changelog.version,
|
|
100
|
+
previousVersion: changelog.previousVersion,
|
|
101
|
+
entries: changelog.entries,
|
|
102
|
+
groupedEntries: groups
|
|
424
103
|
});
|
|
425
|
-
|
|
426
|
-
|
|
104
|
+
}
|
|
105
|
+
/**
|
|
106
|
+
* Pure function to create a changelog object
|
|
107
|
+
*/
|
|
108
|
+
function createChangelog(packageName, version, previousVersion, commits) {
|
|
109
|
+
return {
|
|
427
110
|
packageName,
|
|
428
111
|
version,
|
|
429
112
|
previousVersion,
|
|
430
|
-
|
|
431
|
-
compareUrl,
|
|
432
|
-
owner,
|
|
433
|
-
repo,
|
|
434
|
-
groups: groups.map((group) => {
|
|
435
|
-
const commitsInGroup = grouped.get(group.name) ?? [];
|
|
436
|
-
if (commitsInGroup.length > 0) logger.verbose(`Found ${commitsInGroup.length} commits for group "${group.name}".`);
|
|
437
|
-
const formattedCommits = commitsInGroup.map((commit) => ({ line: formatCommitLine({
|
|
438
|
-
commit,
|
|
439
|
-
owner,
|
|
440
|
-
repo,
|
|
441
|
-
authors: commitAuthors.get(commit.hash) ?? []
|
|
442
|
-
}) }));
|
|
443
|
-
return {
|
|
444
|
-
name: group.name,
|
|
445
|
-
title: group.title,
|
|
446
|
-
commits: formattedCommits
|
|
447
|
-
};
|
|
448
|
-
})
|
|
113
|
+
entries: parseCommits(commits)
|
|
449
114
|
};
|
|
450
|
-
const eta = new Eta();
|
|
451
|
-
const templateToUse = template || DEFAULT_CHANGELOG_TEMPLATE;
|
|
452
|
-
return eta.renderString(templateToUse, templateData).trim();
|
|
453
115
|
}
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
owner: normalizedOptions.owner,
|
|
467
|
-
repo: normalizedOptions.repo,
|
|
468
|
-
groups: normalizedOptions.groups,
|
|
469
|
-
template: normalizedOptions.changelog?.template,
|
|
470
|
-
githubClient
|
|
471
|
-
});
|
|
472
|
-
let updatedContent;
|
|
473
|
-
if (!existingContent) {
|
|
474
|
-
updatedContent = `# ${workspacePackage.name}\n\n${newEntry}\n`;
|
|
475
|
-
await writeFile(changelogPath, updatedContent, "utf-8");
|
|
476
|
-
return;
|
|
477
|
-
}
|
|
478
|
-
const parsed = parseChangelog(existingContent);
|
|
479
|
-
const lines = existingContent.split("\n");
|
|
480
|
-
const existingVersionIndex = parsed.versions.findIndex((v) => v.version === version);
|
|
481
|
-
if (existingVersionIndex !== -1) {
|
|
482
|
-
const existingVersion = parsed.versions[existingVersionIndex];
|
|
483
|
-
const before = lines.slice(0, existingVersion.lineStart);
|
|
484
|
-
const after = lines.slice(existingVersion.lineEnd + 1);
|
|
485
|
-
updatedContent = [
|
|
486
|
-
...before,
|
|
487
|
-
newEntry,
|
|
488
|
-
...after
|
|
489
|
-
].join("\n");
|
|
490
|
-
} else {
|
|
491
|
-
const insertAt = parsed.headerLineEnd + 1;
|
|
492
|
-
const before = lines.slice(0, insertAt);
|
|
493
|
-
const after = lines.slice(insertAt);
|
|
494
|
-
if (before.length > 0 && before[before.length - 1] !== "") before.push("");
|
|
495
|
-
updatedContent = [
|
|
496
|
-
...before,
|
|
497
|
-
newEntry,
|
|
498
|
-
"",
|
|
499
|
-
...after
|
|
500
|
-
].join("\n");
|
|
501
|
-
}
|
|
502
|
-
await writeFile(changelogPath, updatedContent, "utf-8");
|
|
503
|
-
}
|
|
504
|
-
async function resolveCommitAuthors(commits, githubClient) {
|
|
505
|
-
const authorsToResolve = /* @__PURE__ */ new Set();
|
|
506
|
-
const commitAuthors = /* @__PURE__ */ new Map();
|
|
507
|
-
for (const commit of commits) {
|
|
508
|
-
const authorsForCommit = [];
|
|
509
|
-
commit.authors.forEach((author, idx) => {
|
|
510
|
-
if (!author.email || !author.name) return;
|
|
511
|
-
let info = globalAuthorCache.get(author.email);
|
|
512
|
-
if (!info) {
|
|
513
|
-
info = {
|
|
514
|
-
commits: [],
|
|
515
|
-
name: author.name,
|
|
516
|
-
email: author.email
|
|
116
|
+
|
|
117
|
+
//#endregion
|
|
118
|
+
//#region src/services/changelog.service.ts
|
|
119
|
+
var ChangelogService = class extends Effect.Service()("@ucdjs/release-scripts/ChangelogService", {
|
|
120
|
+
effect: Effect.gen(function* () {
|
|
121
|
+
function generateChangelog(pkg, newVersion, commits) {
|
|
122
|
+
return Effect.gen(function* () {
|
|
123
|
+
const changelog = createChangelog(pkg.name, newVersion, pkg.version, commits);
|
|
124
|
+
return {
|
|
125
|
+
changelog,
|
|
126
|
+
markdown: formatChangelogMarkdown(changelog),
|
|
127
|
+
filePath: `${pkg.path}/CHANGELOG.md`
|
|
517
128
|
};
|
|
518
|
-
|
|
519
|
-
}
|
|
520
|
-
if (idx === 0) info.commits.push(commit.shortHash);
|
|
521
|
-
authorsForCommit.push(info);
|
|
522
|
-
if (!info.login) authorsToResolve.add(info);
|
|
523
|
-
});
|
|
524
|
-
commitAuthors.set(commit.hash, authorsForCommit);
|
|
525
|
-
}
|
|
526
|
-
await Promise.all(Array.from(authorsToResolve).map((info) => githubClient.resolveAuthorInfo(info)));
|
|
527
|
-
return commitAuthors;
|
|
528
|
-
}
|
|
529
|
-
function formatCommitLine({ commit, owner, repo, authors }) {
|
|
530
|
-
const commitUrl = `https://github.com/${owner}/${repo}/commit/${commit.hash}`;
|
|
531
|
-
let line = `${commit.description}`;
|
|
532
|
-
const references = commit.references ?? [];
|
|
533
|
-
if (references.length > 0) logger.verbose("Located references in commit", references.length);
|
|
534
|
-
for (const ref of references) {
|
|
535
|
-
if (!ref.value) continue;
|
|
536
|
-
const number = Number.parseInt(ref.value.replace(/^#/, ""), 10);
|
|
537
|
-
if (Number.isNaN(number)) continue;
|
|
538
|
-
if (ref.type === "issue") {
|
|
539
|
-
line += ` ([Issue ${ref.value}](https://github.com/${owner}/${repo}/issues/${number}))`;
|
|
540
|
-
continue;
|
|
541
|
-
}
|
|
542
|
-
line += ` ([PR ${ref.value}](https://github.com/${owner}/${repo}/pull/${number}))`;
|
|
543
|
-
}
|
|
544
|
-
line += ` ([${commit.shortHash}](${commitUrl}))`;
|
|
545
|
-
if (authors.length > 0) {
|
|
546
|
-
const authorList = authors.map((author) => {
|
|
547
|
-
if (author.login) return `[@${author.login}](https://github.com/${author.login})`;
|
|
548
|
-
return author.name;
|
|
549
|
-
}).join(", ");
|
|
550
|
-
line += ` (by ${authorList})`;
|
|
551
|
-
}
|
|
552
|
-
return line;
|
|
553
|
-
}
|
|
554
|
-
function parseChangelog(content) {
|
|
555
|
-
const lines = content.split("\n");
|
|
556
|
-
let packageName = null;
|
|
557
|
-
let headerLineEnd = -1;
|
|
558
|
-
const versions = [];
|
|
559
|
-
for (let i = 0; i < lines.length; i++) {
|
|
560
|
-
const line = lines[i].trim();
|
|
561
|
-
if (line.startsWith("# ")) {
|
|
562
|
-
packageName = line.slice(2).trim();
|
|
563
|
-
headerLineEnd = i;
|
|
564
|
-
break;
|
|
129
|
+
});
|
|
565
130
|
}
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
131
|
+
return { generateChangelog };
|
|
132
|
+
}),
|
|
133
|
+
dependencies: []
|
|
134
|
+
}) {};
|
|
135
|
+
|
|
136
|
+
//#endregion
|
|
137
|
+
//#region src/services/dependency-graph.service.ts
|
|
138
|
+
var DependencyGraphService = class extends Effect.Service()("@ucdjs/release-scripts/DependencyGraphService", {
|
|
139
|
+
effect: Effect.gen(function* () {
|
|
140
|
+
function buildGraph(packages) {
|
|
141
|
+
const nameToPackage = /* @__PURE__ */ new Map();
|
|
142
|
+
const adjacency = /* @__PURE__ */ new Map();
|
|
143
|
+
const inDegree = /* @__PURE__ */ new Map();
|
|
144
|
+
for (const pkg of packages) {
|
|
145
|
+
nameToPackage.set(pkg.name, pkg);
|
|
146
|
+
adjacency.set(pkg.name, /* @__PURE__ */ new Set());
|
|
147
|
+
inDegree.set(pkg.name, 0);
|
|
148
|
+
}
|
|
149
|
+
for (const pkg of packages) {
|
|
150
|
+
const deps = new Set([...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies]);
|
|
151
|
+
for (const depName of deps) {
|
|
152
|
+
if (!nameToPackage.has(depName)) continue;
|
|
153
|
+
adjacency.get(depName)?.add(pkg.name);
|
|
154
|
+
inDegree.set(pkg.name, (inDegree.get(pkg.name) ?? 0) + 1);
|
|
578
155
|
}
|
|
579
|
-
const versionContent = lines.slice(lineStart, lineEnd + 1).join("\n");
|
|
580
|
-
versions.push({
|
|
581
|
-
version,
|
|
582
|
-
lineStart,
|
|
583
|
-
lineEnd,
|
|
584
|
-
content: versionContent
|
|
585
|
-
});
|
|
586
156
|
}
|
|
157
|
+
return {
|
|
158
|
+
nameToPackage,
|
|
159
|
+
adjacency,
|
|
160
|
+
inDegree
|
|
161
|
+
};
|
|
587
162
|
}
|
|
163
|
+
function topologicalOrder(packages) {
|
|
164
|
+
return Effect.gen(function* () {
|
|
165
|
+
const { nameToPackage, adjacency, inDegree } = buildGraph(packages);
|
|
166
|
+
const queue = [];
|
|
167
|
+
const levels = /* @__PURE__ */ new Map();
|
|
168
|
+
for (const [name, degree] of inDegree) if (degree === 0) {
|
|
169
|
+
queue.push(name);
|
|
170
|
+
levels.set(name, 0);
|
|
171
|
+
}
|
|
172
|
+
let queueIndex = 0;
|
|
173
|
+
const ordered = [];
|
|
174
|
+
while (queueIndex < queue.length) {
|
|
175
|
+
const current = queue[queueIndex++];
|
|
176
|
+
const currentLevel = levels.get(current) ?? 0;
|
|
177
|
+
const pkg = nameToPackage.get(current);
|
|
178
|
+
if (pkg) ordered.push({
|
|
179
|
+
package: pkg,
|
|
180
|
+
level: currentLevel
|
|
181
|
+
});
|
|
182
|
+
for (const neighbor of adjacency.get(current) ?? []) {
|
|
183
|
+
const nextLevel = currentLevel + 1;
|
|
184
|
+
if (nextLevel > (levels.get(neighbor) ?? 0)) levels.set(neighbor, nextLevel);
|
|
185
|
+
const newDegree = (inDegree.get(neighbor) ?? 0) - 1;
|
|
186
|
+
inDegree.set(neighbor, newDegree);
|
|
187
|
+
if (newDegree === 0) queue.push(neighbor);
|
|
188
|
+
}
|
|
189
|
+
}
|
|
190
|
+
if (ordered.length !== packages.length) {
|
|
191
|
+
const processed = new Set(ordered.map((o) => o.package.name));
|
|
192
|
+
const unprocessed = packages.filter((p) => !processed.has(p.name)).map((p) => p.name);
|
|
193
|
+
return yield* Effect.fail(/* @__PURE__ */ new Error(`Cycle detected in workspace dependencies. Packages involved: ${unprocessed.join(", ")}`));
|
|
194
|
+
}
|
|
195
|
+
return ordered;
|
|
196
|
+
});
|
|
197
|
+
}
|
|
198
|
+
return { topologicalOrder };
|
|
199
|
+
}),
|
|
200
|
+
dependencies: []
|
|
201
|
+
}) {};
|
|
202
|
+
|
|
203
|
+
//#endregion
|
|
204
|
+
//#region src/errors.ts
|
|
205
|
+
var GitCommandError = class extends Data.TaggedError("GitCommandError") {};
|
|
206
|
+
var ExternalCommitParserError = class extends Data.TaggedError("ExternalCommitParserError") {};
|
|
207
|
+
var WorkspaceError = class extends Data.TaggedError("WorkspaceError") {};
|
|
208
|
+
var GitHubError = class extends Data.TaggedError("GitHubError") {};
|
|
209
|
+
var VersionCalculationError = class extends Data.TaggedError("VersionCalculationError") {};
|
|
210
|
+
var OverridesLoadError = class extends Data.TaggedError("OverridesLoadError") {};
|
|
211
|
+
var NPMError = class extends Data.TaggedError("NPMError") {};
|
|
212
|
+
var PublishError = class extends Data.TaggedError("PublishError") {};
|
|
213
|
+
var TagError = class extends Data.TaggedError("TagError") {};
|
|
214
|
+
|
|
215
|
+
//#endregion
|
|
216
|
+
//#region src/options.ts
|
|
217
|
+
// Default Eta template for the release pull-request body; `{{packages}}` is
// substituted with the rendered package list.
const DEFAULT_PR_BODY_TEMPLATE = `## Summary\n\nThis PR contains the following changes:\n\n- Updated package versions\n- Updated changelogs\n\n## Packages\n\nThe following packages will be released:\n\n{{packages}}`;
// Default changelog scaffold; `{{releases}}` is substituted with the release sections.
const DEFAULT_CHANGELOG_TEMPLATE = `# Changelog\n\n{{releases}}`;
// Default conventional-commit type → changelog section mapping.
const DEFAULT_TYPES = {
  feat: {
    title: "🚀 Features",
    color: "green"
  },
  fix: {
    title: "🐞 Bug Fixes",
    color: "red"
  },
  refactor: {
    title: "🔧 Code Refactoring",
    color: "blue"
  },
  perf: {
    title: "🏎 Performance",
    color: "orange"
  },
  docs: {
    title: "📚 Documentation",
    color: "purple"
  },
  style: {
    title: "🎨 Styles",
    color: "pink"
  }
};
/**
 * Validate and normalize user-supplied release options into the fully
 * populated configuration object consumed by the services (provided to them
 * via the ReleaseScriptsOptions context tag).
 *
 * @param {object} options - raw options; `githubToken` and `repo` are required.
 * @returns {object} normalized options with all defaults applied.
 * @throws {Error} when the token is missing/blank, or `repo` is not exactly
 *   in "owner/repo" form.
 */
function normalizeReleaseScriptsOptions(options) {
  const { workspaceRoot = process.cwd(), githubToken = "", repo: fullRepo, packages = true, branch = {}, globalCommitMode = "dependencies", pullRequest = {}, changelog = {}, types = {}, dryRun = false, npm = {} } = options;
  const token = githubToken.trim();
  if (!token) throw new Error("GitHub token is required. Pass it in via options.");
  if (!fullRepo || !fullRepo.trim() || !fullRepo.includes("/")) throw new Error("Repository (repo) is required. Specify in 'owner/repo' format (e.g., 'octocat/hello-world').");
  // FIX: previously `const [owner, repo] = fullRepo.split("/")` silently
  // dropped extra segments ("a/b/c" became owner "a", repo "b"); any segment
  // count other than exactly two is now rejected.
  const segments = fullRepo.split("/");
  const [owner, repo] = segments;
  if (segments.length !== 2 || !owner || !repo) throw new Error(`Invalid repo format: "${fullRepo}". Expected format: "owner/repo" (e.g., "octocat/hello-world").`);
  return {
    dryRun,
    workspaceRoot,
    githubToken: token,
    owner,
    repo,
    // Object form gets its sub-fields defaulted; `true` / string[] pass through.
    packages: typeof packages === "object" && !Array.isArray(packages) ? {
      exclude: packages.exclude ?? [],
      include: packages.include ?? [],
      excludePrivate: packages.excludePrivate ?? false
    } : packages,
    branch: {
      release: branch.release ?? "release/next",
      default: branch.default ?? "main"
    },
    globalCommitMode,
    pullRequest: {
      title: pullRequest.title ?? "chore: release new version",
      body: pullRequest.body ?? DEFAULT_PR_BODY_TEMPLATE
    },
    changelog: {
      enabled: changelog.enabled ?? true,
      template: changelog.template ?? DEFAULT_CHANGELOG_TEMPLATE,
      emojis: changelog.emojis ?? true
    },
    // FIX: always merge into a fresh object — the old code handed out the
    // shared mutable DEFAULT_TYPES reference when no custom types were given,
    // and inconsistently re-read `options.types` instead of the destructured
    // `types`. Spreading an empty `types` yields identical content.
    types: {
      ...DEFAULT_TYPES,
      ...types
    },
    npm: {
      otp: npm.otp,
      provenance: npm.provenance ?? true
    }
  };
}
|
|
287
|
+
var ReleaseScriptsOptions = class extends Context.Tag("@ucdjs/release-scripts/ReleaseScriptsOptions")() {};
|
|
595
288
|
|
|
596
289
|
//#endregion
|
|
597
|
-
//#region src/
|
|
598
|
-
var
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
const
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
}
|
|
290
|
+
//#region src/services/git.service.ts
|
|
291
|
+
// GitService: all git interactions for the release workflow, exposed as an
// Effect service. Every command runs in `config.workspaceRoot`. In dry-run
// mode, mutating commands are replaced by a stub that only reports what it
// would have run; read-only commands still execute.
var GitService = class extends Effect.Service()("@ucdjs/release-scripts/GitService", {
  effect: Effect.gen(function* () {
    const executor = yield* CommandExecutor.CommandExecutor;
    const config = yield* ReleaseScriptsOptions;
    // Run `git <args>` capturing stdout; executor failures map to GitCommandError.
    const execGitCommand = (args) => executor.string(Command.make("git", ...args).pipe(Command.workingDirectory(config.workspaceRoot))).pipe(Effect.mapError((err) => {
      return new GitCommandError({
        command: `git ${args.join(" ")}`,
        stderr: err.message
      });
    }));
    // Dry-run gate: all mutating helpers below go through this wrapper.
    const execGitCommandIfNotDry = config.dryRun ? (args) => Effect.succeed(`Dry run mode: skipping git command "git ${args.join(" ")}"`) : execGitCommand;
    // True when inside a work tree; a failing rev-parse is treated as "no".
    const isWithinRepository = Effect.gen(function* () {
      return (yield* execGitCommand(["rev-parse", "--is-inside-work-tree"]).pipe(Effect.catchAll(() => Effect.succeed("false")))).trim() === "true";
    });
    // Local branch names with the leading `* ` current-branch marker stripped.
    // NOTE(review): the final `.map((line) => line.trim())` is redundant — the
    // previous map already trims.
    const listBranches = Effect.gen(function* () {
      return (yield* execGitCommand(["branch", "--list"])).trim().split("\n").filter((line) => line.length > 0).map((line) => line.replace(/^\* /, "").trim()).map((line) => line.trim());
    });
    // `git status --porcelain` prints nothing when the tree is clean.
    const isWorkingDirectoryClean = Effect.gen(function* () {
      return (yield* execGitCommand(["status", "--porcelain"])).trim().length === 0;
    });
    function doesBranchExist(branch) {
      return listBranches.pipe(Effect.map((branches) => branches.includes(branch)));
    }
    // Create `branch` from `base` without checking it out (dry-run aware).
    function createBranch(branch, base = config.branch.default) {
      return execGitCommandIfNotDry([
        "branch",
        branch,
        base
      ]);
    }
    // Abbreviated ref name of HEAD (the current branch).
    const getBranch = Effect.gen(function* () {
      return (yield* execGitCommand([
        "rev-parse",
        "--abbrev-ref",
        "HEAD"
      ])).trim();
    });
    // NOTE(review): checkout is NOT dry-run gated, so it mutates the work tree
    // even in dry-run mode — confirm this is intentional.
    function checkoutBranch(branch) {
      return execGitCommand(["checkout", branch]);
    }
    function rebaseBranch(onto) {
      return execGitCommandIfNotDry(["rebase", onto]);
    }
    // Stage the given paths; fails explicitly instead of running a bare `git add`.
    function stageChanges(files) {
      return Effect.gen(function* () {
        if (files.length === 0) return yield* Effect.fail(/* @__PURE__ */ new Error("No files to stage."));
        return yield* execGitCommandIfNotDry(["add", ...files]);
      });
    }
    function writeCommit(message) {
      return execGitCommandIfNotDry([
        "commit",
        "-m",
        message
      ]);
    }
    function pushChanges(branch, remote = "origin") {
      return execGitCommandIfNotDry([
        "push",
        remote,
        branch
      ]);
    }
    // --force-with-lease: the push aborts if the remote moved since last fetch.
    function forcePushChanges(branch, remote = "origin") {
      return execGitCommandIfNotDry([
        "push",
        "--force-with-lease",
        remote,
        branch
      ]);
    }
    // Read a file's contents as of `ref` (default HEAD) via `git show`.
    function readFile(filePath, ref = "HEAD") {
      return execGitCommand(["show", `${ref}:${filePath}`]);
    }
    // Newest `<pkg>@*` tag by descending version sort, resolved to
    // { name, sha }; null when the package has no tags yet.
    function getMostRecentPackageTag(packageName) {
      return execGitCommand([
        "tag",
        "--list",
        "--sort=-version:refname",
        `${packageName}@*`
      ]).pipe(Effect.map((tags) => {
        return tags.trim().split("\n").map((tag) => tag.trim()).filter((tag) => tag.length > 0)[0] || null;
      }), Effect.flatMap((tag) => {
        if (tag === null) return Effect.succeed(null);
        return execGitCommand(["rev-parse", tag]).pipe(Effect.map((sha) => ({
          name: tag,
          sha: sha.trim()
        })));
      }));
    }
    // Annotated tag when a message is given, lightweight tag otherwise.
    function createTag(name, message) {
      return execGitCommandIfNotDry(message ? [
        "tag",
        "-a",
        name,
        "-m",
        message
      ] : ["tag", name]).pipe(Effect.mapError((err) => new TagError({
        message: `Failed to create tag "${name}"`,
        tagName: name,
        operation: "create",
        cause: err
      })));
    }
    function pushTag(name, remote = "origin") {
      return execGitCommandIfNotDry([
        "push",
        remote,
        name
      ]).pipe(Effect.mapError((err) => new TagError({
        message: `Failed to push tag "${name}" to ${remote}`,
        tagName: name,
        operation: "push",
        cause: err
      })));
    }
    // Commit listing/parsing is delegated to the external commit-parser package.
    function getCommits(options) {
      return Effect.tryPromise({
        try: async () => CommitParser.getCommits({
          from: options?.from,
          to: options?.to,
          folder: options?.folder,
          cwd: config.workspaceRoot
        }),
        catch: (e) => new ExternalCommitParserError({
          message: `commit-parser getCommits`,
          cause: e instanceof Error ? e.message : String(e)
        })
      });
    }
    // Map of commit sha → files touched, over `from^..to` (so `from` itself is
    // included). Parses `git log --name-only --format=%H` output: a 40-hex line
    // starts a new commit; subsequent lines are its file paths.
    function filesChangesBetweenRefs(from, to) {
      const commitsMap = /* @__PURE__ */ new Map();
      return execGitCommand([
        "log",
        "--name-only",
        "--format=%H",
        `${from}^..${to}`
      ]).pipe(Effect.map((output) => {
        const lines = output.trim().split("\n").filter((line) => line.trim() !== "");
        let currentSha = null;
        const HASH_REGEX = /^[0-9a-f]{40}$/i;
        for (const line of lines) {
          const trimmedLine = line.trim();
          if (HASH_REGEX.test(trimmedLine)) {
            currentSha = trimmedLine;
            commitsMap.set(currentSha, []);
            continue;
          }
          // File lines appearing before any sha line are ignored.
          if (currentSha === null) continue;
          commitsMap.get(currentSha).push(trimmedLine);
        }
        return commitsMap;
      }));
    }
    // Precondition guard used before mutations: inside a repo, clean tree.
    const assertWorkspaceReady = Effect.gen(function* () {
      if (!(yield* isWithinRepository)) return yield* Effect.fail(/* @__PURE__ */ new Error("Not within a Git repository."));
      if (!(yield* isWorkingDirectoryClean)) return yield* Effect.fail(/* @__PURE__ */ new Error("Working directory is not clean."));
      return true;
    });
    // Public API, grouped by concern.
    return {
      branches: {
        list: listBranches,
        exists: doesBranchExist,
        create: createBranch,
        checkout: checkoutBranch,
        rebase: rebaseBranch,
        get: getBranch
      },
      commits: {
        stage: stageChanges,
        write: writeCommit,
        push: pushChanges,
        forcePush: forcePushChanges,
        get: getCommits,
        filesChangesBetweenRefs
      },
      tags: {
        mostRecentForPackage: getMostRecentPackageTag,
        create: createTag,
        push: pushTag
      },
      workspace: {
        readFile,
        isWithinRepository,
        isWorkingDirectoryClean,
        assertWorkspaceReady
      }
    };
  }),
  dependencies: [NodeCommandExecutor.layer]
}) {};
|
|
482
|
+
|
|
483
|
+
//#endregion
|
|
484
|
+
//#region src/services/github.service.ts
|
|
485
|
+
// Shared Eta renderer instance for PR-body templates.
const eta = new Eta();
// Subset of the GitHub REST API pull-request payload that this service uses.
const PullRequestSchema = Schema.Struct({
  number: Schema.Number,
  title: Schema.String,
  body: Schema.String,
  head: Schema.Struct({
    ref: Schema.String,
    sha: Schema.String
  }),
  base: Schema.Struct({
    ref: Schema.String,
    sha: Schema.String
  }),
  state: Schema.Literal("open", "closed", "merged"),
  draft: Schema.Boolean,
  mergeable: Schema.NullOr(Schema.Boolean),
  url: Schema.String,
  html_url: Schema.String
});
// Payload for creating a PR. NOTE(review): not referenced anywhere in this
// chunk — confirm it is used elsewhere or remove.
const CreatePullRequestOptionsSchema = Schema.Struct({
  title: Schema.String,
  body: Schema.NullOr(Schema.String),
  head: Schema.String,
  base: Schema.String,
  draft: Schema.optional(Schema.Boolean)
});
// Payload for PATCHing a PR. NOTE(review): updatePullRequest below does not
// decode its `options` argument against this schema — confirm intent.
const UpdatePullRequestOptionsSchema = Schema.Struct({
  title: Schema.optional(Schema.String),
  body: Schema.optional(Schema.String),
  state: Schema.optional(Schema.Literal("open", "closed"))
});
// Commit-status payload shape. NOTE(review): not referenced in this chunk.
const CommitStatusSchema = Schema.Struct({
  state: Schema.Literal("pending", "success", "error", "failure"),
  target_url: Schema.optional(Schema.String),
  description: Schema.optional(Schema.String),
  context: Schema.String
});
// Owner/repo pair. NOTE(review): not referenced in this chunk.
const RepositoryInfoSchema = Schema.Struct({
  owner: Schema.String,
  repo: Schema.String
});
// GitHubService: thin wrapper over the GitHub REST API for the configured
// owner/repo, decoding responses with Effect Schema.
var GitHubService = class extends Effect.Service()("@ucdjs/release-scripts/GitHubService", {
  effect: Effect.gen(function* () {
    const config = yield* ReleaseScriptsOptions;
    // Fetch `repos/{owner}/{repo}/{endpoint}` with auth headers; non-OK
    // responses fail with GitHubError, 204 yields undefined, everything else
    // is JSON-decoded against `schema`.
    function makeRequest(endpoint, schema, options = {}) {
      const url = `https://api.github.com/repos/${config.owner}/${config.repo}/${endpoint}`;
      return Effect.tryPromise({
        try: async () => {
          const res = await fetch(url, {
            ...options,
            headers: {
              "Authorization": `token ${config.githubToken}`,
              "Accept": "application/vnd.github.v3+json",
              "Content-Type": "application/json",
              "User-Agent": "ucdjs-release-scripts (https://github.com/ucdjs/release-scripts)",
              ...options.headers
            }
          });
          if (!res.ok) {
            const text = await res.text();
            throw new Error(`GitHub API request failed with status ${res.status}: ${text}`);
          }
          if (res.status === 204) return;
          return res.json();
        },
        catch: (e) => new GitHubError({
          message: String(e),
          operation: "request",
          cause: e
        })
      }).pipe(Effect.flatMap((json) => json === void 0 ? Effect.succeed(void 0) : Schema.decodeUnknown(schema)(json).pipe(Effect.mapError((e) => new GitHubError({
        message: "Failed to decode GitHub response",
        operation: "request",
        cause: e
      })))));
    }
    // First open PR whose head matches `branch` (qualified with the owner when
    // no "owner:branch" qualifier is given), or null when none exists.
    function getPullRequestByBranch(branch) {
      const head = branch.includes(":") ? branch : `${config.owner}:${branch}`;
      return makeRequest(`pulls?state=open&head=${encodeURIComponent(head)}`, Schema.Array(PullRequestSchema)).pipe(Effect.map((pulls) => pulls.length > 0 ? pulls[0] : null), Effect.mapError((e) => new GitHubError({
        message: e.message,
        operation: "getPullRequestByBranch",
        cause: e.cause
      })));
    }
    // POST a commit status for `sha`; resolves with the status that was sent.
    function setCommitStatus(sha, status) {
      return makeRequest(`statuses/${sha}`, Schema.Unknown, {
        method: "POST",
        body: JSON.stringify(status)
      }).pipe(Effect.map(() => status), Effect.catchAll((e) => Effect.fail(new GitHubError({
        message: e.message,
        operation: "setCommitStatus",
        cause: e.cause
      }))));
    }
    // PATCH an existing PR and return the decoded, updated PR.
    function updatePullRequest(number, options) {
      return makeRequest(`pulls/${number}`, PullRequestSchema, {
        method: "PATCH",
        body: JSON.stringify(options)
      }).pipe(Effect.mapError((e) => new GitHubError({
        message: e.message,
        operation: "updatePullRequest",
        cause: e.cause
      })));
    }
    // Eta template rendered by generateReleasePRBody; `it.releases` entries
    // provide packageName / previousVersion / version.
    const prBodyTemplate = `## Release Summary

This PR prepares the release of <%= it.count %> package<%= it.count === 1 ? "" : "s" %>:

<% for (const release of it.releases) { %>
- **<%= release.packageName %>**: \`<%= release.previousVersion %>\` → \`<%= release.version %>\`
<% } %>

## Changes

See individual package changelogs for details.
`;
    // Render the release PR body for the given release list.
    function generateReleasePRBody(releases) {
      return Effect.gen(function* () {
        return eta.renderString(prBodyTemplate, {
          count: releases.length,
          releases
        });
      });
    }
    return {
      getPullRequestByBranch,
      setCommitStatus,
      updatePullRequest,
      generateReleasePRBody
    };
  }),
  dependencies: []
}) {};
|
|
618
|
+
|
|
619
|
+
//#endregion
|
|
620
|
+
//#region src/services/npm.service.ts
|
|
621
|
+
// Subset of an npm registry packument (package metadata document).
const PackumentSchema = Schema.Struct({
  "name": Schema.String,
  "dist-tags": Schema.Record({
    key: Schema.String,
    value: Schema.String
  }),
  "versions": Schema.Record({
    key: Schema.String,
    value: Schema.Struct({
      name: Schema.String,
      version: Schema.String,
      description: Schema.optional(Schema.String),
      dist: Schema.Struct({
        tarball: Schema.String,
        shasum: Schema.String,
        integrity: Schema.optional(Schema.String)
      })
    })
  })
});
// NPMService: registry lookups against registry.npmjs.org plus publishing
// via `pnpm publish`.
var NPMService = class extends Effect.Service()("@ucdjs/release-scripts/NPMService", {
  effect: Effect.gen(function* () {
    const executor = yield* CommandExecutor.CommandExecutor;
    const config = yield* ReleaseScriptsOptions;
    // Fetch and decode a package's packument; null when the package does not
    // exist (404). Other HTTP failures and decode errors become NPMError.
    const fetchPackument = (packageName) => Effect.tryPromise({
      try: async () => {
        const response = await fetch(`https://registry.npmjs.org/${packageName}`);
        if (response.status === 404) return null;
        if (!response.ok) throw new Error(`Failed to fetch packument: ${response.statusText}`);
        return await response.json();
      },
      catch: (error) => {
        return new NPMError({
          message: error instanceof Error ? error.message : String(error),
          operation: "fetchPackument"
        });
      }
    }).pipe(Effect.flatMap((data) => {
      if (data === null) return Effect.succeed(null);
      return Schema.decodeUnknown(PackumentSchema)(data).pipe(Effect.mapError((error) => new NPMError({
        message: `Failed to parse packument: ${error}`,
        operation: "fetchPackument"
      })));
    }));
    // True when `version` is already published; false for unknown packages.
    const versionExists = (packageName, version) => fetchPackument(packageName).pipe(Effect.map((packument) => {
      if (!packument) return false;
      return version in packument.versions;
    }));
    // The `latest` dist-tag, or null for unknown/untagged packages.
    const getLatestVersion = (packageName) => fetchPackument(packageName).pipe(Effect.map((packument) => {
      if (!packument) return null;
      return packument["dist-tags"].latest || null;
    }));
    // Run `pnpm publish` in options.packagePath and return trimmed stdout.
    // Provenance is on unless explicitly disabled; dry-run falls back to the
    // global config when not set per call.
    const publish = (options) => Effect.gen(function* () {
      const args = ["publish"];
      if (options.tagName) args.push("--tag", options.tagName);
      if (options.otp) args.push("--otp", options.otp);
      if (options.provenance !== false) args.push("--provenance");
      if (options.dryRun ?? config.dryRun) args.push("--dry-run");
      const command = Command.make("pnpm", ...args).pipe(Command.workingDirectory(options.packagePath));
      return (yield* executor.string(command).pipe(Effect.mapError((err) => new PublishError({
        message: `Failed to publish package at ${options.packagePath}: ${err.message}`,
        cause: err
      })))).trim();
    });
    return {
      fetchPackument,
      versionExists,
      getLatestVersion,
      publish
    };
  }),
  dependencies: []
}) {};
|
|
694
|
+
|
|
695
|
+
//#endregion
|
|
696
|
+
//#region src/services/workspace.service.ts
|
|
697
|
+
// Dependency map: package name → version range.
const DependencyObjectSchema = Schema.Record({
  key: Schema.String,
  value: Schema.String
});
// Subset of package.json fields used by the release scripts.
const PackageJsonSchema = Schema.Struct({
  name: Schema.String,
  private: Schema.optional(Schema.Boolean),
  version: Schema.optional(Schema.String),
  dependencies: Schema.optional(DependencyObjectSchema),
  devDependencies: Schema.optional(DependencyObjectSchema),
  peerDependencies: Schema.optional(DependencyObjectSchema)
});
// Normalized workspace package record produced by findWorkspacePackages.
const WorkspacePackageSchema = Schema.Struct({
  name: Schema.String,
  version: Schema.String,
  path: Schema.String,
  packageJson: PackageJsonSchema,
  workspaceDependencies: Schema.Array(Schema.String),
  workspaceDevDependencies: Schema.Array(Schema.String)
});
// Shape of one entry in `pnpm -r ls --json` output.
const WorkspaceListSchema = Schema.Array(Schema.Struct({
  name: Schema.String,
  path: Schema.String,
  version: Schema.String,
  private: Schema.Boolean,
  dependencies: Schema.optional(DependencyObjectSchema),
  devDependencies: Schema.optional(DependencyObjectSchema),
  peerDependencies: Schema.optional(DependencyObjectSchema)
}));
// WorkspaceService: discovers pnpm workspace packages and reads/writes their
// package.json files (writes are skipped in dry-run mode).
var WorkspaceService = class extends Effect.Service()("@ucdjs/release-scripts/WorkspaceService", {
  effect: Effect.gen(function* () {
    const executor = yield* CommandExecutor.CommandExecutor;
    const config = yield* ReleaseScriptsOptions;
    // Parsed `pnpm -r ls --json` output, wrapped in Effect.cached so pnpm is
    // invoked at most once per service instance.
    const workspacePackageListOutput = yield* executor.string(Command.make("pnpm", "-r", "ls", "--json").pipe(Command.workingDirectory(config.workspaceRoot))).pipe(Effect.flatMap((stdout) => Effect.try({
      try: () => JSON.parse(stdout),
      catch: (e) => new WorkspaceError({
        message: "Failed to parse pnpm JSON output",
        operation: "discover",
        cause: e
      })
    })), Effect.flatMap((json) => Schema.decodeUnknown(WorkspaceListSchema)(json).pipe(Effect.mapError((e) => new WorkspaceError({
      message: "Failed to decode pnpm output",
      operation: "discover",
      cause: e
    })))), Effect.cached);
    // Read and schema-validate `<pkgPath>/package.json`.
    function readPackageJson(pkgPath) {
      return Effect.tryPromise({
        try: async () => JSON.parse(await fs.readFile(path.join(pkgPath, "package.json"), "utf8")),
        catch: (e) => new WorkspaceError({
          message: `Failed to read package.json for ${pkgPath}`,
          cause: e,
          operation: "readPackageJson"
        })
      }).pipe(Effect.flatMap((json) => Schema.decodeUnknown(PackageJsonSchema)(json).pipe(Effect.mapError((e) => new WorkspaceError({
        message: `Invalid package.json for ${pkgPath}`,
        cause: e,
        operation: "readPackageJson"
      })))));
    }
    // Serialize `json` (2-space indent, trailing newline) to
    // `<pkgPath>/package.json`; a no-op message in dry-run mode.
    function writePackageJson(pkgPath, json) {
      const fullPath = path.join(pkgPath, "package.json");
      const content = `${JSON.stringify(json, null, 2)}\n`;
      if (config.dryRun) return Effect.succeed(`Dry run: skip writing ${fullPath}`);
      return Effect.tryPromise({
        try: async () => await fs.writeFile(fullPath, content, "utf8"),
        catch: (e) => new WorkspaceError({
          message: `Failed to write package.json for ${pkgPath}`,
          cause: e,
          operation: "writePackageJson"
        })
      });
    }
    // Discover packages per `config.packages` (true = all, string[] = explicit
    // list, object = filter options); fails when an explicitly requested
    // package is missing from the workspace.
    const discoverWorkspacePackages = Effect.gen(function* () {
      let workspaceOptions;
      let explicitPackages;
      if (config.packages == null || config.packages === true) workspaceOptions = { excludePrivate: false };
      else if (Array.isArray(config.packages)) {
        workspaceOptions = {
          excludePrivate: false,
          include: config.packages
        };
        explicitPackages = config.packages;
      } else {
        workspaceOptions = config.packages;
        if (config.packages.include) explicitPackages = config.packages.include;
      }
      const workspacePackages = yield* findWorkspacePackages(workspaceOptions);
      if (explicitPackages) {
        const foundNames = new Set(workspacePackages.map((p) => p.name));
        const missing = explicitPackages.filter((p) => !foundNames.has(p));
        if (missing.length > 0) return yield* Effect.fail(/* @__PURE__ */ new Error(`Package${missing.length > 1 ? "s" : ""} not found in workspace: ${missing.join(", ")}`));
      }
      return workspacePackages;
    });
    // Build normalized package records from the cached pnpm listing; packages
    // that fail validation are logged and skipped rather than failing the run.
    function findWorkspacePackages(options) {
      return workspacePackageListOutput.pipe(Effect.flatMap((rawProjects) => {
        const allPackageNames = new Set(rawProjects.map((p) => p.name));
        return Effect.all(rawProjects.map((rawProject) => readPackageJson(rawProject.path).pipe(Effect.flatMap((packageJson) => {
          if (!shouldIncludePackage(packageJson, options)) return Effect.succeed(null);
          const pkg = {
            name: rawProject.name,
            version: rawProject.version,
            path: rawProject.path,
            packageJson,
            // Only in-workspace deps are tracked here.
            workspaceDependencies: Object.keys(rawProject.dependencies || {}).filter((dep) => allPackageNames.has(dep)),
            workspaceDevDependencies: Object.keys(rawProject.devDependencies || {}).filter((dep) => allPackageNames.has(dep))
          };
          return Schema.decodeUnknown(WorkspacePackageSchema)(pkg).pipe(Effect.mapError((e) => new WorkspaceError({
            message: `Invalid workspace package structure for ${rawProject.name}`,
            cause: e,
            operation: "findWorkspacePackages"
          })));
        }), Effect.catchAll(() => {
          return Effect.logWarning(`Skipping invalid package ${rawProject.name}`).pipe(Effect.as(null));
        })))).pipe(Effect.map((packages) => packages.filter((pkg) => pkg !== null)));
      }));
    }
    // Filter rules: exclude private (optional), allow-list `include`,
    // deny-list `exclude` (exclude wins over include).
    function shouldIncludePackage(pkg, options) {
      if (!options) return true;
      if (options.excludePrivate && pkg.private) return false;
      if (options.include && options.include.length > 0) {
        if (!options.include.includes(pkg.name)) return false;
      }
      if (options.exclude?.includes(pkg.name)) return false;
      return true;
    }
    // Lookup by exact package name among discovered packages; null if absent.
    function findPackageByName(packageName) {
      return discoverWorkspacePackages.pipe(Effect.map((packages) => packages.find((pkg) => pkg.name === packageName) || null));
    }
    return {
      readPackageJson,
      writePackageJson,
      findWorkspacePackages,
      discoverWorkspacePackages,
      findPackageByName
    };
  }),
  dependencies: []
}) {};
|
|
836
|
+
|
|
837
|
+
//#endregion
|
|
838
|
+
//#region src/services/package-updater.service.ts
|
|
839
|
+
// Matches semver hyphen ranges ("1.0.0 - 2.0.0").
const DASH_RE = / - /;
// Matches comparator-style ranges (">=", "<=", ">", "<", "=").
const RANGE_OPERATION_RE = /^(?:>=|<=|[><=])/;
/**
 * Compute the dependency range to write after the target package bumps to
 * `newVersion`, preserving any `workspace:` protocol prefix.
 *
 * - `*` / `latest` are passed through untouched.
 * - Complex ranges (unions, hyphen ranges, comparators, space-separated
 *   intersections) are kept when the new version still satisfies them, and
 *   throw otherwise — rewriting them is not implemented.
 * - Simple ranges keep their `^`/`~` operator (if any) and pin `newVersion`.
 */
function nextRange(oldRange, newVersion) {
  const hasWorkspaceProtocol = oldRange.startsWith("workspace:");
  const prefix = hasWorkspaceProtocol ? "workspace:" : "";
  const range = hasWorkspaceProtocol ? oldRange.slice("workspace:".length) : oldRange;
  if (range === "*" || range === "latest") return prefix + range;
  const isComplexRange = range.includes("||") || DASH_RE.test(range) || RANGE_OPERATION_RE.test(range) || range.includes(" ");
  if (isComplexRange) {
    if (!semver.satisfies(newVersion, range)) {
      throw new Error(`Cannot update range "${oldRange}" to version ${newVersion}: new version is outside the existing range. Complex range updating is not yet implemented.`);
    }
    return prefix + range;
  }
  const operator = range.startsWith("^") || range.startsWith("~") ? range[0] : "";
  return `${prefix}${operator}${newVersion}`;
}
|
|
851
|
+
/**
 * Re-resolve every dependency range in `record` against freshly released
 * versions.
 *
 * @param record - dependency name → range map, possibly undefined.
 * @param releaseMap - Map of package name → newly released version.
 * @returns `{ updated, next }` where `next` is a new object only when at
 *   least one range changed; otherwise the original record (or undefined)
 *   is passed through unchanged.
 */
function updateDependencyRecord(record, releaseMap) {
  if (!record) return {
    updated: false,
    next: void 0
  };
  const rewritten = { ...record };
  let dirty = false;
  for (const [depName, currentRange] of Object.entries(record)) {
    const releasedVersion = releaseMap.get(depName);
    // Dependencies not part of this release batch keep their range.
    if (!releasedVersion) continue;
    const candidate = nextRange(currentRange, releasedVersion);
    if (candidate === currentRange) continue;
    rewritten[depName] = candidate;
    dirty = true;
  }
  return {
    updated: dirty,
    next: dirty ? rewritten : record
  };
}
|
|
711
|
-
|
|
712
|
-
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
|
|
872
|
+
// PackageUpdaterService: writes bumped versions and re-resolved workspace
// dependency ranges back into each package's package.json (writes go through
// WorkspaceService, which honors dry-run).
var PackageUpdaterService = class extends Effect.Service()("@ucdjs/release-scripts/PackageUpdaterService", {
  effect: Effect.gen(function* () {
    const workspace = yield* WorkspaceService;
    // Apply `releases` across `allPackages`; yields "written" or "skipped"
    // per package, in input order.
    function applyReleases(allPackages, releases) {
      const releaseMap = /* @__PURE__ */ new Map();
      for (const release of releases) releaseMap.set(release.package.name, release.newVersion);
      return Effect.all(allPackages.map((pkg) => Effect.gen(function* () {
        const releaseVersion = releaseMap.get(pkg.name);
        const nextJson = { ...pkg.packageJson };
        let updated = false;
        // Bump this package's own version when it is part of the release.
        if (releaseVersion && pkg.packageJson.version !== releaseVersion) {
          nextJson.version = releaseVersion;
          updated = true;
        }
        // Rewrite ranges that point at packages released in this batch.
        const depsResult = updateDependencyRecord(pkg.packageJson.dependencies, releaseMap);
        if (depsResult.updated) {
          nextJson.dependencies = depsResult.next;
          updated = true;
        }
        const devDepsResult = updateDependencyRecord(pkg.packageJson.devDependencies, releaseMap);
        if (devDepsResult.updated) {
          nextJson.devDependencies = devDepsResult.next;
          updated = true;
        }
        const peerDepsResult = updateDependencyRecord(pkg.packageJson.peerDependencies, releaseMap);
        if (peerDepsResult.updated) {
          nextJson.peerDependencies = peerDepsResult.next;
          updated = true;
        }
        // Untouched packages are not rewritten on disk.
        if (!updated) return "skipped";
        return yield* workspace.writePackageJson(pkg.path, nextJson).pipe(Effect.map(() => "written"));
      })));
    }
    return { applyReleases };
  }),
  dependencies: [WorkspaceService.Default]
}) {};
|
|
723
909
|
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
910
|
+
//#endregion
|
|
911
|
+
//#region src/services/version-calculator.service.ts
|
|
912
|
+
// Ordinal ranking of semver bump severities; a higher rank wins when merging.
const BUMP_PRIORITY = {
  none: 0,
  patch: 1,
  minor: 2,
  major: 3
};
/**
 * Return whichever of the two bump types is more severe. Ties (and unknown
 * bump names, which rank as 0) keep `current`.
 */
function maxBump(current, incoming) {
  const currentRank = BUMP_PRIORITY[current] ?? 0;
  const incomingRank = BUMP_PRIORITY[incoming] ?? 0;
  return incomingRank > currentRank ? incoming : current;
}
/**
 * Map a single parsed conventional commit to the bump it demands:
 * breaking → major, feat → minor, fix/perf → patch, anything else → none.
 */
function bumpFromCommit(commit) {
  if (commit.isBreaking) return "major";
  switch (commit.type) {
    case "feat":
      return "minor";
    case "fix":
    case "perf":
      return "patch";
    default:
      return "none";
  }
}
/** Fold a commit list into the single most severe bump ("none" when empty). */
function determineBump(commits) {
  let severity = "none";
  for (const commit of commits) {
    severity = maxBump(severity, bumpFromCommit(commit));
  }
  return severity;
}
|
|
930
|
+
var VersionCalculatorService = class extends Effect.Service()("@ucdjs/release-scripts/VersionCalculatorService", {
|
|
931
|
+
effect: Effect.gen(function* () {
|
|
932
|
+
function calculateBumps(packages, overrides) {
|
|
933
|
+
return Effect.all(packages.map((pkg) => Effect.gen(function* () {
|
|
934
|
+
const bumpType = determineBump([...pkg.commits, ...pkg.globalCommits]);
|
|
935
|
+
const hasDirectChanges = pkg.commits.length > 0;
|
|
936
|
+
let nextVersion = null;
|
|
937
|
+
const override = overrides[pkg.name];
|
|
938
|
+
if (override) {
|
|
939
|
+
if (!semver.valid(override)) return yield* Effect.fail(new VersionCalculationError({
|
|
940
|
+
message: `Invalid override version for ${pkg.name}: ${override}`,
|
|
941
|
+
packageName: pkg.name
|
|
942
|
+
}));
|
|
943
|
+
nextVersion = override;
|
|
944
|
+
}
|
|
945
|
+
if (nextVersion === null) if (bumpType === "none") nextVersion = pkg.version;
|
|
946
|
+
else {
|
|
947
|
+
const bumped = semver.inc(pkg.version, bumpType);
|
|
948
|
+
if (!bumped) return yield* Effect.fail(new VersionCalculationError({
|
|
949
|
+
message: `Failed to bump version for ${pkg.name} using bump type ${bumpType}`,
|
|
950
|
+
packageName: pkg.name
|
|
951
|
+
}));
|
|
952
|
+
nextVersion = bumped;
|
|
953
|
+
}
|
|
954
|
+
return {
|
|
955
|
+
package: {
|
|
956
|
+
name: pkg.name,
|
|
957
|
+
version: pkg.version,
|
|
958
|
+
path: pkg.path,
|
|
959
|
+
packageJson: pkg.packageJson,
|
|
960
|
+
workspaceDependencies: pkg.workspaceDependencies,
|
|
961
|
+
workspaceDevDependencies: pkg.workspaceDevDependencies
|
|
962
|
+
},
|
|
963
|
+
currentVersion: pkg.version,
|
|
964
|
+
newVersion: nextVersion,
|
|
965
|
+
bumpType,
|
|
966
|
+
hasDirectChanges
|
|
967
|
+
};
|
|
968
|
+
})), { concurrency: 10 });
|
|
969
|
+
}
|
|
970
|
+
return { calculateBumps };
|
|
971
|
+
}),
|
|
972
|
+
dependencies: []
|
|
973
|
+
}) {};
|
|
743
974
|
|
|
744
975
|
//#endregion
|
|
745
|
-
//#region src/
|
|
746
|
-
function
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
|
|
750
|
-
|
|
751
|
-
|
|
752
|
-
|
|
753
|
-
|
|
754
|
-
|
|
755
|
-
|
|
976
|
+
//#region src/utils/helpers.ts
|
|
977
|
+
function loadOverrides(options) {
|
|
978
|
+
return Effect.gen(function* () {
|
|
979
|
+
return yield* (yield* GitService).workspace.readFile(options.overridesPath, options.sha).pipe(Effect.flatMap((content) => Effect.try({
|
|
980
|
+
try: () => JSON.parse(content),
|
|
981
|
+
catch: (err) => new OverridesLoadError({
|
|
982
|
+
message: "Failed to parse overrides file.",
|
|
983
|
+
cause: err
|
|
984
|
+
})
|
|
985
|
+
})), Effect.catchAll(() => Effect.succeed({})));
|
|
986
|
+
});
|
|
987
|
+
}
|
|
988
|
+
const GitCommitSchema = Schema.Struct({
|
|
989
|
+
isConventional: Schema.Boolean,
|
|
990
|
+
isBreaking: Schema.Boolean,
|
|
991
|
+
type: Schema.String,
|
|
992
|
+
scope: Schema.Union(Schema.String, Schema.Undefined),
|
|
993
|
+
description: Schema.String,
|
|
994
|
+
references: Schema.Array(Schema.Struct({
|
|
995
|
+
type: Schema.Union(Schema.Literal("issue"), Schema.Literal("pull-request")),
|
|
996
|
+
value: Schema.String
|
|
997
|
+
})),
|
|
998
|
+
authors: Schema.Array(Schema.Struct({
|
|
999
|
+
name: Schema.String,
|
|
1000
|
+
email: Schema.String,
|
|
1001
|
+
profile: Schema.optional(Schema.String)
|
|
1002
|
+
})),
|
|
1003
|
+
hash: Schema.String,
|
|
1004
|
+
shortHash: Schema.String,
|
|
1005
|
+
body: Schema.String,
|
|
1006
|
+
message: Schema.String,
|
|
1007
|
+
date: Schema.String
|
|
1008
|
+
});
|
|
1009
|
+
const WorkspacePackageWithCommitsSchema = Schema.Struct({
|
|
1010
|
+
...WorkspacePackageSchema.fields,
|
|
1011
|
+
commits: Schema.Array(GitCommitSchema),
|
|
1012
|
+
globalCommits: Schema.Array(GitCommitSchema).pipe(Schema.propertySignature, Schema.withConstructorDefault(() => []))
|
|
1013
|
+
});
|
|
1014
|
+
function mergePackageCommitsIntoPackages(packages) {
|
|
1015
|
+
return Effect.gen(function* () {
|
|
1016
|
+
const git = yield* GitService;
|
|
1017
|
+
return yield* Effect.forEach(packages, (pkg) => Effect.gen(function* () {
|
|
1018
|
+
const lastTag = yield* git.tags.mostRecentForPackage(pkg.name);
|
|
1019
|
+
const commits = yield* git.commits.get({
|
|
1020
|
+
from: lastTag?.name || void 0,
|
|
1021
|
+
to: "HEAD",
|
|
1022
|
+
folder: pkg.path
|
|
1023
|
+
});
|
|
1024
|
+
const withCommits = {
|
|
1025
|
+
...pkg,
|
|
1026
|
+
commits,
|
|
1027
|
+
globalCommits: []
|
|
1028
|
+
};
|
|
1029
|
+
return yield* Schema.decode(WorkspacePackageWithCommitsSchema)(withCommits).pipe(Effect.mapError((e) => /* @__PURE__ */ new Error(`Failed to decode package with commits for ${pkg.name}: ${e}`)));
|
|
1030
|
+
}));
|
|
1031
|
+
});
|
|
756
1032
|
}
|
|
757
1033
|
/**
|
|
758
|
-
*
|
|
759
|
-
*
|
|
1034
|
+
* Retrieves global commits that affect all packages in a monorepo.
|
|
1035
|
+
*
|
|
1036
|
+
* This function handles an important edge case in monorepo releases:
|
|
1037
|
+
* When pkg-a is released, then a global change is made, and then pkg-b is released,
|
|
1038
|
+
* we need to ensure that the global change is only attributed to pkg-a's release,
|
|
1039
|
+
* not re-counted for pkg-b.
|
|
1040
|
+
*
|
|
1041
|
+
* Algorithm:
|
|
1042
|
+
* 1. Find the overall commit range across all packages
|
|
1043
|
+
* 2. Fetch all commits and file changes once for this range
|
|
1044
|
+
* 3. For each package, filter commits based on its last tag cutoff
|
|
1045
|
+
* 4. Apply mode-specific filtering for global commits
|
|
1046
|
+
*
|
|
1047
|
+
* Example scenario:
|
|
1048
|
+
* - pkg-a: last released at commit A
|
|
1049
|
+
* - global change at commit B (after A)
|
|
1050
|
+
* - pkg-b: last released at commit C (after B)
|
|
760
1051
|
*
|
|
761
|
-
*
|
|
762
|
-
*
|
|
763
|
-
*
|
|
1052
|
+
* Result:
|
|
1053
|
+
* - For pkg-a: includes commits from A to HEAD (including B)
|
|
1054
|
+
* - For pkg-b: includes commits from C to HEAD (excluding B, since it was already in pkg-b's release range)
|
|
1055
|
+
*
|
|
1056
|
+
* @param packages - Array of workspace packages with their associated commits
|
|
1057
|
+
* @param mode - Determines which global commits to include:
|
|
1058
|
+
* - "none": No global commits (returns empty map)
|
|
1059
|
+
* - "all": All commits that touch files outside any package directory
|
|
1060
|
+
* - "dependencies": Only commits that touch dependency-related files (package.json, lock files, etc.)
|
|
1061
|
+
*
|
|
1062
|
+
* @returns A map of package names to their relevant global commits
|
|
764
1063
|
*/
|
|
765
|
-
|
|
766
|
-
|
|
767
|
-
|
|
768
|
-
|
|
769
|
-
const
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
|
|
1064
|
+
function mergeCommitsAffectingGloballyIntoPackage(packages, mode) {
|
|
1065
|
+
return Effect.gen(function* () {
|
|
1066
|
+
const git = yield* GitService;
|
|
1067
|
+
if (mode === "none") return packages;
|
|
1068
|
+
const [oldestCommitSha, newestCommitSha] = findCommitRange(packages);
|
|
1069
|
+
if (oldestCommitSha == null || newestCommitSha == null) return packages;
|
|
1070
|
+
const allCommits = yield* git.commits.get({
|
|
1071
|
+
from: oldestCommitSha,
|
|
1072
|
+
to: newestCommitSha,
|
|
1073
|
+
folder: "."
|
|
774
1074
|
});
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
|
|
1075
|
+
const affectedFilesPerCommit = yield* git.commits.filesChangesBetweenRefs(oldestCommitSha, newestCommitSha);
|
|
1076
|
+
const commitTimestamps = new Map(allCommits.map((c) => [c.hash, new Date(c.date).getTime()]));
|
|
1077
|
+
const packagePaths = new Set(packages.map((p) => p.path));
|
|
1078
|
+
const result = /* @__PURE__ */ new Map();
|
|
1079
|
+
for (const pkg of packages) {
|
|
1080
|
+
const lastTag = yield* git.tags.mostRecentForPackage(pkg.name);
|
|
1081
|
+
const cutoffTimestamp = lastTag ? commitTimestamps.get(lastTag.sha) ?? 0 : 0;
|
|
1082
|
+
const globalCommits = [];
|
|
1083
|
+
for (const commit of allCommits) {
|
|
1084
|
+
const commitTimestamp = commitTimestamps.get(commit.hash);
|
|
1085
|
+
if (commitTimestamp == null || commitTimestamp <= cutoffTimestamp) continue;
|
|
1086
|
+
const files = affectedFilesPerCommit.get(commit.hash);
|
|
1087
|
+
if (!files) continue;
|
|
1088
|
+
if (isGlobalCommit(files, packagePaths)) if (mode === "dependencies") {
|
|
1089
|
+
if (files.some((file) => isDependencyFile(file))) globalCommits.push(commit);
|
|
1090
|
+
} else globalCommits.push(commit);
|
|
1091
|
+
}
|
|
1092
|
+
result.set(pkg.name, globalCommits);
|
|
1093
|
+
}
|
|
1094
|
+
return yield* Effect.succeed(packages.map((pkg) => ({
|
|
1095
|
+
...pkg,
|
|
1096
|
+
globalCommits: result.get(pkg.name) || []
|
|
1097
|
+
})));
|
|
780
1098
|
});
|
|
781
|
-
const results = await Promise.all(promises);
|
|
782
|
-
for (const { pkgName, commits } of results) changedPackages.set(pkgName, commits);
|
|
783
|
-
return changedPackages;
|
|
784
1099
|
}
|
|
785
1100
|
/**
|
|
786
|
-
*
|
|
787
|
-
*
|
|
788
|
-
* @param
|
|
789
|
-
* @param
|
|
790
|
-
* @returns true if
|
|
1101
|
+
* Determines if a commit is "global" (affects files outside any package directory).
|
|
1102
|
+
*
|
|
1103
|
+
* @param files - List of files changed in the commit
|
|
1104
|
+
* @param packagePaths - Set of package directory paths
|
|
1105
|
+
* @returns true if at least one file is outside all package directories
|
|
791
1106
|
*/
|
|
792
|
-
function
|
|
793
|
-
|
|
794
|
-
|
|
795
|
-
const
|
|
796
|
-
|
|
797
|
-
}
|
|
798
|
-
return false;
|
|
1107
|
+
function isGlobalCommit(files, packagePaths) {
|
|
1108
|
+
return files.some((file) => {
|
|
1109
|
+
const normalized = file.startsWith("./") ? file.slice(2) : file;
|
|
1110
|
+
for (const pkgPath of packagePaths) if (normalized === pkgPath || normalized.startsWith(`${pkgPath}/`)) return false;
|
|
1111
|
+
return true;
|
|
1112
|
+
});
|
|
799
1113
|
}
|
|
800
1114
|
/**
|
|
801
|
-
*
|
|
802
|
-
* @param workspaceRoot - The workspace root
|
|
803
|
-
* @param files - Array of files changed in the commit
|
|
804
|
-
* @param packagePaths - Set of normalized package paths
|
|
805
|
-
* @returns true if this is a global commit
|
|
1115
|
+
* Files that are considered dependency-related in a monorepo.
|
|
806
1116
|
*/
|
|
807
|
-
|
|
808
|
-
if (!files || files.length === 0) return false;
|
|
809
|
-
return !files.some((file) => fileMatchesPackageFolder(file, packagePaths, workspaceRoot));
|
|
810
|
-
}
|
|
811
|
-
const DEPENDENCY_FILES = [
|
|
1117
|
+
const DEPENDENCY_FILES = new Set([
|
|
812
1118
|
"package.json",
|
|
813
1119
|
"pnpm-lock.yaml",
|
|
814
|
-
"pnpm-workspace.yaml",
|
|
815
1120
|
"yarn.lock",
|
|
816
|
-
"package-lock.json"
|
|
817
|
-
|
|
1121
|
+
"package-lock.json",
|
|
1122
|
+
"pnpm-workspace.yaml"
|
|
1123
|
+
]);
|
|
818
1124
|
/**
|
|
819
|
-
*
|
|
820
|
-
*
|
|
821
|
-
* @
|
|
1125
|
+
* Determines if a file is dependency-related.
|
|
1126
|
+
*
|
|
1127
|
+
* @param file - File path to check
|
|
1128
|
+
* @returns true if the file is a dependency file (package.json, lock files, etc.)
|
|
822
1129
|
*/
|
|
823
|
-
function
|
|
824
|
-
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
if (commits.length === 0) continue;
|
|
828
|
-
const firstCommit = commits[0].shortHash;
|
|
829
|
-
const lastCommit = commits[commits.length - 1].shortHash;
|
|
830
|
-
if (!newestCommit) newestCommit = firstCommit;
|
|
831
|
-
oldestCommit = lastCommit;
|
|
832
|
-
}
|
|
833
|
-
if (!oldestCommit || !newestCommit) return null;
|
|
834
|
-
return {
|
|
835
|
-
oldest: oldestCommit,
|
|
836
|
-
newest: newestCommit
|
|
837
|
-
};
|
|
1130
|
+
function isDependencyFile(file) {
|
|
1131
|
+
const normalized = file.startsWith("./") ? file.slice(2) : file;
|
|
1132
|
+
if (DEPENDENCY_FILES.has(normalized)) return true;
|
|
1133
|
+
return Array.from(DEPENDENCY_FILES).some((dep) => normalized.endsWith(`/${dep}`));
|
|
838
1134
|
}
|
|
839
1135
|
/**
|
|
840
|
-
*
|
|
841
|
-
* This solves the problem where packages with different release histories need different global commits.
|
|
842
|
-
*
|
|
843
|
-
* A "global commit" is a commit that doesn't touch any package folder but may affect all packages
|
|
844
|
-
* (e.g., root package.json, CI config, README).
|
|
1136
|
+
* Finds the oldest and newest commits across all packages.
|
|
845
1137
|
*
|
|
846
|
-
*
|
|
1138
|
+
* This establishes the overall time range we need to analyze for global commits.
|
|
847
1139
|
*
|
|
848
|
-
* @param
|
|
849
|
-
* @
|
|
850
|
-
* @param allPackages - All workspace packages (used to identify package folders)
|
|
851
|
-
* @param mode - Filter mode: false (disabled), "all" (all global commits), or "dependencies" (only dependency-related)
|
|
852
|
-
* @returns Map of package name to their global commits
|
|
1140
|
+
* @param packages - Array of packages with their commits
|
|
1141
|
+
* @returns Tuple of [oldestCommitSha, newestCommitSha], or [null, null] if no commits found
|
|
853
1142
|
*/
|
|
854
|
-
|
|
855
|
-
|
|
856
|
-
|
|
857
|
-
|
|
858
|
-
|
|
859
|
-
|
|
860
|
-
|
|
861
|
-
|
|
862
|
-
|
|
863
|
-
|
|
864
|
-
|
|
865
|
-
}
|
|
866
|
-
|
|
867
|
-
|
|
868
|
-
if (!commitFilesMap) {
|
|
869
|
-
logger.warn("Failed to get commit file list, returning empty global commits");
|
|
870
|
-
return result;
|
|
871
|
-
}
|
|
872
|
-
logger.verbose("Got file lists for commits", `${farver.cyan(commitFilesMap.size)} commits in ONE git call`);
|
|
873
|
-
const packagePaths = new Set(allPackages.map((p) => p.path));
|
|
874
|
-
for (const [pkgName, commits] of packageCommits) {
|
|
875
|
-
const globalCommitsAffectingPackage = [];
|
|
876
|
-
logger.verbose("Filtering global commits for package", `${farver.bold(pkgName)} from ${farver.cyan(commits.length)} commits`);
|
|
877
|
-
for (const commit of commits) {
|
|
878
|
-
const files = commitFilesMap.get(commit.shortHash);
|
|
879
|
-
if (!files) continue;
|
|
880
|
-
if (isGlobalCommit(workspaceRoot, files, packagePaths)) globalCommitsAffectingPackage.push(commit);
|
|
881
|
-
}
|
|
882
|
-
logger.verbose("Package global commits found", `${farver.bold(pkgName)}: ${farver.cyan(globalCommitsAffectingPackage.length)} global commits`);
|
|
883
|
-
if (mode === "all") {
|
|
884
|
-
result.set(pkgName, globalCommitsAffectingPackage);
|
|
885
|
-
continue;
|
|
886
|
-
}
|
|
887
|
-
const dependencyCommits = [];
|
|
888
|
-
for (const commit of globalCommitsAffectingPackage) {
|
|
889
|
-
const files = commitFilesMap.get(commit.shortHash);
|
|
890
|
-
if (!files) continue;
|
|
891
|
-
if (files.some((file) => DEPENDENCY_FILES.includes(file.startsWith("./") ? file.slice(2) : file))) {
|
|
892
|
-
logger.verbose("Global commit affects dependencies", `${farver.bold(pkgName)}: commit ${farver.cyan(commit.shortHash)} affects dependencies`);
|
|
893
|
-
dependencyCommits.push(commit);
|
|
894
|
-
}
|
|
895
|
-
}
|
|
896
|
-
logger.verbose("Global commits affect dependencies", `${farver.bold(pkgName)}: ${farver.cyan(dependencyCommits.length)} global commits affect dependencies`);
|
|
897
|
-
result.set(pkgName, dependencyCommits);
|
|
898
|
-
}
|
|
899
|
-
return result;
|
|
900
|
-
}
|
|
901
|
-
function determineBumpType(commit) {
|
|
902
|
-
if (commit.isBreaking) return "major";
|
|
903
|
-
if (!commit.isConventional || !commit.type) return "none";
|
|
904
|
-
switch (commit.type) {
|
|
905
|
-
case "feat": return "minor";
|
|
906
|
-
case "fix":
|
|
907
|
-
case "perf": return "patch";
|
|
908
|
-
case "docs":
|
|
909
|
-
case "style":
|
|
910
|
-
case "refactor":
|
|
911
|
-
case "test":
|
|
912
|
-
case "build":
|
|
913
|
-
case "ci":
|
|
914
|
-
case "chore":
|
|
915
|
-
case "revert": return "none";
|
|
916
|
-
default: return "none";
|
|
917
|
-
}
|
|
1143
|
+
function findCommitRange(packages) {
|
|
1144
|
+
let oldestCommit = null;
|
|
1145
|
+
let newestCommit = null;
|
|
1146
|
+
for (const pkg of packages) {
|
|
1147
|
+
if (pkg.commits.length === 0) continue;
|
|
1148
|
+
const firstCommit = pkg.commits[0];
|
|
1149
|
+
if (!firstCommit) throw new Error(`No commits found for package ${pkg.name}`);
|
|
1150
|
+
const lastCommit = pkg.commits[pkg.commits.length - 1];
|
|
1151
|
+
if (!lastCommit) throw new Error(`No commits found for package ${pkg.name}`);
|
|
1152
|
+
if (newestCommit == null || new Date(lastCommit.date) > new Date(newestCommit.date)) newestCommit = lastCommit;
|
|
1153
|
+
if (oldestCommit == null || new Date(firstCommit.date) < new Date(oldestCommit.date)) oldestCommit = firstCommit;
|
|
1154
|
+
}
|
|
1155
|
+
if (oldestCommit == null || newestCommit == null) return [null, null];
|
|
1156
|
+
return [oldestCommit.hash, newestCommit.hash];
|
|
918
1157
|
}
|
|
919
1158
|
|
|
920
1159
|
//#endregion
|
|
921
|
-
//#region src/
|
|
922
|
-
|
|
923
|
-
*
|
|
924
|
-
*
|
|
925
|
-
|
|
926
|
-
|
|
927
|
-
|
|
928
|
-
*
|
|
929
|
-
|
|
930
|
-
|
|
931
|
-
|
|
932
|
-
|
|
933
|
-
|
|
934
|
-
|
|
935
|
-
|
|
936
|
-
|
|
937
|
-
|
|
938
|
-
}
|
|
939
|
-
for (const pkg of packages) {
|
|
940
|
-
const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
|
|
941
|
-
for (const dep of allDeps) {
|
|
942
|
-
const depSet = dependents.get(dep);
|
|
943
|
-
if (depSet) depSet.add(pkg.name);
|
|
1160
|
+
//#region src/prepare.ts
|
|
1161
|
+
function constructPrepareProgram(config) {
|
|
1162
|
+
return Effect.gen(function* () {
|
|
1163
|
+
const changelog = yield* ChangelogService;
|
|
1164
|
+
const git = yield* GitService;
|
|
1165
|
+
const github = yield* GitHubService;
|
|
1166
|
+
const dependencyGraph = yield* DependencyGraphService;
|
|
1167
|
+
const packageUpdater = yield* PackageUpdaterService;
|
|
1168
|
+
const versionCalculator = yield* VersionCalculatorService;
|
|
1169
|
+
const workspace = yield* WorkspaceService;
|
|
1170
|
+
yield* git.workspace.assertWorkspaceReady;
|
|
1171
|
+
const releasePullRequest = yield* github.getPullRequestByBranch(config.branch.release);
|
|
1172
|
+
if (!releasePullRequest || !releasePullRequest.head) return yield* Effect.fail(/* @__PURE__ */ new Error(`Release pull request for branch "${config.branch.release}" does not exist.`));
|
|
1173
|
+
yield* Console.log(`✅ Release pull request #${releasePullRequest.number} exists.`);
|
|
1174
|
+
if ((yield* git.branches.get) !== config.branch.release) {
|
|
1175
|
+
yield* git.branches.checkout(config.branch.release);
|
|
1176
|
+
yield* Console.log(`✅ Checked out to release branch "${config.branch.release}".`);
|
|
944
1177
|
}
|
|
945
|
-
|
|
946
|
-
|
|
947
|
-
|
|
948
|
-
|
|
949
|
-
|
|
950
|
-
|
|
951
|
-
|
|
952
|
-
|
|
953
|
-
*
|
|
954
|
-
*
|
|
955
|
-
*
|
|
956
|
-
*
|
|
957
|
-
*
|
|
958
|
-
*
|
|
959
|
-
|
|
960
|
-
*
|
|
961
|
-
|
|
962
|
-
|
|
963
|
-
|
|
964
|
-
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
const
|
|
968
|
-
|
|
969
|
-
|
|
970
|
-
|
|
971
|
-
|
|
972
|
-
|
|
973
|
-
|
|
974
|
-
|
|
975
|
-
|
|
976
|
-
|
|
977
|
-
|
|
978
|
-
*
|
|
979
|
-
* @param graph - Dependency graph
|
|
980
|
-
* @param workspacePackages - All workspace packages
|
|
981
|
-
* @param directUpdates - Packages with direct code changes
|
|
982
|
-
* @returns All updates including dependent packages that need patch bumps
|
|
983
|
-
*/
|
|
984
|
-
function createDependentUpdates(graph, workspacePackages, directUpdates) {
|
|
985
|
-
const allUpdates = [...directUpdates];
|
|
986
|
-
const directUpdateMap = new Map(directUpdates.map((u) => [u.package.name, u]));
|
|
987
|
-
const affectedPackages = getAllAffectedPackages(graph, new Set(directUpdates.map((u) => u.package.name)));
|
|
988
|
-
for (const pkgName of affectedPackages) {
|
|
989
|
-
logger.verbose(`Processing affected package: ${pkgName}`);
|
|
990
|
-
if (directUpdateMap.has(pkgName)) {
|
|
991
|
-
logger.verbose(`Skipping ${pkgName}, already has a direct update`);
|
|
992
|
-
continue;
|
|
1178
|
+
yield* Console.log(`🔄 Rebasing "${config.branch.release}" onto "${config.branch.default}"...`);
|
|
1179
|
+
yield* git.branches.rebase(config.branch.default);
|
|
1180
|
+
yield* Console.log(`✅ Rebase complete.`);
|
|
1181
|
+
const overrides = yield* loadOverrides({
|
|
1182
|
+
sha: config.branch.default,
|
|
1183
|
+
overridesPath: ".github/ucdjs-release.overrides.json"
|
|
1184
|
+
});
|
|
1185
|
+
if (Object.keys(overrides).length > 0) yield* Console.log("📋 Loaded version overrides:", overrides);
|
|
1186
|
+
const originalBranch = yield* git.branches.get;
|
|
1187
|
+
yield* git.branches.checkout(config.branch.default);
|
|
1188
|
+
const packages = yield* workspace.discoverWorkspacePackages.pipe(Effect.flatMap(mergePackageCommitsIntoPackages), Effect.flatMap((pkgs) => mergeCommitsAffectingGloballyIntoPackage(pkgs, config.globalCommitMode)));
|
|
1189
|
+
yield* Console.log(`📦 Discovered ${packages.length} packages with commits.`);
|
|
1190
|
+
const releases = yield* versionCalculator.calculateBumps(packages, overrides);
|
|
1191
|
+
yield* dependencyGraph.topologicalOrder(packages);
|
|
1192
|
+
const releasesCount = releases.length;
|
|
1193
|
+
yield* Console.log(`📊 ${releasesCount} package${releasesCount === 1 ? "" : "s"} will be released.`);
|
|
1194
|
+
yield* git.branches.checkout(originalBranch);
|
|
1195
|
+
yield* Console.log("✏️ Updating package.json files...");
|
|
1196
|
+
yield* packageUpdater.applyReleases(packages, releases);
|
|
1197
|
+
yield* Console.log("✅ package.json files updated.");
|
|
1198
|
+
yield* Console.log("📝 Generating changelogs...");
|
|
1199
|
+
const changelogFiles = [];
|
|
1200
|
+
for (const release of releases) {
|
|
1201
|
+
const pkg = packages.find((p) => p.name === release.package.name);
|
|
1202
|
+
if (!pkg || !pkg.commits) continue;
|
|
1203
|
+
const result = yield* changelog.generateChangelog(pkg, release.newVersion, pkg.commits);
|
|
1204
|
+
yield* Effect.tryPromise({
|
|
1205
|
+
try: async () => {
|
|
1206
|
+
await (await import("node:fs/promises")).writeFile(result.filePath, result.markdown, "utf-8");
|
|
1207
|
+
},
|
|
1208
|
+
catch: (e) => /* @__PURE__ */ new Error(`Failed to write changelog: ${String(e)}`)
|
|
1209
|
+
});
|
|
1210
|
+
changelogFiles.push(result.filePath);
|
|
993
1211
|
}
|
|
994
|
-
|
|
995
|
-
|
|
996
|
-
|
|
997
|
-
|
|
998
|
-
|
|
1212
|
+
yield* Console.log(`✅ Generated ${changelogFiles.length} changelog file${changelogFiles.length === 1 ? "" : "s"}.`);
|
|
1213
|
+
const filesToStage = [...releases.map((r) => `${r.package.path}/package.json`), ...changelogFiles];
|
|
1214
|
+
yield* Console.log(`📌 Staging ${filesToStage.length} file${filesToStage.length === 1 ? "" : "s"}...`);
|
|
1215
|
+
yield* git.commits.stage(filesToStage);
|
|
1216
|
+
const commitMessage = `chore(release): prepare release
|
|
1217
|
+
|
|
1218
|
+
${releasesCount} package${releasesCount === 1 ? "" : "s"} updated:
|
|
1219
|
+
${releases.map((r) => ` - ${r.package.name}@${r.newVersion}`).join("\n")}`;
|
|
1220
|
+
yield* Console.log("💾 Creating commit...");
|
|
1221
|
+
yield* git.commits.write(commitMessage);
|
|
1222
|
+
yield* Console.log("✅ Commit created.");
|
|
1223
|
+
yield* Console.log(`⬆️ Force pushing to "${config.branch.release}"...`);
|
|
1224
|
+
yield* git.commits.forcePush(config.branch.release);
|
|
1225
|
+
yield* Console.log("✅ Force push complete.");
|
|
1226
|
+
yield* Console.log("📄 Updating pull request...");
|
|
1227
|
+
const prBody = yield* github.generateReleasePRBody(releases.map((r) => ({
|
|
1228
|
+
packageName: r.package.name,
|
|
1229
|
+
version: r.newVersion,
|
|
1230
|
+
previousVersion: r.package.version
|
|
1231
|
+
})));
|
|
1232
|
+
yield* github.updatePullRequest(releasePullRequest.number, { body: prBody });
|
|
1233
|
+
yield* Console.log("✅ Pull request updated.");
|
|
1234
|
+
yield* Console.log(`\n🎉 Release preparation complete! View PR: #${releasePullRequest.number}`);
|
|
1235
|
+
});
|
|
999
1236
|
}
|
|
1000
1237
|
|
|
1001
1238
|
//#endregion
|
|
1002
|
-
//#region src/
|
|
1003
|
-
function
|
|
1004
|
-
|
|
1005
|
-
|
|
1006
|
-
function getNextVersion(currentVersion, bump) {
|
|
1007
|
-
if (bump === "none") {
|
|
1008
|
-
logger.verbose(`No version bump needed, keeping version ${currentVersion}`);
|
|
1009
|
-
return currentVersion;
|
|
1010
|
-
}
|
|
1011
|
-
if (!isValidSemver(currentVersion)) throw new Error(`Cannot bump version for invalid semver: ${currentVersion}`);
|
|
1012
|
-
const match = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)(.*)$/);
|
|
1013
|
-
if (!match) throw new Error(`Invalid semver version: ${currentVersion}`);
|
|
1014
|
-
const [, major, minor, patch] = match;
|
|
1015
|
-
let newMajor = Number.parseInt(major, 10);
|
|
1016
|
-
let newMinor = Number.parseInt(minor, 10);
|
|
1017
|
-
let newPatch = Number.parseInt(patch, 10);
|
|
1018
|
-
switch (bump) {
|
|
1019
|
-
case "major":
|
|
1020
|
-
newMajor += 1;
|
|
1021
|
-
newMinor = 0;
|
|
1022
|
-
newPatch = 0;
|
|
1023
|
-
break;
|
|
1024
|
-
case "minor":
|
|
1025
|
-
newMinor += 1;
|
|
1026
|
-
newPatch = 0;
|
|
1027
|
-
break;
|
|
1028
|
-
case "patch":
|
|
1029
|
-
newPatch += 1;
|
|
1030
|
-
break;
|
|
1031
|
-
}
|
|
1032
|
-
return `${newMajor}.${newMinor}.${newPatch}`;
|
|
1033
|
-
}
|
|
1034
|
-
function createVersionUpdate(pkg, bump, hasDirectChanges) {
|
|
1035
|
-
const newVersion = getNextVersion(pkg.version, bump);
|
|
1036
|
-
return {
|
|
1037
|
-
package: pkg,
|
|
1038
|
-
currentVersion: pkg.version,
|
|
1039
|
-
newVersion,
|
|
1040
|
-
bumpType: bump,
|
|
1041
|
-
hasDirectChanges
|
|
1042
|
-
};
|
|
1239
|
+
//#region src/publish.ts
|
|
1240
|
+
function isPrerelease(version) {
|
|
1241
|
+
const parsed = semver.parse(version);
|
|
1242
|
+
return parsed !== null && parsed.prerelease.length > 0;
|
|
1043
1243
|
}
|
|
1044
|
-
function
|
|
1045
|
-
|
|
1046
|
-
const oldParts = oldVersion.split(".").map(Number);
|
|
1047
|
-
const newParts = newVersion.split(".").map(Number);
|
|
1048
|
-
if (newParts[0] > oldParts[0]) return "major";
|
|
1049
|
-
if (newParts[1] > oldParts[1]) return "minor";
|
|
1050
|
-
if (newParts[2] > oldParts[2]) return "patch";
|
|
1051
|
-
return "none";
|
|
1244
|
+
function getDistTag(version) {
|
|
1245
|
+
return isPrerelease(version) ? "next" : "latest";
|
|
1052
1246
|
}
|
|
1053
|
-
|
|
1054
|
-
|
|
1055
|
-
|
|
1056
|
-
|
|
1057
|
-
|
|
1058
|
-
|
|
1059
|
-
doc: farver.blue,
|
|
1060
|
-
types: farver.blue,
|
|
1061
|
-
type: farver.blue,
|
|
1062
|
-
chore: farver.gray,
|
|
1063
|
-
ci: farver.gray,
|
|
1064
|
-
build: farver.gray,
|
|
1065
|
-
deps: farver.gray,
|
|
1066
|
-
dev: farver.gray,
|
|
1067
|
-
fix: farver.yellow,
|
|
1068
|
-
test: farver.yellow,
|
|
1069
|
-
perf: farver.magenta,
|
|
1070
|
-
revert: farver.red,
|
|
1071
|
-
breaking: farver.red
|
|
1072
|
-
};
|
|
1073
|
-
function formatCommitsForDisplay(commits) {
|
|
1074
|
-
if (commits.length === 0) return farver.dim("No commits found");
|
|
1075
|
-
const maxCommitsToShow = 10;
|
|
1076
|
-
const commitsToShow = commits.slice(0, maxCommitsToShow);
|
|
1077
|
-
const hasMore = commits.length > maxCommitsToShow;
|
|
1078
|
-
const typeLength = commits.map(({ type }) => type.length).reduce((a, b) => Math.max(a, b), 0);
|
|
1079
|
-
const scopeLength = commits.map(({ scope }) => scope?.length).reduce((a, b) => Math.max(a || 0, b || 0), 0) || 0;
|
|
1080
|
-
const formattedCommits = commitsToShow.map((commit) => {
|
|
1081
|
-
let color = messageColorMap[commit.type] || ((c) => c);
|
|
1082
|
-
if (commit.isBreaking) color = (s) => farver.inverse.red(s);
|
|
1083
|
-
const paddedType = commit.type.padStart(typeLength + 1, " ");
|
|
1084
|
-
const paddedScope = !commit.scope ? " ".repeat(scopeLength ? scopeLength + 2 : 0) : farver.dim("(") + commit.scope + farver.dim(")") + " ".repeat(scopeLength - commit.scope.length);
|
|
1085
|
-
return [
|
|
1086
|
-
farver.dim(commit.shortHash),
|
|
1087
|
-
" ",
|
|
1088
|
-
color === farver.gray ? color(paddedType) : farver.bold(color(paddedType)),
|
|
1089
|
-
" ",
|
|
1090
|
-
paddedScope,
|
|
1091
|
-
farver.dim(":"),
|
|
1092
|
-
" ",
|
|
1093
|
-
color === farver.gray ? color(commit.description) : commit.description
|
|
1094
|
-
].join("");
|
|
1095
|
-
}).join("\n");
|
|
1096
|
-
if (hasMore) return `${formattedCommits}\n ${farver.dim(`... and ${commits.length - maxCommitsToShow} more commits`)}`;
|
|
1097
|
-
return formattedCommits;
|
|
1247
|
+
function buildPackage(packagePath) {
|
|
1248
|
+
return Effect.gen(function* () {
|
|
1249
|
+
const executor = yield* CommandExecutor.CommandExecutor;
|
|
1250
|
+
const command = Command.make("pnpm", "run", "build").pipe(Command.workingDirectory(packagePath));
|
|
1251
|
+
return (yield* executor.string(command).pipe(Effect.mapError((err) => /* @__PURE__ */ new Error(`Failed to build package at ${packagePath}: ${err.message}`)))).trim();
|
|
1252
|
+
});
|
|
1098
1253
|
}
|
|
1099
|
-
|
|
1100
|
-
|
|
1101
|
-
|
|
1102
|
-
|
|
1103
|
-
|
|
1104
|
-
|
|
1105
|
-
|
|
1106
|
-
|
|
1107
|
-
|
|
1108
|
-
|
|
1109
|
-
|
|
1110
|
-
|
|
1111
|
-
const
|
|
1112
|
-
|
|
1113
|
-
|
|
1114
|
-
|
|
1115
|
-
|
|
1116
|
-
|
|
1117
|
-
|
|
1118
|
-
|
|
1119
|
-
|
|
1120
|
-
|
|
1121
|
-
|
|
1122
|
-
|
|
1123
|
-
|
|
1124
|
-
|
|
1125
|
-
|
|
1126
|
-
|
|
1127
|
-
|
|
1128
|
-
|
|
1129
|
-
|
|
1130
|
-
const
|
|
1131
|
-
|
|
1132
|
-
const
|
|
1133
|
-
|
|
1134
|
-
|
|
1135
|
-
|
|
1136
|
-
|
|
1137
|
-
|
|
1138
|
-
|
|
1139
|
-
|
|
1140
|
-
|
|
1141
|
-
|
|
1142
|
-
|
|
1254
|
+
function constructPublishProgram(config) {
|
|
1255
|
+
return Effect.gen(function* () {
|
|
1256
|
+
const git = yield* GitService;
|
|
1257
|
+
const npm = yield* NPMService;
|
|
1258
|
+
const workspace = yield* WorkspaceService;
|
|
1259
|
+
const dependencyGraph = yield* DependencyGraphService;
|
|
1260
|
+
yield* git.workspace.assertWorkspaceReady;
|
|
1261
|
+
const currentBranch = yield* git.branches.get;
|
|
1262
|
+
if (currentBranch !== config.branch.default) return yield* Effect.fail(/* @__PURE__ */ new Error(`Publish must be run on the default branch "${config.branch.default}". Current branch: "${currentBranch}"`));
|
|
1263
|
+
yield* Console.log(`✅ On default branch "${config.branch.default}".`);
|
|
1264
|
+
const publicPackages = (yield* workspace.discoverWorkspacePackages).filter((pkg) => !pkg.packageJson.private);
|
|
1265
|
+
yield* Console.log(`📦 Found ${publicPackages.length} public package${publicPackages.length === 1 ? "" : "s"} to check.`);
|
|
1266
|
+
const orderedPackages = yield* dependencyGraph.topologicalOrder(publicPackages);
|
|
1267
|
+
const results = [];
|
|
1268
|
+
for (const updateOrder of orderedPackages) {
|
|
1269
|
+
const pkg = updateOrder.package;
|
|
1270
|
+
const version = pkg.version;
|
|
1271
|
+
const tagName = `${pkg.name}@${version}`;
|
|
1272
|
+
if (yield* npm.versionExists(pkg.name, version)) {
|
|
1273
|
+
yield* Console.log(`⏭️ Skipping ${pkg.name}@${version} - already published.`);
|
|
1274
|
+
results.push({
|
|
1275
|
+
packageName: pkg.name,
|
|
1276
|
+
version,
|
|
1277
|
+
status: "skipped",
|
|
1278
|
+
reason: "Already published to npm"
|
|
1279
|
+
});
|
|
1280
|
+
continue;
|
|
1281
|
+
}
|
|
1282
|
+
yield* Console.log(`🔨 Building ${pkg.name}...`);
|
|
1283
|
+
yield* buildPackage(pkg.path);
|
|
1284
|
+
yield* Console.log(`✅ Build complete for ${pkg.name}.`);
|
|
1285
|
+
const distTag = getDistTag(version);
|
|
1286
|
+
yield* Console.log(`🚀 Publishing ${pkg.name}@${version} with tag "${distTag}"...`);
|
|
1287
|
+
const publishResult = yield* npm.publish({
|
|
1288
|
+
packagePath: pkg.path,
|
|
1289
|
+
tagName: distTag,
|
|
1290
|
+
otp: config.npm.otp,
|
|
1291
|
+
provenance: config.npm.provenance,
|
|
1292
|
+
dryRun: config.dryRun
|
|
1293
|
+
}).pipe(Effect.map(() => ({ success: true })), Effect.catchAll((err) => Effect.succeed({
|
|
1294
|
+
success: false,
|
|
1295
|
+
error: err
|
|
1296
|
+
})));
|
|
1297
|
+
if (publishResult.success) {
|
|
1298
|
+
yield* Console.log(`✅ Published ${pkg.name}@${version}.`);
|
|
1299
|
+
if (!config.dryRun) {
|
|
1300
|
+
yield* Console.log(`🏷️ Creating tag ${tagName}...`);
|
|
1301
|
+
yield* git.tags.create(tagName, `Release ${tagName}`);
|
|
1302
|
+
yield* git.tags.push(tagName);
|
|
1303
|
+
yield* Console.log(`✅ Tag ${tagName} created and pushed.`);
|
|
1304
|
+
} else yield* Console.log(`🏷️ [Dry Run] Would create and push tag ${tagName}.`);
|
|
1305
|
+
results.push({
|
|
1306
|
+
packageName: pkg.name,
|
|
1307
|
+
version,
|
|
1308
|
+
status: "published"
|
|
1309
|
+
});
|
|
1310
|
+
} else {
|
|
1311
|
+
const error = publishResult.error;
|
|
1312
|
+
yield* Console.log(`❌ Failed to publish ${pkg.name}@${version}: ${error.message}`);
|
|
1313
|
+
results.push({
|
|
1314
|
+
packageName: pkg.name,
|
|
1315
|
+
version,
|
|
1316
|
+
status: "failed",
|
|
1317
|
+
reason: error.message
|
|
1318
|
+
});
|
|
1143
1319
|
}
|
|
1144
|
-
newVersion = selectedVersion;
|
|
1145
1320
|
}
|
|
1146
|
-
|
|
1147
|
-
|
|
1148
|
-
|
|
1149
|
-
|
|
1150
|
-
|
|
1151
|
-
|
|
1152
|
-
});
|
|
1153
|
-
|
|
1154
|
-
|
|
1155
|
-
|
|
1156
|
-
|
|
1157
|
-
logger.section(`📦 Package: ${pkg.name}`);
|
|
1158
|
-
logger.item("No direct commits found");
|
|
1159
|
-
const newVersion = await selectVersionPrompt(workspaceRoot, pkg, pkg.version, pkg.version);
|
|
1160
|
-
if (newVersion === null) break;
|
|
1161
|
-
if (newVersion !== pkg.version) {
|
|
1162
|
-
const bumpType = _calculateBumpType(pkg.version, newVersion);
|
|
1163
|
-
versionUpdates.push({
|
|
1164
|
-
package: pkg,
|
|
1165
|
-
currentVersion: pkg.version,
|
|
1166
|
-
newVersion,
|
|
1167
|
-
bumpType,
|
|
1168
|
-
hasDirectChanges: false
|
|
1169
|
-
});
|
|
1321
|
+
const published = results.filter((r) => r.status === "published");
|
|
1322
|
+
const skipped = results.filter((r) => r.status === "skipped");
|
|
1323
|
+
const failed = results.filter((r) => r.status === "failed");
|
|
1324
|
+
yield* Console.log("\n📊 Publish Summary:");
|
|
1325
|
+
yield* Console.log(` Published: ${published.length}`);
|
|
1326
|
+
yield* Console.log(` Skipped: ${skipped.length}`);
|
|
1327
|
+
yield* Console.log(` Failed: ${failed.length}`);
|
|
1328
|
+
if (failed.length > 0) {
|
|
1329
|
+
yield* Console.log("\n❌ Failed packages:");
|
|
1330
|
+
for (const f of failed) yield* Console.log(` - ${f.packageName}@${f.version}: ${f.reason}`);
|
|
1331
|
+
return yield* Effect.fail(/* @__PURE__ */ new Error("Some packages failed to publish."));
|
|
1170
1332
|
}
|
|
1171
|
-
|
|
1172
|
-
|
|
1173
|
-
updates: versionUpdates,
|
|
1174
|
-
overrides: newOverrides
|
|
1175
|
-
};
|
|
1176
|
-
}
|
|
1177
|
-
/**
|
|
1178
|
-
* Calculate version updates and prepare dependent updates
|
|
1179
|
-
* Returns both the updates and a function to apply them
|
|
1180
|
-
*/
|
|
1181
|
-
async function calculateAndPrepareVersionUpdates({ workspacePackages, packageCommits, workspaceRoot, showPrompt, globalCommitsPerPackage, overrides }) {
|
|
1182
|
-
const { updates: directUpdates, overrides: newOverrides } = await calculateVersionUpdates({
|
|
1183
|
-
workspacePackages,
|
|
1184
|
-
packageCommits,
|
|
1185
|
-
workspaceRoot,
|
|
1186
|
-
showPrompt,
|
|
1187
|
-
globalCommitsPerPackage,
|
|
1188
|
-
overrides
|
|
1333
|
+
if (published.length === 0 && skipped.length > 0) yield* Console.log("\n✅ All packages were already published.");
|
|
1334
|
+
else if (published.length > 0) yield* Console.log("\n🎉 Publish complete!");
|
|
1189
1335
|
});
|
|
1190
|
-
const allUpdates = createDependentUpdates(buildPackageDependencyGraph(workspacePackages), workspacePackages, directUpdates);
|
|
1191
|
-
const applyUpdates = async () => {
|
|
1192
|
-
await Promise.all(allUpdates.map(async (update) => {
|
|
1193
|
-
const depUpdates = getDependencyUpdates(update.package, allUpdates);
|
|
1194
|
-
await updatePackageJson(update.package, update.newVersion, depUpdates);
|
|
1195
|
-
}));
|
|
1196
|
-
};
|
|
1197
|
-
return {
|
|
1198
|
-
allUpdates,
|
|
1199
|
-
applyUpdates,
|
|
1200
|
-
overrides: newOverrides
|
|
1201
|
-
};
|
|
1202
|
-
}
|
|
1203
|
-
async function updatePackageJson(pkg, newVersion, dependencyUpdates) {
|
|
1204
|
-
const packageJsonPath = join(pkg.path, "package.json");
|
|
1205
|
-
const content = await readFile(packageJsonPath, "utf-8");
|
|
1206
|
-
const packageJson = JSON.parse(content);
|
|
1207
|
-
packageJson.version = newVersion;
|
|
1208
|
-
function updateDependency(deps, depName, depVersion, isPeerDependency = false) {
|
|
1209
|
-
if (!deps) return;
|
|
1210
|
-
const oldVersion = deps[depName];
|
|
1211
|
-
if (!oldVersion) return;
|
|
1212
|
-
if (oldVersion === "workspace:*") {
|
|
1213
|
-
logger.verbose(` - Skipping workspace:* dependency: ${depName}`);
|
|
1214
|
-
return;
|
|
1215
|
-
}
|
|
1216
|
-
if (isPeerDependency) {
|
|
1217
|
-
const majorVersion = depVersion.split(".")[0];
|
|
1218
|
-
deps[depName] = `>=${depVersion} <${Number(majorVersion) + 1}.0.0`;
|
|
1219
|
-
} else deps[depName] = `^${depVersion}`;
|
|
1220
|
-
logger.verbose(` - Updated dependency ${depName}: ${oldVersion} → ${deps[depName]}`);
|
|
1221
|
-
}
|
|
1222
|
-
for (const [depName, depVersion] of dependencyUpdates) {
|
|
1223
|
-
updateDependency(packageJson.dependencies, depName, depVersion);
|
|
1224
|
-
updateDependency(packageJson.devDependencies, depName, depVersion);
|
|
1225
|
-
updateDependency(packageJson.peerDependencies, depName, depVersion, true);
|
|
1226
|
-
}
|
|
1227
|
-
await writeFile(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf-8");
|
|
1228
|
-
logger.verbose(` - Successfully wrote updated package.json`);
|
|
1229
|
-
}
|
|
1230
|
-
/**
|
|
1231
|
-
* Get all dependency updates needed for a package
|
|
1232
|
-
*/
|
|
1233
|
-
function getDependencyUpdates(pkg, allUpdates) {
|
|
1234
|
-
const updates = /* @__PURE__ */ new Map();
|
|
1235
|
-
const allDeps = [...pkg.workspaceDependencies, ...pkg.workspaceDevDependencies];
|
|
1236
|
-
for (const dep of allDeps) {
|
|
1237
|
-
const update = allUpdates.find((u) => u.package.name === dep);
|
|
1238
|
-
if (update) {
|
|
1239
|
-
logger.verbose(` - Dependency ${dep} will be updated: ${update.currentVersion} → ${update.newVersion} (${update.bumpType})`);
|
|
1240
|
-
updates.set(dep, update.newVersion);
|
|
1241
|
-
}
|
|
1242
|
-
}
|
|
1243
|
-
if (updates.size === 0) logger.verbose(` - No dependency updates needed`);
|
|
1244
|
-
return updates;
|
|
1245
1336
|
}
|
|
1246
1337
|
|
|
1247
1338
|
//#endregion
|
|
1248
|
-
//#region src/
|
|
1249
|
-
|
|
1250
|
-
|
|
1251
|
-
|
|
1252
|
-
|
|
1253
|
-
|
|
1254
|
-
|
|
1255
|
-
|
|
1256
|
-
|
|
1257
|
-
|
|
1258
|
-
})),
|
|
1259
|
-
min: 1,
|
|
1260
|
-
hint: "Space to select/deselect. Return to submit.",
|
|
1261
|
-
instructions: false
|
|
1339
|
+
//#region src/verify.ts
|
|
1340
|
+
function satisfiesRange(range, version) {
|
|
1341
|
+
return semver.satisfies(version, range, { includePrerelease: true });
|
|
1342
|
+
}
|
|
1343
|
+
function snapshotPackageJson(pkg, ref) {
|
|
1344
|
+
return Effect.gen(function* () {
|
|
1345
|
+
return yield* (yield* GitService).workspace.readFile(`${pkg.path}/package.json`, ref).pipe(Effect.flatMap((content) => Effect.try({
|
|
1346
|
+
try: () => JSON.parse(content),
|
|
1347
|
+
catch: (e) => /* @__PURE__ */ new Error(`Failed to parse package.json for ${pkg.name} at ${ref}: ${String(e)}`)
|
|
1348
|
+
})));
|
|
1262
1349
|
});
|
|
1263
|
-
if (!response.selectedPackages || response.selectedPackages.length === 0) return [];
|
|
1264
|
-
return response.selectedPackages;
|
|
1265
1350
|
}
|
|
1266
|
-
|
|
1267
|
-
const
|
|
1268
|
-
|
|
1269
|
-
|
|
1270
|
-
|
|
1271
|
-
|
|
1272
|
-
|
|
1273
|
-
|
|
1274
|
-
|
|
1275
|
-
|
|
1276
|
-
|
|
1277
|
-
|
|
1278
|
-
|
|
1279
|
-
|
|
1280
|
-
{
|
|
1281
|
-
|
|
1282
|
-
|
|
1283
|
-
}
|
|
1284
|
-
|
|
1285
|
-
|
|
1286
|
-
|
|
1287
|
-
|
|
1288
|
-
|
|
1289
|
-
|
|
1290
|
-
|
|
1291
|
-
|
|
1292
|
-
|
|
1293
|
-
|
|
1294
|
-
|
|
1295
|
-
|
|
1296
|
-
|
|
1297
|
-
|
|
1298
|
-
|
|
1351
|
+
function findDrift(packages, releases, branchSnapshots) {
|
|
1352
|
+
const releaseVersionByName = /* @__PURE__ */ new Map();
|
|
1353
|
+
for (const rel of releases) releaseVersionByName.set(rel.package.name, rel.newVersion);
|
|
1354
|
+
const reasons = [];
|
|
1355
|
+
for (const pkg of packages) {
|
|
1356
|
+
const snapshot = branchSnapshots.get(pkg.name);
|
|
1357
|
+
if (snapshot == null) {
|
|
1358
|
+
reasons.push({
|
|
1359
|
+
packageName: pkg.name,
|
|
1360
|
+
reason: "package.json missing on release branch"
|
|
1361
|
+
});
|
|
1362
|
+
continue;
|
|
1363
|
+
}
|
|
1364
|
+
if (snapshot instanceof Error) {
|
|
1365
|
+
reasons.push({
|
|
1366
|
+
packageName: pkg.name,
|
|
1367
|
+
reason: snapshot.message
|
|
1368
|
+
});
|
|
1369
|
+
continue;
|
|
1370
|
+
}
|
|
1371
|
+
const expectedVersion = releaseVersionByName.get(pkg.name) ?? pkg.version;
|
|
1372
|
+
const branchVersion = typeof snapshot.version === "string" ? snapshot.version : void 0;
|
|
1373
|
+
if (!branchVersion) {
|
|
1374
|
+
reasons.push({
|
|
1375
|
+
packageName: pkg.name,
|
|
1376
|
+
reason: "package.json on release branch lacks version"
|
|
1377
|
+
});
|
|
1378
|
+
continue;
|
|
1379
|
+
}
|
|
1380
|
+
if (branchVersion !== expectedVersion) reasons.push({
|
|
1381
|
+
packageName: pkg.name,
|
|
1382
|
+
reason: `version mismatch: expected ${expectedVersion}, found ${branchVersion}`
|
|
1383
|
+
});
|
|
1384
|
+
for (const section of [
|
|
1385
|
+
"dependencies",
|
|
1386
|
+
"devDependencies",
|
|
1387
|
+
"peerDependencies"
|
|
1388
|
+
]) {
|
|
1389
|
+
const deps = snapshot[section];
|
|
1390
|
+
if (!deps || typeof deps !== "object") continue;
|
|
1391
|
+
for (const [depName, range] of Object.entries(deps)) {
|
|
1392
|
+
const bumpedVersion = releaseVersionByName.get(depName);
|
|
1393
|
+
if (!bumpedVersion) continue;
|
|
1394
|
+
if (typeof range !== "string") {
|
|
1395
|
+
reasons.push({
|
|
1396
|
+
packageName: pkg.name,
|
|
1397
|
+
reason: `${section}.${depName} is not a string range`
|
|
1398
|
+
});
|
|
1399
|
+
continue;
|
|
1400
|
+
}
|
|
1401
|
+
if (!satisfiesRange(range, bumpedVersion)) reasons.push({
|
|
1402
|
+
packageName: pkg.name,
|
|
1403
|
+
reason: `${section}.${depName} does not include ${bumpedVersion}`
|
|
1404
|
+
});
|
|
1299
1405
|
}
|
|
1300
|
-
],
|
|
1301
|
-
initial: suggestedVersion === currentVersion ? 0 : 4
|
|
1302
|
-
}, {
|
|
1303
|
-
type: (prev) => prev === "custom" ? "text" : null,
|
|
1304
|
-
name: "custom",
|
|
1305
|
-
message: "Enter the new version number:",
|
|
1306
|
-
initial: suggestedVersion,
|
|
1307
|
-
validate: (custom) => {
|
|
1308
|
-
if (isValidSemver(custom)) return true;
|
|
1309
|
-
return "That's not a valid version number";
|
|
1310
1406
|
}
|
|
1311
|
-
}]);
|
|
1312
|
-
if (!answers.version) return null;
|
|
1313
|
-
if (answers.version === "skip") return null;
|
|
1314
|
-
else if (answers.version === "suggested") return suggestedVersion;
|
|
1315
|
-
else if (answers.version === "custom") {
|
|
1316
|
-
if (!answers.custom) return null;
|
|
1317
|
-
return answers.custom;
|
|
1318
|
-
} else if (answers.version === "as-is") return currentVersion;
|
|
1319
|
-
else return getNextVersion(pkg.version, answers.version);
|
|
1320
|
-
}
|
|
1321
|
-
|
|
1322
|
-
//#endregion
|
|
1323
|
-
//#region src/core/workspace.ts
|
|
1324
|
-
async function discoverWorkspacePackages(workspaceRoot, options) {
|
|
1325
|
-
let workspaceOptions;
|
|
1326
|
-
let explicitPackages;
|
|
1327
|
-
if (options.packages == null || options.packages === true) workspaceOptions = { excludePrivate: false };
|
|
1328
|
-
else if (Array.isArray(options.packages)) {
|
|
1329
|
-
workspaceOptions = {
|
|
1330
|
-
excludePrivate: false,
|
|
1331
|
-
include: options.packages
|
|
1332
|
-
};
|
|
1333
|
-
explicitPackages = options.packages;
|
|
1334
|
-
} else {
|
|
1335
|
-
workspaceOptions = options.packages;
|
|
1336
|
-
if (options.packages.include) explicitPackages = options.packages.include;
|
|
1337
|
-
}
|
|
1338
|
-
let workspacePackages = await findWorkspacePackages(workspaceRoot, workspaceOptions);
|
|
1339
|
-
if (explicitPackages) {
|
|
1340
|
-
const foundNames = new Set(workspacePackages.map((p) => p.name));
|
|
1341
|
-
const missing = explicitPackages.filter((p) => !foundNames.has(p));
|
|
1342
|
-
if (missing.length > 0) exitWithError(`Package${missing.length > 1 ? "s" : ""} not found in workspace: ${missing.join(", ")}`, "Check your package names or run 'pnpm ls' to see available packages");
|
|
1343
1407
|
}
|
|
1344
|
-
|
|
1345
|
-
if (!isCI && isPackagePromptEnabled && !explicitPackages) {
|
|
1346
|
-
const selectedNames = await selectPackagePrompt(workspacePackages);
|
|
1347
|
-
workspacePackages = workspacePackages.filter((pkg) => selectedNames.includes(pkg.name));
|
|
1348
|
-
}
|
|
1349
|
-
return workspacePackages;
|
|
1408
|
+
return reasons;
|
|
1350
1409
|
}
|
|
1351
|
-
|
|
1352
|
-
|
|
1353
|
-
const
|
|
1354
|
-
|
|
1355
|
-
|
|
1356
|
-
|
|
1357
|
-
|
|
1358
|
-
|
|
1359
|
-
|
|
1360
|
-
}
|
|
1361
|
-
|
|
1362
|
-
|
|
1363
|
-
|
|
1364
|
-
|
|
1365
|
-
|
|
1366
|
-
|
|
1367
|
-
|
|
1368
|
-
|
|
1369
|
-
return null;
|
|
1370
|
-
}
|
|
1371
|
-
return {
|
|
1372
|
-
name: rawProject.name,
|
|
1373
|
-
version: rawProject.version,
|
|
1374
|
-
path: rawProject.path,
|
|
1375
|
-
packageJson,
|
|
1376
|
-
workspaceDependencies: Object.keys(rawProject.dependencies || []).filter((dep) => {
|
|
1377
|
-
return allPackageNames.has(dep);
|
|
1378
|
-
}),
|
|
1379
|
-
workspaceDevDependencies: Object.keys(rawProject.devDependencies || []).filter((dep) => {
|
|
1380
|
-
return allPackageNames.has(dep);
|
|
1381
|
-
})
|
|
1382
|
-
};
|
|
1410
|
+
function constructVerifyProgram(config) {
|
|
1411
|
+
return Effect.gen(function* () {
|
|
1412
|
+
const git = yield* GitService;
|
|
1413
|
+
const github = yield* GitHubService;
|
|
1414
|
+
const dependencyGraph = yield* DependencyGraphService;
|
|
1415
|
+
const versionCalculator = yield* VersionCalculatorService;
|
|
1416
|
+
const workspace = yield* WorkspaceService;
|
|
1417
|
+
yield* git.workspace.assertWorkspaceReady;
|
|
1418
|
+
const releasePullRequest = yield* github.getPullRequestByBranch(config.branch.release);
|
|
1419
|
+
if (!releasePullRequest || !releasePullRequest.head) return yield* Effect.fail(/* @__PURE__ */ new Error(`Release pull request for branch "${config.branch.release}" does not exist.`));
|
|
1420
|
+
yield* Console.log(`✅ Release pull request #${releasePullRequest.number} exists.`);
|
|
1421
|
+
if ((yield* git.branches.get) !== config.branch.default) {
|
|
1422
|
+
yield* git.branches.checkout(config.branch.default);
|
|
1423
|
+
yield* Console.log(`✅ Checked out to default branch "${config.branch.default}".`);
|
|
1424
|
+
}
|
|
1425
|
+
const overrides = yield* loadOverrides({
|
|
1426
|
+
sha: releasePullRequest.head.sha,
|
|
1427
|
+
overridesPath: ".github/ucdjs-release.overrides.json"
|
|
1383
1428
|
});
|
|
1384
|
-
|
|
1385
|
-
|
|
1386
|
-
|
|
1387
|
-
|
|
1388
|
-
|
|
1389
|
-
|
|
1390
|
-
|
|
1391
|
-
|
|
1392
|
-
|
|
1393
|
-
|
|
1394
|
-
|
|
1395
|
-
|
|
1396
|
-
if (!options.include.includes(pkg.name)) return false;
|
|
1397
|
-
}
|
|
1398
|
-
if (options.exclude?.includes(pkg.name)) return false;
|
|
1399
|
-
return true;
|
|
1400
|
-
}
|
|
1401
|
-
|
|
1402
|
-
//#endregion
|
|
1403
|
-
//#region src/shared/options.ts
|
|
1404
|
-
const DEFAULT_COMMIT_GROUPS = [
|
|
1405
|
-
{
|
|
1406
|
-
name: "features",
|
|
1407
|
-
title: "Features",
|
|
1408
|
-
types: ["feat"]
|
|
1409
|
-
},
|
|
1410
|
-
{
|
|
1411
|
-
name: "fixes",
|
|
1412
|
-
title: "Bug Fixes",
|
|
1413
|
-
types: ["fix", "perf"]
|
|
1414
|
-
},
|
|
1415
|
-
{
|
|
1416
|
-
name: "refactor",
|
|
1417
|
-
title: "Refactoring",
|
|
1418
|
-
types: ["refactor"]
|
|
1419
|
-
},
|
|
1420
|
-
{
|
|
1421
|
-
name: "docs",
|
|
1422
|
-
title: "Documentation",
|
|
1423
|
-
types: ["docs"]
|
|
1424
|
-
}
|
|
1425
|
-
];
|
|
1426
|
-
function normalizeSharedOptions(options) {
|
|
1427
|
-
const { workspaceRoot = process.cwd(), githubToken = "", repo: fullRepo, packages = true, prompts: prompts$1 = {
|
|
1428
|
-
packages: true,
|
|
1429
|
-
versions: true
|
|
1430
|
-
}, groups = DEFAULT_COMMIT_GROUPS } = options;
|
|
1431
|
-
if (!githubToken.trim()) exitWithError("GitHub token is required", "Set GITHUB_TOKEN environment variable or pass it in options");
|
|
1432
|
-
if (!fullRepo || !fullRepo.trim() || !fullRepo.includes("/")) exitWithError("Repository (repo) is required", "Specify the repository in 'owner/repo' format (e.g., 'octocat/hello-world')");
|
|
1433
|
-
const [owner, repo] = fullRepo.split("/");
|
|
1434
|
-
if (!owner || !repo) exitWithError(`Invalid repo format: "${fullRepo}"`, "Expected format: \"owner/repo\" (e.g., \"octocat/hello-world\")");
|
|
1435
|
-
return {
|
|
1436
|
-
packages: typeof packages === "object" && !Array.isArray(packages) ? {
|
|
1437
|
-
exclude: packages.exclude ?? [],
|
|
1438
|
-
include: packages.include ?? [],
|
|
1439
|
-
excludePrivate: packages.excludePrivate ?? false
|
|
1440
|
-
} : packages,
|
|
1441
|
-
prompts: {
|
|
1442
|
-
packages: prompts$1?.packages ?? true,
|
|
1443
|
-
versions: prompts$1?.versions ?? true
|
|
1444
|
-
},
|
|
1445
|
-
workspaceRoot,
|
|
1446
|
-
githubToken,
|
|
1447
|
-
owner,
|
|
1448
|
-
repo,
|
|
1449
|
-
groups
|
|
1450
|
-
};
|
|
1451
|
-
}
|
|
1452
|
-
async function normalizeReleaseOptions(options) {
|
|
1453
|
-
const normalized = normalizeSharedOptions(options);
|
|
1454
|
-
let defaultBranch = options.branch?.default?.trim();
|
|
1455
|
-
const releaseBranch = options.branch?.release?.trim() ?? "release/next";
|
|
1456
|
-
if (defaultBranch == null || defaultBranch === "") {
|
|
1457
|
-
defaultBranch = await getDefaultBranch(normalized.workspaceRoot);
|
|
1458
|
-
if (!defaultBranch) exitWithError("Could not determine default branch", "Please specify the default branch in options");
|
|
1459
|
-
}
|
|
1460
|
-
if (defaultBranch === releaseBranch) exitWithError(`Default branch and release branch cannot be the same: "${defaultBranch}"`, "Specify different branches for default and release");
|
|
1461
|
-
const localBranchExists = await doesBranchExist(defaultBranch, normalized.workspaceRoot);
|
|
1462
|
-
const remoteBranchExists = await doesBranchExist(`origin/${defaultBranch}`, normalized.workspaceRoot);
|
|
1463
|
-
if (!localBranchExists && !remoteBranchExists) {
|
|
1464
|
-
const availableBranches = await getAvailableBranches(normalized.workspaceRoot);
|
|
1465
|
-
exitWithError(`Default branch "${defaultBranch}" does not exist in the repository`, `Couldn't find it locally or on the remote 'origin'.\nAvailable local branches: ${availableBranches.join(", ")}`);
|
|
1466
|
-
}
|
|
1467
|
-
logger.verbose(`Using default branch: ${farver.green(defaultBranch)}`);
|
|
1468
|
-
return {
|
|
1469
|
-
...normalized,
|
|
1470
|
-
branch: {
|
|
1471
|
-
release: releaseBranch,
|
|
1472
|
-
default: defaultBranch
|
|
1473
|
-
},
|
|
1474
|
-
safeguards: options.safeguards ?? true,
|
|
1475
|
-
globalCommitMode: options.globalCommitMode ?? "dependencies",
|
|
1476
|
-
pullRequest: {
|
|
1477
|
-
title: options.pullRequest?.title ?? "chore: release new version",
|
|
1478
|
-
body: options.pullRequest?.body ?? DEFAULT_PR_BODY_TEMPLATE
|
|
1479
|
-
},
|
|
1480
|
-
changelog: {
|
|
1481
|
-
enabled: options.changelog?.enabled ?? true,
|
|
1482
|
-
template: options.changelog?.template ?? DEFAULT_CHANGELOG_TEMPLATE
|
|
1429
|
+
yield* Console.log("Loaded overrides:", overrides);
|
|
1430
|
+
const packages = yield* workspace.discoverWorkspacePackages.pipe(Effect.flatMap(mergePackageCommitsIntoPackages), Effect.flatMap((pkgs) => mergeCommitsAffectingGloballyIntoPackage(pkgs, config.globalCommitMode)));
|
|
1431
|
+
yield* Console.log("Discovered packages with commits and global commits:", packages);
|
|
1432
|
+
const releases = yield* versionCalculator.calculateBumps(packages, overrides);
|
|
1433
|
+
const ordered = yield* dependencyGraph.topologicalOrder(packages);
|
|
1434
|
+
yield* Console.log("Calculated releases:", releases);
|
|
1435
|
+
yield* Console.log("Release order:", ordered);
|
|
1436
|
+
const releaseHeadSha = releasePullRequest.head.sha;
|
|
1437
|
+
const branchSnapshots = /* @__PURE__ */ new Map();
|
|
1438
|
+
for (const pkg of packages) {
|
|
1439
|
+
const snapshot = yield* snapshotPackageJson(pkg, releaseHeadSha).pipe(Effect.catchAll((err) => Effect.succeed(err instanceof Error ? err : new Error(String(err)))));
|
|
1440
|
+
branchSnapshots.set(pkg.name, snapshot);
|
|
1483
1441
|
}
|
|
1484
|
-
|
|
1442
|
+
const drift = findDrift(packages, releases, branchSnapshots);
|
|
1443
|
+
if (drift.length === 0) yield* Console.log("✅ Release branch is in sync with expected releases.");
|
|
1444
|
+
else yield* Console.log("❌ Release branch is out of sync:", drift);
|
|
1445
|
+
const status = drift.length === 0 ? {
|
|
1446
|
+
state: "success",
|
|
1447
|
+
description: "Release artifacts in sync",
|
|
1448
|
+
context: "release/verify"
|
|
1449
|
+
} : {
|
|
1450
|
+
state: "failure",
|
|
1451
|
+
description: "Release branch out of sync",
|
|
1452
|
+
context: "release/verify"
|
|
1453
|
+
};
|
|
1454
|
+
yield* github.setCommitStatus(releaseHeadSha, status);
|
|
1455
|
+
if (drift.length > 0) return yield* Effect.fail(/* @__PURE__ */ new Error("Release branch is out of sync."));
|
|
1456
|
+
});
|
|
1485
1457
|
}
|
|
1486
1458
|
|
|
1487
1459
|
//#endregion
|
|
1488
|
-
//#region src/
|
|
1489
|
-
async function
|
|
1490
|
-
const
|
|
1491
|
-
|
|
1492
|
-
const
|
|
1493
|
-
|
|
1494
|
-
|
|
1495
|
-
|
|
1496
|
-
|
|
1497
|
-
|
|
1498
|
-
logger.item(`Found ${workspacePackages.length} packages`);
|
|
1499
|
-
for (const pkg of workspacePackages) {
|
|
1500
|
-
logger.item(`${farver.cyan(pkg.name)} (${farver.bold(pkg.version)})`);
|
|
1501
|
-
logger.item(` ${farver.gray("→")} ${farver.gray(pkg.path)}`);
|
|
1502
|
-
}
|
|
1503
|
-
logger.emptyLine();
|
|
1504
|
-
const groupedPackageCommits = await getWorkspacePackageGroupedCommits(workspaceRoot, workspacePackages);
|
|
1505
|
-
const globalCommitsPerPackage = await getGlobalCommitsPerPackage(workspaceRoot, groupedPackageCommits, workspacePackages, normalizedOptions.globalCommitMode);
|
|
1506
|
-
const githubClient = createGitHubClient({
|
|
1507
|
-
owner: normalizedOptions.owner,
|
|
1508
|
-
repo: normalizedOptions.repo,
|
|
1509
|
-
githubToken: normalizedOptions.githubToken
|
|
1510
|
-
});
|
|
1511
|
-
const prOps = await orchestrateReleasePullRequest({
|
|
1512
|
-
workspaceRoot,
|
|
1513
|
-
githubClient,
|
|
1514
|
-
releaseBranch: normalizedOptions.branch.release,
|
|
1515
|
-
defaultBranch: normalizedOptions.branch.default,
|
|
1516
|
-
pullRequestTitle: options.pullRequest?.title,
|
|
1517
|
-
pullRequestBody: options.pullRequest?.body
|
|
1460
|
+
//#region src/index.ts
|
|
1461
|
+
async function createReleaseScripts(options) {
|
|
1462
|
+
const config = normalizeReleaseScriptsOptions(options);
|
|
1463
|
+
const AppLayer = Layer.succeed(ReleaseScriptsOptions, config).pipe(Layer.provide(NodeCommandExecutor.layer), Layer.provide(NodeFileSystem.layer), Layer.provide(ChangelogService.Default), Layer.provide(GitService.Default), Layer.provide(GitHubService.Default), Layer.provide(DependencyGraphService.Default), Layer.provide(NPMService.Default), Layer.provide(PackageUpdaterService.Default), Layer.provide(VersionCalculatorService.Default), Layer.provide(WorkspaceService.Default));
|
|
1464
|
+
const runProgram = (program) => {
|
|
1465
|
+
const provided = program.pipe(Effect.provide(AppLayer));
|
|
1466
|
+
return Effect.runPromise(provided);
|
|
1467
|
+
};
|
|
1468
|
+
const safeguardProgram = Effect.gen(function* () {
|
|
1469
|
+
return yield* (yield* GitService).workspace.assertWorkspaceReady;
|
|
1518
1470
|
});
|
|
1519
|
-
await prOps.prepareBranch();
|
|
1520
|
-
const overridesPath = join(workspaceRoot, ucdjsReleaseOverridesPath);
|
|
1521
|
-
let existingOverrides = {};
|
|
1522
1471
|
try {
|
|
1523
|
-
|
|
1524
|
-
|
|
1525
|
-
|
|
1526
|
-
|
|
1527
|
-
|
|
1528
|
-
}
|
|
1529
|
-
const { allUpdates, applyUpdates, overrides: newOverrides } = await calculateAndPrepareVersionUpdates({
|
|
1530
|
-
workspacePackages,
|
|
1531
|
-
packageCommits: groupedPackageCommits,
|
|
1532
|
-
workspaceRoot,
|
|
1533
|
-
showPrompt: options.prompts?.versions !== false,
|
|
1534
|
-
globalCommitsPerPackage,
|
|
1535
|
-
overrides: existingOverrides
|
|
1536
|
-
});
|
|
1537
|
-
if (Object.keys(newOverrides).length > 0) {
|
|
1538
|
-
logger.info("Writing version overrides file...");
|
|
1539
|
-
try {
|
|
1540
|
-
await mkdir(join(workspaceRoot, ".github"), { recursive: true });
|
|
1541
|
-
await writeFile(overridesPath, JSON.stringify(newOverrides, null, 2), "utf-8");
|
|
1542
|
-
logger.success("Successfully wrote version overrides file.");
|
|
1543
|
-
} catch (e) {
|
|
1544
|
-
logger.error("Failed to write version overrides file:", e);
|
|
1545
|
-
}
|
|
1546
|
-
}
|
|
1547
|
-
if (Object.keys(newOverrides).length === 0 && Object.keys(existingOverrides).length > 0) {
|
|
1548
|
-
let shouldRemoveOverrides = false;
|
|
1549
|
-
for (const update of allUpdates) {
|
|
1550
|
-
const overriddenVersion = existingOverrides[update.package.name];
|
|
1551
|
-
if (overriddenVersion) {
|
|
1552
|
-
if (compare(update.newVersion, overriddenVersion.version) > 0) {
|
|
1553
|
-
shouldRemoveOverrides = true;
|
|
1554
|
-
break;
|
|
1555
|
-
}
|
|
1556
|
-
}
|
|
1557
|
-
}
|
|
1558
|
-
if (shouldRemoveOverrides) {
|
|
1559
|
-
logger.info("Removing obsolete version overrides file...");
|
|
1560
|
-
try {
|
|
1561
|
-
await rm(overridesPath);
|
|
1562
|
-
logger.success("Successfully removed obsolete version overrides file.");
|
|
1563
|
-
} catch (e) {
|
|
1564
|
-
logger.error("Failed to remove obsolete version overrides file:", e);
|
|
1565
|
-
}
|
|
1566
|
-
}
|
|
1567
|
-
}
|
|
1568
|
-
if (allUpdates.filter((u) => u.hasDirectChanges).length === 0) logger.warn("No packages have changes requiring a release");
|
|
1569
|
-
logger.section("🔄 Version Updates");
|
|
1570
|
-
logger.item(`Updating ${allUpdates.length} packages (including dependents)`);
|
|
1571
|
-
for (const update of allUpdates) logger.item(`${update.package.name}: ${update.currentVersion} → ${update.newVersion}`);
|
|
1572
|
-
await applyUpdates();
|
|
1573
|
-
if (normalizedOptions.changelog.enabled) {
|
|
1574
|
-
logger.step("Updating changelogs");
|
|
1575
|
-
const changelogPromises = allUpdates.map((update) => {
|
|
1576
|
-
const pkgCommits = groupedPackageCommits.get(update.package.name) || [];
|
|
1577
|
-
const globalCommits = globalCommitsPerPackage.get(update.package.name) || [];
|
|
1578
|
-
const allCommits = [...pkgCommits, ...globalCommits];
|
|
1579
|
-
if (allCommits.length === 0) {
|
|
1580
|
-
logger.verbose(`No commits for ${update.package.name}, skipping changelog`);
|
|
1581
|
-
return Promise.resolve();
|
|
1582
|
-
}
|
|
1583
|
-
logger.verbose(`Updating changelog for ${farver.cyan(update.package.name)}`);
|
|
1584
|
-
return updateChangelog({
|
|
1585
|
-
normalizedOptions: {
|
|
1586
|
-
...normalizedOptions,
|
|
1587
|
-
workspaceRoot
|
|
1588
|
-
},
|
|
1589
|
-
githubClient,
|
|
1590
|
-
workspacePackage: update.package,
|
|
1591
|
-
version: update.newVersion,
|
|
1592
|
-
previousVersion: update.currentVersion !== "0.0.0" ? update.currentVersion : void 0,
|
|
1593
|
-
commits: allCommits,
|
|
1594
|
-
date: (/* @__PURE__ */ new Date()).toISOString().split("T")[0]
|
|
1595
|
-
});
|
|
1596
|
-
}).filter((p) => p != null);
|
|
1597
|
-
const updates = await Promise.all(changelogPromises);
|
|
1598
|
-
logger.success(`Updated ${updates.length} changelog(s)`);
|
|
1599
|
-
}
|
|
1600
|
-
if (!await prOps.syncChanges(true)) if (prOps.doesReleasePRExist && prOps.existingPullRequest) {
|
|
1601
|
-
logger.item("No updates needed, PR is already up to date");
|
|
1602
|
-
const { pullRequest: pullRequest$1, created: created$1 } = await prOps.syncPullRequest(allUpdates);
|
|
1603
|
-
await prOps.cleanup();
|
|
1604
|
-
return {
|
|
1605
|
-
updates: allUpdates,
|
|
1606
|
-
prUrl: pullRequest$1?.html_url,
|
|
1607
|
-
created: created$1
|
|
1608
|
-
};
|
|
1609
|
-
} else {
|
|
1610
|
-
logger.error("No changes to commit, and no existing PR. Nothing to do.");
|
|
1611
|
-
return null;
|
|
1612
|
-
}
|
|
1613
|
-
const { pullRequest, created } = await prOps.syncPullRequest(allUpdates);
|
|
1614
|
-
await prOps.cleanup();
|
|
1615
|
-
if (pullRequest?.html_url) {
|
|
1616
|
-
logger.section("🚀 Pull Request");
|
|
1617
|
-
logger.success(`Pull request ${created ? "created" : "updated"}: ${pullRequest.html_url}`);
|
|
1472
|
+
await runProgram(safeguardProgram);
|
|
1473
|
+
} catch (err) {
|
|
1474
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
1475
|
+
await Effect.runPromise(Console.error(`❌ Initialization failed: ${message}`));
|
|
1476
|
+
throw err;
|
|
1618
1477
|
}
|
|
1619
1478
|
return {
|
|
1620
|
-
|
|
1621
|
-
|
|
1622
|
-
created
|
|
1623
|
-
};
|
|
1624
|
-
}
|
|
1625
|
-
async function orchestrateReleasePullRequest({ workspaceRoot, githubClient, releaseBranch, defaultBranch, pullRequestTitle, pullRequestBody }) {
|
|
1626
|
-
const currentBranch = await getCurrentBranch(workspaceRoot);
|
|
1627
|
-
if (currentBranch !== defaultBranch) exitWithError(`Current branch is '${currentBranch}'. Please switch to the default branch '${defaultBranch}' before proceeding.`, `git checkout ${defaultBranch}`);
|
|
1628
|
-
const existingPullRequest = await githubClient.getExistingPullRequest(releaseBranch);
|
|
1629
|
-
const doesReleasePRExist = !!existingPullRequest;
|
|
1630
|
-
if (doesReleasePRExist) logger.item("Found existing release pull request");
|
|
1631
|
-
else logger.item("Will create new pull request");
|
|
1632
|
-
const branchExists = await doesBranchExist(releaseBranch, workspaceRoot);
|
|
1633
|
-
return {
|
|
1634
|
-
existingPullRequest,
|
|
1635
|
-
doesReleasePRExist,
|
|
1636
|
-
async prepareBranch() {
|
|
1637
|
-
if (!branchExists) await createBranch(releaseBranch, defaultBranch, workspaceRoot);
|
|
1638
|
-
logger.step(`Checking out release branch: ${releaseBranch}`);
|
|
1639
|
-
if (!await checkoutBranch(releaseBranch, workspaceRoot)) throw new Error(`Failed to checkout branch: ${releaseBranch}`);
|
|
1640
|
-
if (branchExists) {
|
|
1641
|
-
logger.step("Pulling latest changes from remote");
|
|
1642
|
-
if (!await pullLatestChanges(releaseBranch, workspaceRoot)) logger.warn("Failed to pull latest changes, continuing anyway");
|
|
1643
|
-
}
|
|
1644
|
-
logger.step(`Rebasing onto ${defaultBranch}`);
|
|
1645
|
-
if (!await rebaseBranch(defaultBranch, workspaceRoot)) throw new Error(`Failed to rebase onto ${defaultBranch}. Please resolve conflicts manually.`);
|
|
1479
|
+
async verify() {
|
|
1480
|
+
return runProgram(constructVerifyProgram(config));
|
|
1646
1481
|
},
|
|
1647
|
-
async
|
|
1648
|
-
|
|
1649
|
-
const isBranchAhead = await isBranchAheadOfRemote(releaseBranch, workspaceRoot);
|
|
1650
|
-
if (!hasCommitted && !isBranchAhead) {
|
|
1651
|
-
logger.item("No changes to commit and branch is in sync with remote");
|
|
1652
|
-
return false;
|
|
1653
|
-
}
|
|
1654
|
-
logger.step("Pushing changes to remote");
|
|
1655
|
-
if (!await pushBranch(releaseBranch, workspaceRoot, { forceWithLease: true })) throw new Error(`Failed to push changes to ${releaseBranch}. Remote may have been updated.`);
|
|
1656
|
-
return true;
|
|
1482
|
+
async prepare() {
|
|
1483
|
+
return runProgram(constructPrepareProgram(config));
|
|
1657
1484
|
},
|
|
1658
|
-
async
|
|
1659
|
-
|
|
1660
|
-
const prBody = generatePullRequestBody(updates, pullRequestBody);
|
|
1661
|
-
const pullRequest = await githubClient.upsertPullRequest({
|
|
1662
|
-
pullNumber: existingPullRequest?.number,
|
|
1663
|
-
title: prTitle,
|
|
1664
|
-
body: prBody,
|
|
1665
|
-
head: releaseBranch,
|
|
1666
|
-
base: defaultBranch
|
|
1667
|
-
});
|
|
1668
|
-
logger.success(`${doesReleasePRExist ? "Updated" : "Created"} pull request: ${pullRequest?.html_url}`);
|
|
1669
|
-
return {
|
|
1670
|
-
pullRequest,
|
|
1671
|
-
created: !doesReleasePRExist
|
|
1672
|
-
};
|
|
1485
|
+
async publish() {
|
|
1486
|
+
return runProgram(constructPublishProgram(config));
|
|
1673
1487
|
},
|
|
1674
|
-
|
|
1675
|
-
|
|
1488
|
+
packages: {
|
|
1489
|
+
async list() {
|
|
1490
|
+
return runProgram(Effect.gen(function* () {
|
|
1491
|
+
return yield* (yield* WorkspaceService).discoverWorkspacePackages;
|
|
1492
|
+
}));
|
|
1493
|
+
},
|
|
1494
|
+
async get(packageName) {
|
|
1495
|
+
return runProgram(Effect.gen(function* () {
|
|
1496
|
+
return (yield* (yield* WorkspaceService).findPackageByName(packageName)) || null;
|
|
1497
|
+
}));
|
|
1498
|
+
}
|
|
1676
1499
|
}
|
|
1677
1500
|
};
|
|
1678
1501
|
}
|
|
1679
1502
|
|
|
1680
1503
|
//#endregion
|
|
1681
|
-
|
|
1682
|
-
async function verify(options) {
|
|
1683
|
-
const { workspaceRoot,...normalizedOptions } = await normalizeReleaseOptions(options);
|
|
1684
|
-
if (normalizedOptions.safeguards && !await isWorkingDirectoryClean(workspaceRoot)) exitWithError("Working directory is not clean. Please commit or stash your changes before proceeding.");
|
|
1685
|
-
const githubClient = createGitHubClient({
|
|
1686
|
-
owner: normalizedOptions.owner,
|
|
1687
|
-
repo: normalizedOptions.repo,
|
|
1688
|
-
githubToken: normalizedOptions.githubToken
|
|
1689
|
-
});
|
|
1690
|
-
const releaseBranch = normalizedOptions.branch.release;
|
|
1691
|
-
const defaultBranch = normalizedOptions.branch.default;
|
|
1692
|
-
const releasePr = await githubClient.getExistingPullRequest(releaseBranch);
|
|
1693
|
-
if (!releasePr || !releasePr.head) {
|
|
1694
|
-
logger.warn(`No open release pull request found for branch "${releaseBranch}". Nothing to verify.`);
|
|
1695
|
-
return;
|
|
1696
|
-
}
|
|
1697
|
-
logger.info(`Found release PR #${releasePr.number}. Verifying against default branch "${defaultBranch}"...`);
|
|
1698
|
-
const originalBranch = await getCurrentBranch(workspaceRoot);
|
|
1699
|
-
if (originalBranch !== defaultBranch) await checkoutBranch(defaultBranch, workspaceRoot);
|
|
1700
|
-
const overridesPath = ucdjsReleaseOverridesPath;
|
|
1701
|
-
let existingOverrides = {};
|
|
1702
|
-
try {
|
|
1703
|
-
const overridesContent = await readFileFromGit(workspaceRoot, releasePr.head.sha, overridesPath);
|
|
1704
|
-
if (overridesContent) {
|
|
1705
|
-
existingOverrides = JSON.parse(overridesContent);
|
|
1706
|
-
logger.info("Found existing version overrides file on release branch.");
|
|
1707
|
-
}
|
|
1708
|
-
} catch {
|
|
1709
|
-
logger.info("No version overrides file found on release branch. Continuing...");
|
|
1710
|
-
}
|
|
1711
|
-
const mainPackages = await discoverWorkspacePackages(workspaceRoot, options);
|
|
1712
|
-
const mainCommits = await getWorkspacePackageGroupedCommits(workspaceRoot, mainPackages);
|
|
1713
|
-
const { allUpdates: expectedUpdates } = await calculateAndPrepareVersionUpdates({
|
|
1714
|
-
workspacePackages: mainPackages,
|
|
1715
|
-
packageCommits: mainCommits,
|
|
1716
|
-
workspaceRoot,
|
|
1717
|
-
showPrompt: false,
|
|
1718
|
-
globalCommitsPerPackage: await getGlobalCommitsPerPackage(workspaceRoot, mainCommits, mainPackages, normalizedOptions.globalCommitMode),
|
|
1719
|
-
overrides: existingOverrides
|
|
1720
|
-
});
|
|
1721
|
-
const expectedVersionMap = new Map(expectedUpdates.map((u) => [u.package.name, u.newVersion]));
|
|
1722
|
-
const prVersionMap = /* @__PURE__ */ new Map();
|
|
1723
|
-
for (const pkg of mainPackages) {
|
|
1724
|
-
const pkgJsonPath = relative(workspaceRoot, join(pkg.path, "package.json"));
|
|
1725
|
-
const pkgJsonContent = await readFileFromGit(workspaceRoot, releasePr.head.sha, pkgJsonPath);
|
|
1726
|
-
if (pkgJsonContent) {
|
|
1727
|
-
const pkgJson = JSON.parse(pkgJsonContent);
|
|
1728
|
-
prVersionMap.set(pkg.name, pkgJson.version);
|
|
1729
|
-
}
|
|
1730
|
-
}
|
|
1731
|
-
if (originalBranch !== defaultBranch) await checkoutBranch(originalBranch, workspaceRoot);
|
|
1732
|
-
let isOutOfSync = false;
|
|
1733
|
-
for (const [pkgName, expectedVersion] of expectedVersionMap.entries()) {
|
|
1734
|
-
const prVersion = prVersionMap.get(pkgName);
|
|
1735
|
-
if (!prVersion) {
|
|
1736
|
-
logger.warn(`Package "${pkgName}" found in default branch but not in release branch. Skipping.`);
|
|
1737
|
-
continue;
|
|
1738
|
-
}
|
|
1739
|
-
if (gt(expectedVersion, prVersion)) {
|
|
1740
|
-
logger.error(`Package "${pkgName}" is out of sync. Expected version >= ${expectedVersion}, but PR has ${prVersion}.`);
|
|
1741
|
-
isOutOfSync = true;
|
|
1742
|
-
} else logger.success(`Package "${pkgName}" is up to date (PR version: ${prVersion}, Expected: ${expectedVersion})`);
|
|
1743
|
-
}
|
|
1744
|
-
const statusContext = "ucdjs/release-verify";
|
|
1745
|
-
if (isOutOfSync) {
|
|
1746
|
-
await githubClient.setCommitStatus({
|
|
1747
|
-
sha: releasePr.head.sha,
|
|
1748
|
-
state: "failure",
|
|
1749
|
-
context: statusContext,
|
|
1750
|
-
description: "Release PR is out of sync with the default branch. Please re-run the release process."
|
|
1751
|
-
});
|
|
1752
|
-
logger.error("Verification failed. Commit status set to 'failure'.");
|
|
1753
|
-
} else {
|
|
1754
|
-
await githubClient.setCommitStatus({
|
|
1755
|
-
sha: releasePr.head.sha,
|
|
1756
|
-
state: "success",
|
|
1757
|
-
context: statusContext,
|
|
1758
|
-
description: "Release PR is up to date.",
|
|
1759
|
-
targetUrl: `https://github.com/${normalizedOptions.owner}/${normalizedOptions.repo}/pull/${releasePr.number}`
|
|
1760
|
-
});
|
|
1761
|
-
logger.success("Verification successful. Commit status set to 'success'.");
|
|
1762
|
-
}
|
|
1763
|
-
}
|
|
1764
|
-
|
|
1765
|
-
//#endregion
|
|
1766
|
-
export { publish, release, verify };
|
|
1504
|
+
export { createReleaseScripts };
|