@ucdjs/release-scripts 0.1.0-beta.14 → 0.1.0-beta.16
- package/dist/{eta-Boh7yPZi.mjs → eta-j5TFRbI4.mjs} +3 -3
- package/dist/index.d.mts +30 -2
- package/dist/index.mjs +538 -258
- package/package.json +6 -4

package/dist/{eta-Boh7yPZi.mjs → eta-j5TFRbI4.mjs}
@@ -1,5 +1,5 @@
-import * as fs from "node:fs";
 import * as path from "node:path";
+import * as fs from "node:fs";
 
 //#region node_modules/.pnpm/eta@4.0.1/node_modules/eta/dist/index.js
 var EtaError = class extends Error {
@@ -90,8 +90,8 @@ function resolvePath(templatePath, options) {
 } else throw new EtaFileResolutionError(`Template '${templatePath}' is not in the views directory`);
 }
 function dirIsChild(parent, dir) {
-const relative = path.relative(parent, dir);
-return relative && !relative.startsWith("..") && !path.isAbsolute(relative);
+const relative$1 = path.relative(parent, dir);
+return relative$1 && !relative$1.startsWith("..") && !path.isAbsolute(relative$1);
 }
 const absolutePathRegExp = /^\\|^\//;
 /* istanbul ignore next */

package/dist/index.d.mts
CHANGED
@@ -11,11 +11,25 @@ interface WorkspacePackage {
 //#region src/shared/types.d.ts
 type BumpKind = "none" | "patch" | "minor" | "major";
 type GlobalCommitMode = false | "dependencies" | "all";
+interface CommitGroup {
+/**
+* Unique identifier for the group
+*/
+name: string;
+/**
+* Display title (e.g., "Features", "Bug Fixes")
+*/
+title: string;
+/**
+* Conventional commit types to include in this group
+*/
+types: string[];
+}
 interface SharedOptions {
 /**
 * Repository identifier (e.g., "owner/repo")
 */
-repo: string
+repo: `${string}/${string}`;
 /**
 * Root directory of the workspace (defaults to process.cwd())
 */
@@ -44,6 +58,12 @@ interface SharedOptions {
 */
 versions?: boolean;
 };
+/**
+* Commit grouping configuration
+* Used for changelog generation and commit display
+* @default DEFAULT_COMMIT_GROUPS
+*/
+groups?: CommitGroup[];
 }
 interface PackageJson {
 name: string;
@@ -136,6 +156,10 @@ interface ReleaseOptions extends SharedOptions {
 * @default true
 */
 enabled?: boolean;
+/**
+* Custom changelog entry template (ETA format)
+*/
+template?: string;
 };
 globalCommitMode?: GlobalCommitMode;
 }
@@ -155,4 +179,8 @@ interface ReleaseResult {
 }
 declare function release(options: ReleaseOptions): Promise<ReleaseResult | null>;
 //#endregion
-
+//#region src/verify.d.ts
+interface VerifyOptions extends SharedOptions {}
+declare function verify(_options: VerifyOptions): void;
+//#endregion
+export { type PublishOptions, type ReleaseOptions, type ReleaseResult, type VerifyOptions, publish, release, verify };

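
Note (editor's illustration, not part of the published package contents): the new declarations above introduce CommitGroup, a template-literal type for repo, an optional groups array, an optional changelog template, and a verify export. A minimal sketch of how a consumer's release script might exercise them, assuming a GITHUB_TOKEN environment variable; option names follow the declarations above:

  // release.config.ts — illustrative sketch only.
  import { release } from "@ucdjs/release-scripts";

  const result = await release({
    // `repo` is now typed as `${string}/${string}`, so an "owner/repo" pair is required.
    repo: "octocat/hello-world",
    githubToken: process.env.GITHUB_TOKEN!,
    // Optional commit grouping used for changelog generation and commit display.
    groups: [
      { name: "features", title: "Features", types: ["feat"] },
      { name: "fixes", title: "Bug Fixes", types: ["fix", "perf"] },
    ],
    changelog: {
      enabled: true,
      // Custom changelog entry template (ETA format).
      template: "## <%= it.version %> (<%= it.date %>)",
    },
  });
  console.log(result?.prUrl);
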
package/dist/index.mjs
CHANGED
@@ -1,12 +1,12 @@
-import { t as Eta } from "./eta-
+import { t as Eta } from "./eta-j5TFRbI4.mjs";
+import { readFile, writeFile } from "node:fs/promises";
+import { join, relative } from "node:path";
 import process from "node:process";
 import farver from "farver";
 import mri from "mri";
 import { exec } from "tinyexec";
 import { dedent } from "@luxass/utils";
-import {
-import { readFile, writeFile } from "node:fs/promises";
-import { getCommits } from "commit-parser";
+import { getCommits, groupByType } from "commit-parser";
 import prompts from "prompts";
 
 //#region src/publish.ts
@@ -49,8 +49,8 @@ const logger = {
 emptyLine: () => {
 console.log();
 },
-item: (message) => {
-console.log(` ${message}
+item: (message, ...args$1) => {
+console.log(` ${message}`, ...args$1);
 },
 step: (message) => {
 console.log(` ${farver.blue("→")} ${message}`);
@@ -78,28 +78,6 @@ function exitWithError(message, hint) {
 if (hint) console.error(farver.gray(` ${hint}`));
 process.exit(1);
 }
-function normalizeSharedOptions(options) {
-const { workspaceRoot = process.cwd(), githubToken = "", repo: fullRepo, packages = true, prompts: prompts$1 = {
-packages: true,
-versions: true
-},...rest } = options;
-if (!githubToken.trim()) exitWithError("GitHub token is required", "Set GITHUB_TOKEN environment variable or pass it in options");
-if (!fullRepo || !fullRepo.trim() || !fullRepo.includes("/")) exitWithError("Repository (repo) is required", "Specify the repository in 'owner/repo' format (e.g., 'octocat/hello-world')");
-const [owner, repo] = fullRepo.split("/");
-if (!owner || !repo) exitWithError(`Invalid repo format: "${fullRepo}"`, "Expected format: \"owner/repo\" (e.g., \"octocat/hello-world\")");
-return {
-...rest,
-packages,
-prompts: {
-packages: prompts$1?.packages ?? true,
-versions: prompts$1?.versions ?? true
-},
-workspaceRoot,
-githubToken,
-owner,
-repo
-};
-}
 if (isDryRun || isVerbose || isForce) {
 logger.verbose(farver.inverse(farver.yellow(" Running with special flags ")));
 logger.verbose({
@@ -284,21 +262,15 @@ async function isBranchAheadOfRemote(branch, workspaceRoot) {
 return true;
 }
 }
-async function hasChangesToCommit(workspaceRoot) {
-return (await run("git", ["status", "--porcelain"], { nodeOptions: {
-cwd: workspaceRoot,
-stdio: "pipe"
-} })).stdout.trim() !== "";
-}
 async function commitChanges(message, workspaceRoot) {
 try {
 await run("git", ["add", "."], { nodeOptions: {
 cwd: workspaceRoot,
 stdio: "pipe"
 } });
-if (
+if (await isWorkingDirectoryClean(workspaceRoot)) return false;
 logger.info(`Committing changes: ${farver.dim(message)}`);
-await
+await runIfNotDry("git", [
 "commit",
 "-m",
 message
@@ -334,19 +306,291 @@ async function pushBranch(branch, workspaceRoot, options) {
 exitWithError(`Failed to push branch: ${branch}`, `Make sure you have permission to push to the remote repository`);
 }
 }
+async function readFileFromGit(workspaceRoot, ref, filePath) {
+try {
+return (await run("git", ["show", `${ref}:${filePath}`], { nodeOptions: {
+cwd: workspaceRoot,
+stdio: "pipe"
+} })).stdout;
+} catch {
+return null;
+}
+}
+async function getMostRecentPackageTag(workspaceRoot, packageName) {
+try {
+const { stdout } = await run("git", [
+"tag",
+"--list",
+`${packageName}@*`
+], { nodeOptions: {
+cwd: workspaceRoot,
+stdio: "pipe"
+} });
+const tags = stdout.split("\n").map((tag) => tag.trim()).filter(Boolean);
+if (tags.length === 0) return;
+return tags.reverse()[0];
+} catch (err) {
+logger.warn(`Failed to get tags for package ${packageName}: ${err.message}`);
+return;
+}
+}
+/**
+* Builds a mapping of commit SHAs to the list of files changed in each commit
+* within a given inclusive range.
+*
+* Internally runs:
+* git log --name-only --format=%H <from>^..<to>
+*
+* Notes
+* - This includes the commit identified by `from` (via `from^..to`).
+* - Order of commits in the resulting Map follows `git log` output
+* (reverse chronological, newest first).
+* - On failure (e.g., invalid refs), the function returns null.
+*
+* @param {string} workspaceRoot Absolute path to the git repository root used as cwd.
+* @param {string} from Starting commit/ref (inclusive).
+* @param {string} to Ending commit/ref (inclusive).
+* @returns {Promise<Map<string, string[]> | null>} Promise resolving to a Map where keys are commit SHAs and values are
+* arrays of file paths changed by that commit, or null on error.
+*/
+async function getGroupedFilesByCommitSha(workspaceRoot, from, to) {
+const commitsMap = /* @__PURE__ */ new Map();
+try {
+const { stdout } = await run("git", [
+"log",
+"--name-only",
+"--format=%H",
+`${from}^..${to}`
+], { nodeOptions: {
+cwd: workspaceRoot,
+stdio: "pipe"
+} });
+const lines = stdout.trim().split("\n").filter((line) => line.trim() !== "");
+let currentSha = null;
+const HASH_REGEX = /^[0-9a-f]{40}$/i;
+for (const line of lines) {
+const trimmedLine = line.trim();
+if (HASH_REGEX.test(trimmedLine)) {
+currentSha = trimmedLine;
+commitsMap.set(currentSha, []);
+continue;
+}
+if (currentSha === null) continue;
+commitsMap.get(currentSha).push(trimmedLine);
+}
+return commitsMap;
+} catch {
+return null;
+}
+}
+
+//#endregion
+//#region src/core/changelog.ts
+const DEFAULT_CHANGELOG_TEMPLATE = dedent`
+<% if (it.previousVersion) { %>
+## [<%= it.version %>](<%= it.compareUrl %>) (<%= it.date %>)
+<% } else { %>
+## <%= it.version %> (<%= it.date %>)
+<% } %>
+
+<% it.groups.forEach((group) => { %>
+<% if (group.commits.length > 0) { %>
+### <%= group.title %>
+
+<% group.commits.forEach((commit) => { %>
+* <%= commit.line %>
+<% }) %>
+
+<% } %>
+<% }) %>
+`;
+async function generateChangelogEntry(options) {
+const { packageName, version, previousVersion, date, commits, owner, repo, groups, template, githubClient } = options;
+const compareUrl = previousVersion ? `https://github.com/${owner}/${repo}/compare/${packageName}@${previousVersion}...${packageName}@${version}` : void 0;
+const grouped = groupByType(commits, {
+includeNonConventional: false,
+mergeKeys: Object.fromEntries(groups.map((g) => [g.name, g.types]))
+});
+for (const commit of commits) {
+if (commit.authors.length === 0) continue;
+commit.resolvedAuthors = await Promise.all(commit.authors.map(async (author) => {
+const username = await githubClient.resolveAuthorInfo(author.email);
+return {
+...author,
+username
+};
+}));
+}
+const templateData = {
+packageName,
+version,
+previousVersion,
+date,
+compareUrl,
+owner,
+repo,
+groups: groups.map((group) => {
+const commitsInGroup = grouped.get(group.name) ?? [];
+if (commitsInGroup.length > 0) logger.verbose(`Found ${commitsInGroup.length} commits for group "${group.name}".`);
+const formattedCommits = commitsInGroup.map((commit) => {
+const commitUrl = `https://github.com/${owner}/${repo}/commit/${commit.hash}`;
+let line = `${commit.description}`;
+if (commit.references.length > 0) logger.verbose("Located references in commit", commit.references.length);
+for (const ref of commit.references) {
+if (!ref.value) continue;
+const number = Number.parseInt(ref.value.replace(/^#/, ""), 10);
+if (Number.isNaN(number)) continue;
+if (ref.type === "issue") {
+line += ` ([Issue ${ref.value}](https://github.com/${owner}/${repo}/issues/${number}))`;
+continue;
+}
+line += ` ([PR ${ref.value}](https://github.com/${owner}/${repo}/pull/${number}))`;
+}
+line += ` ([${commit.shortHash}](${commitUrl}))`;
+if (commit.authors.length > 0) {
+console.log(commit.resolvedAuthors);
+line += ` (by ${commit.authors.map((a) => a.name).join(", ")})`;
+}
+return { line };
+});
+return {
+name: group.name,
+title: group.title,
+commits: formattedCommits
+};
+})
+};
+const eta = new Eta();
+const templateToUse = template || DEFAULT_CHANGELOG_TEMPLATE;
+return eta.renderString(templateToUse, templateData).trim();
+}
+async function updateChangelog(options) {
+const { version, previousVersion, commits, date, normalizedOptions, workspacePackage } = options;
+const changelogPath = join(workspacePackage.path, "CHANGELOG.md");
+const changelogRelativePath = relative(normalizedOptions.workspaceRoot, join(workspacePackage.path, "CHANGELOG.md"));
+const existingContent = await readFileFromGit(normalizedOptions.workspaceRoot, normalizedOptions.branch.default, changelogRelativePath);
+logger.verbose("Existing content found: ", Boolean(existingContent));
+const newEntry = await generateChangelogEntry({
+packageName: workspacePackage.name,
+version,
+previousVersion,
+date,
+commits,
+owner: normalizedOptions.owner,
+repo: normalizedOptions.repo,
+groups: normalizedOptions.groups,
+template: normalizedOptions.changelog?.template,
+githubClient: options.githubClient
+});
+let updatedContent;
+if (!existingContent) {
+updatedContent = `# ${workspacePackage.name}\n\n${newEntry}\n`;
+await writeFile(changelogPath, updatedContent, "utf-8");
+return;
+}
+const parsed = parseChangelog(existingContent);
+const lines = existingContent.split("\n");
+const existingVersionIndex = parsed.versions.findIndex((v) => v.version === version);
+if (existingVersionIndex !== -1) {
+const existingVersion = parsed.versions[existingVersionIndex];
+const before = lines.slice(0, existingVersion.lineStart);
+const after = lines.slice(existingVersion.lineEnd + 1);
+updatedContent = [
+...before,
+newEntry,
+...after
+].join("\n");
+} else {
+const insertAt = parsed.headerLineEnd + 1;
+const before = lines.slice(0, insertAt);
+const after = lines.slice(insertAt);
+if (before.length > 0 && before[before.length - 1] !== "") before.push("");
+updatedContent = [
+...before,
+newEntry,
+"",
+...after
+].join("\n");
+}
+await writeFile(changelogPath, updatedContent, "utf-8");
+}
+function parseChangelog(content) {
+const lines = content.split("\n");
+let packageName = null;
+let headerLineEnd = -1;
+const versions = [];
+for (let i = 0; i < lines.length; i++) {
+const line = lines[i].trim();
+if (line.startsWith("# ")) {
+packageName = line.slice(2).trim();
+headerLineEnd = i;
+break;
+}
+}
+for (let i = headerLineEnd + 1; i < lines.length; i++) {
+const line = lines[i].trim();
+if (line.startsWith("## ")) {
+const versionMatch = line.match(/##\s+(?:<small>)?\[?([^\](\s<]+)/);
+if (versionMatch) {
+const version = versionMatch[1];
+const lineStart = i;
+let lineEnd = lines.length - 1;
+for (let j = i + 1; j < lines.length; j++) if (lines[j].trim().startsWith("## ")) {
+lineEnd = j - 1;
+break;
+}
+const versionContent = lines.slice(lineStart, lineEnd + 1).join("\n");
+versions.push({
+version,
+lineStart,
+lineEnd,
+content: versionContent
+});
+}
+}
+}
+return {
+packageName,
+versions,
+headerLineEnd
+};
+}
 
 //#endregion
 //#region src/core/github.ts
-
-
-
-
-
-
-
-
-
-
+var GitHubClient = class {
+owner;
+repo;
+githubToken;
+apiBase = "https://api.github.com";
+constructor({ owner, repo, githubToken }) {
+this.owner = owner;
+this.repo = repo;
+this.githubToken = githubToken;
+}
+async request(path, init = {}) {
+const url = path.startsWith("http") ? path : `${this.apiBase}${path}`;
+const res = await fetch(url, {
+...init,
+headers: {
+...init.headers,
+Accept: "application/vnd.github.v3+json",
+Authorization: `token ${this.githubToken}`
+}
+});
+if (!res.ok) {
+const errorText = await res.text();
+throw new Error(`GitHub API request failed with status ${res.status}: ${errorText || "No response body"}`);
+}
+if (res.status === 204) return;
+return res.json();
+}
+async getExistingPullRequest(branch) {
+const head = branch.includes(":") ? branch : `${this.owner}:${branch}`;
+const endpoint = `/repos/${this.owner}/${this.repo}/pulls?state=open&head=${encodeURIComponent(head)}`;
+logger.verbose(`Requesting pull request for branch: ${branch} (url: ${this.apiBase}${endpoint})`);
+const pulls = await this.request(endpoint);
+if (!Array.isArray(pulls) || pulls.length === 0) return null;
 const firstPullRequest = pulls[0];
 if (typeof firstPullRequest !== "object" || firstPullRequest === null || !("number" in firstPullRequest) || typeof firstPullRequest.number !== "number" || !("title" in firstPullRequest) || typeof firstPullRequest.title !== "string" || !("body" in firstPullRequest) || typeof firstPullRequest.body !== "string" || !("draft" in firstPullRequest) || typeof firstPullRequest.draft !== "boolean" || !("html_url" in firstPullRequest) || typeof firstPullRequest.html_url !== "string") throw new TypeError("Pull request data validation failed");
 const pullRequest = {
@@ -354,20 +598,15 @@ async function getExistingPullRequest({ owner, repo, branch, githubToken }) {
 title: firstPullRequest.title,
 body: firstPullRequest.body,
 draft: firstPullRequest.draft,
-html_url: firstPullRequest.html_url
+html_url: firstPullRequest.html_url,
+head: "head" in firstPullRequest && typeof firstPullRequest.head === "object" && firstPullRequest.head !== null && "sha" in firstPullRequest.head && typeof firstPullRequest.head.sha === "string" ? { sha: firstPullRequest.head.sha } : void 0
 };
 logger.info(`Found existing pull request: ${farver.yellow(`#${pullRequest.number}`)}`);
 return pullRequest;
-} catch (err) {
-logger.error("Error fetching pull request:", err);
-return null;
 }
-}
-
-
-const isUpdate = pullNumber != null;
-const url = isUpdate ? `https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}` : `https://api.github.com/repos/${owner}/${repo}/pulls`;
-const method = isUpdate ? "PATCH" : "POST";
+async upsertPullRequest({ title, body, head, base, pullNumber }) {
+const isUpdate = typeof pullNumber === "number";
+const endpoint = isUpdate ? `/repos/${this.owner}/${this.repo}/pulls/${pullNumber}` : `/repos/${this.owner}/${this.repo}/pulls`;
 const requestBody = isUpdate ? {
 title,
 body
@@ -378,17 +617,11 @@ async function upsertPullRequest({ owner, repo, title, body, head, base, pullNum
 base,
 draft: true
 };
-logger.verbose(`${isUpdate ? "Updating" : "Creating"} pull request (url: ${
-const
-method,
-headers: {
-Accept: "application/vnd.github.v3+json",
-Authorization: `token ${githubToken}`
-},
+logger.verbose(`${isUpdate ? "Updating" : "Creating"} pull request (url: ${this.apiBase}${endpoint})`);
+const pr = await this.request(endpoint, {
+method: isUpdate ? "PATCH" : "POST",
 body: JSON.stringify(requestBody)
 });
-if (!res.ok) throw new Error(`GitHub API request failed with status ${res.status}`);
-const pr = await res.json();
 if (typeof pr !== "object" || pr === null || !("number" in pr) || typeof pr.number !== "number" || !("title" in pr) || typeof pr.title !== "string" || !("body" in pr) || typeof pr.body !== "string" || !("draft" in pr) || typeof pr.draft !== "boolean" || !("html_url" in pr) || typeof pr.html_url !== "string") throw new TypeError("Pull request data validation failed");
 const action = isUpdate ? "Updated" : "Created";
 logger.info(`${action} pull request: ${farver.yellow(`#${pr.number}`)}`);
@@ -399,12 +632,32 @@ async function upsertPullRequest({ owner, repo, title, body, head, base, pullNum
 draft: pr.draft,
 html_url: pr.html_url
 };
-} catch (err) {
-logger.error(`Error upserting pull request:`, err);
-throw err;
 }
+async setCommitStatus({ sha, state, targetUrl, description, context }) {
+const endpoint = `/repos/${this.owner}/${this.repo}/statuses/${sha}`;
+logger.verbose(`Setting commit status on ${sha} to ${state} (url: ${this.apiBase}${endpoint})`);
+await this.request(endpoint, {
+method: "POST",
+body: JSON.stringify({
+state,
+target_url: targetUrl,
+description: description || "",
+context
+})
+});
+logger.info(`Commit status set to ${farver.cyan(state)} for ${farver.gray(sha.substring(0, 7))}`);
+}
+async resolveAuthorInfo(email) {
+const q = encodeURIComponent(`${email} type:user in:email`);
+const data = await this.request(`/search/users?q=${q}`);
+if (!data.items || data.items.length === 0) return null;
+return data.items[0].login;
+}
+};
+function createGitHubClient(options) {
+return new GitHubClient(options);
 }
-const
+const DEFAULT_PR_BODY_TEMPLATE = dedent`
 This PR was automatically generated by the release script.
 
 The following packages have been prepared for release:
@@ -427,7 +680,7 @@ function dedentString(str) {
 }
 function generatePullRequestBody(updates, body) {
 const eta = new Eta();
-const bodyTemplate = body ? dedentString(body) :
+const bodyTemplate = body ? dedentString(body) : DEFAULT_PR_BODY_TEMPLATE;
 return eta.renderString(bodyTemplate, { packages: updates.map((u) => ({
 name: u.package.name,
 currentVersion: u.currentVersion,
@@ -439,22 +692,6 @@ function generatePullRequestBody(updates, body) {
 
 //#endregion
 //#region src/versioning/commits.ts
-async function getLastPackageTag(packageName, workspaceRoot) {
-try {
-const { stdout } = await run("git", [
-"tag",
-"--list",
-`${packageName}@*`
-], { nodeOptions: {
-cwd: workspaceRoot,
-stdio: "pipe"
-} });
-return stdout.split("\n").map((tag) => tag.trim()).filter(Boolean).reverse()[0];
-} catch (err) {
-logger.warn(`Failed to get tags for package ${packageName}: ${err.message}`);
-return;
-}
-}
 function determineHighestBump(commits) {
 if (commits.length === 0) return "none";
 let highestBump = "none";
@@ -467,78 +704,33 @@ function determineHighestBump(commits) {
 return highestBump;
 }
 /**
-*
+* Get commits grouped by workspace package.
+* For each package, retrieves all commits since its last release tag that affect that package.
 *
-* @param {string} workspaceRoot - The root directory of the workspace
-* @param {WorkspacePackage}
-* @returns {Promise<GitCommit[]
+* @param {string} workspaceRoot - The root directory of the workspace
+* @param {WorkspacePackage[]} packages - Array of workspace packages to analyze
+* @returns {Promise<Map<string, GitCommit[]>>} A map of package names to their commits since their last release
 */
-async function
-const lastTag = await getLastPackageTag(pkg.name, workspaceRoot);
-const allCommits = getCommits({
-from: lastTag,
-to: "HEAD",
-cwd: workspaceRoot
-});
-logger.verbose("Found commits for package", `${farver.cyan(allCommits.length)} for ${farver.bold(pkg.name)} since ${lastTag || "beginning"}`);
-const commitsAffectingPackage = getCommits({
-from: lastTag,
-to: "HEAD",
-cwd: workspaceRoot,
-folder: pkg.path
-});
-const affectingCommitShas = /* @__PURE__ */ new Set();
-for (const commit of commitsAffectingPackage) affectingCommitShas.add(commit.shortHash);
-const packageCommits = allCommits.filter((commit) => {
-return affectingCommitShas.has(commit.shortHash);
-});
-logger.verbose("Commits affect package", `${farver.cyan(packageCommits.length)} affect ${farver.bold(pkg.name)}`);
-return packageCommits;
-}
-async function getWorkspacePackageCommits(workspaceRoot, packages) {
+async function getWorkspacePackageGroupedCommits(workspaceRoot, packages) {
 const changedPackages = /* @__PURE__ */ new Map();
 const promises = packages.map(async (pkg) => {
+const lastTag = await getMostRecentPackageTag(workspaceRoot, pkg.name);
+const allCommits = await getCommits({
+from: lastTag,
+to: "HEAD",
+cwd: workspaceRoot,
+folder: pkg.path
+});
+logger.verbose(`Found ${farver.cyan(allCommits.length)} commits for package ${farver.bold(pkg.name)} since tag ${farver.cyan(lastTag ?? "N/A")}`);
 return {
 pkgName: pkg.name,
-commits:
+commits: allCommits
 };
 });
 const results = await Promise.all(promises);
 for (const { pkgName, commits } of results) changedPackages.set(pkgName, commits);
 return changedPackages;
 }
-async function getCommitFileList(workspaceRoot, from, to) {
-const map = /* @__PURE__ */ new Map();
-try {
-const { stdout } = await run("git", [
-"log",
-"--name-only",
-"--format=%H",
-`${from}^..${to}`
-], { nodeOptions: {
-cwd: workspaceRoot,
-stdio: "pipe"
-} });
-const lines = stdout.trim().split("\n");
-let currentSha = null;
-for (const line of lines) {
-const trimmedLine = line.trim();
-if (trimmedLine === "") {
-currentSha = null;
-continue;
-}
-if (currentSha === null) {
-currentSha = trimmedLine;
-map.set(currentSha, []);
-continue;
-}
-map.get(currentSha).push(trimmedLine);
-}
-return map;
-} catch {
-return null;
-}
-}
 /**
 * Check if a file path touches any package folder.
 * @param file - The file path to check
@@ -556,15 +748,43 @@ function fileMatchesPackageFolder(file, packagePaths, workspaceRoot) {
 }
 /**
 * Check if a commit is a "global" commit (doesn't touch any package folder).
+* @param workspaceRoot - The workspace root
 * @param files - Array of files changed in the commit
 * @param packagePaths - Set of normalized package paths
-* @param workspaceRoot - The workspace root
 * @returns true if this is a global commit
 */
-function isGlobalCommit(files, packagePaths
+function isGlobalCommit(workspaceRoot, files, packagePaths) {
 if (!files || files.length === 0) return false;
 return !files.some((file) => fileMatchesPackageFolder(file, packagePaths, workspaceRoot));
 }
+const DEPENDENCY_FILES = [
+"package.json",
+"pnpm-lock.yaml",
+"pnpm-workspace.yaml",
+"yarn.lock",
+"package-lock.json"
+];
+/**
+* Find the oldest and newest commits across all packages.
+* @param packageCommits - Map of package commits
+* @returns Object with oldest and newest commit SHAs, or null if no commits
+*/
+function findCommitRange(packageCommits) {
+let oldestCommit = null;
+let newestCommit = null;
+for (const commits of packageCommits.values()) {
+if (commits.length === 0) continue;
+const firstCommit = commits[0].shortHash;
+const lastCommit = commits[commits.length - 1].shortHash;
+if (!newestCommit) newestCommit = firstCommit;
+oldestCommit = lastCommit;
+}
+if (!oldestCommit || !newestCommit) return null;
+return {
+oldest: oldestCommit,
+newest: newestCommit
+};
+}
 /**
 * Get global commits for each package based on their individual commit timelines.
 * This solves the problem where packages with different release histories need different global commits.
@@ -587,18 +807,13 @@ async function getGlobalCommitsPerPackage(workspaceRoot, packageCommits, allPack
 return result;
 }
 logger.verbose(`Computing global commits per-package (mode: ${farver.cyan(mode)})`);
-
-
-for (const commits of packageCommits.values()) if (commits.length > 0) {
-if (!newestCommit) newestCommit = commits[0].shortHash;
-oldestCommit = commits[commits.length - 1].shortHash;
-}
-if (!oldestCommit || !newestCommit) {
+const commitRange = findCommitRange(packageCommits);
+if (!commitRange) {
 logger.verbose("No commits found across packages");
 return result;
 }
-logger.verbose("Fetching files for commits range", `${farver.cyan(
-const commitFilesMap = await
+logger.verbose("Fetching files for commits range", `${farver.cyan(commitRange.oldest)}..${farver.cyan(commitRange.newest)}`);
+const commitFilesMap = await getGroupedFilesByCommitSha(workspaceRoot, commitRange.oldest, commitRange.newest);
 if (!commitFilesMap) {
 logger.warn("Failed to get commit file list, returning empty global commits");
 return result;
@@ -606,34 +821,29 @@ async function getGlobalCommitsPerPackage(workspaceRoot, packageCommits, allPack
 logger.verbose("Got file lists for commits", `${farver.cyan(commitFilesMap.size)} commits in ONE git call`);
 const packagePaths = new Set(allPackages.map((p) => p.path));
 for (const [pkgName, commits] of packageCommits) {
-const
+const globalCommitsAffectingPackage = [];
 logger.verbose("Filtering global commits for package", `${farver.bold(pkgName)} from ${farver.cyan(commits.length)} commits`);
-for (const commit of commits)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-return dependencyFiles.includes(normalizedFile);
-})) {
-logger.verbose("Global commit affects dependencies", `${farver.bold(pkgName)}: commit ${farver.cyan(commit.shortHash)} affects dependencies`);
-dependencyCommits.push(commit);
-}
+for (const commit of commits) {
+const files = commitFilesMap.get(commit.shortHash);
+if (!files) continue;
+if (isGlobalCommit(workspaceRoot, files, packagePaths)) globalCommitsAffectingPackage.push(commit);
+}
+logger.verbose("Package global commits found", `${farver.bold(pkgName)}: ${farver.cyan(globalCommitsAffectingPackage.length)} global commits`);
+if (mode === "all") {
+result.set(pkgName, globalCommitsAffectingPackage);
+continue;
+}
+const dependencyCommits = [];
+for (const commit of globalCommitsAffectingPackage) {
+const files = commitFilesMap.get(commit.shortHash);
+if (!files) continue;
+if (files.some((file) => DEPENDENCY_FILES.includes(file.startsWith("./") ? file.slice(2) : file))) {
+logger.verbose("Global commit affects dependencies", `${farver.bold(pkgName)}: commit ${farver.cyan(commit.shortHash)} affects dependencies`);
+dependencyCommits.push(commit);
 }
-logger.verbose("Global commits affect dependencies", `${farver.bold(pkgName)}: ${farver.cyan(dependencyCommits.length)} global commits affect dependencies`);
-result.set(pkgName, dependencyCommits);
 }
+logger.verbose("Global commits affect dependencies", `${farver.bold(pkgName)}: ${farver.cyan(dependencyCommits.length)} global commits affect dependencies`);
+result.set(pkgName, dependencyCommits);
 }
 return result;
 }
@@ -818,7 +1028,7 @@ function formatCommitsForDisplay(commits) {
 const commitsToShow = commits.slice(0, maxCommitsToShow);
 const hasMore = commits.length > maxCommitsToShow;
 const typeLength = commits.map(({ type }) => type.length).reduce((a, b) => Math.max(a, b), 0);
-const scopeLength = commits.map(({ scope }) => scope
+const scopeLength = commits.map(({ scope }) => scope?.length).reduce((a, b) => Math.max(a || 0, b || 0), 0) || 0;
 const formattedCommits = commitsToShow.map((commit) => {
 let color = messageColorMap[commit.type] || ((c) => c);
 if (commit.isBreaking) color = (s) => farver.inverse.red(s);
@@ -866,7 +1076,7 @@ async function calculateVersionUpdates({ workspacePackages, packageCommits, work
 if (selectedVersion === null) continue;
 newVersion = selectedVersion;
 }
-logger.
+logger.verbose(`Version update: ${pkg.version} → ${newVersion}`);
 versionUpdates.push({
 package: pkg,
 currentVersion: pkg.version,
@@ -923,35 +1133,24 @@ async function updatePackageJson(pkg, newVersion, dependencyUpdates) {
 const content = await readFile(packageJsonPath, "utf-8");
 const packageJson = JSON.parse(content);
 packageJson.version = newVersion;
-
-if (
-
-
-
-
-
-packageJson.dependencies[depName] = `^${depVersion}`;
-logger.verbose(` - Updated dependency ${depName}: ${oldVersion} → ^${depVersion}`);
-}
-if (packageJson.devDependencies?.[depName]) {
-const oldVersion = packageJson.devDependencies[depName];
-if (oldVersion === "workspace:*") {
-logger.verbose(` - Skipping workspace:* devDependency: ${depName}`);
-continue;
-}
-packageJson.devDependencies[depName] = `^${depVersion}`;
-logger.verbose(` - Updated devDependency ${depName}: ${oldVersion} → ^${depVersion}`);
+function updateDependency(deps, depName, depVersion, isPeerDependency = false) {
+if (!deps) return;
+const oldVersion = deps[depName];
+if (!oldVersion) return;
+if (oldVersion === "workspace:*") {
+logger.verbose(` - Skipping workspace:* dependency: ${depName}`);
+return;
 }
-if (
-const oldVersion = packageJson.peerDependencies[depName];
-if (oldVersion === "workspace:*") {
-logger.verbose(` - Skipping workspace:* peerDependency: ${depName}`);
-continue;
-}
+if (isPeerDependency) {
 const majorVersion = depVersion.split(".")[0];
-
-
-}
+deps[depName] = `>=${depVersion} <${Number(majorVersion) + 1}.0.0`;
+} else deps[depName] = `^${depVersion}`;
+logger.verbose(` - Updated dependency ${depName}: ${oldVersion} → ${deps[depName]}`);
+}
+for (const [depName, depVersion] of dependencyUpdates) {
+updateDependency(packageJson.dependencies, depName, depVersion);
+updateDependency(packageJson.devDependencies, depName, depVersion);
+updateDependency(packageJson.peerDependencies, depName, depVersion, true);
 }
 await writeFile(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, "utf-8");
 logger.verbose(` - Successfully wrote updated package.json`);
@@ -1123,6 +1322,84 @@ function shouldIncludePackage(pkg, options) {
 return true;
 }
 
+//#endregion
+//#region src/shared/options.ts
+const DEFAULT_COMMIT_GROUPS = [
+{
+name: "features",
+title: "Features",
+types: ["feat"]
+},
+{
+name: "fixes",
+title: "Bug Fixes",
+types: ["fix", "perf"]
+},
+{
+name: "refactor",
+title: "Refactoring",
+types: ["refactor"]
+},
+{
+name: "docs",
+title: "Documentation",
+types: ["docs"]
+}
+];
+function normalizeSharedOptions(options) {
+const { workspaceRoot = process.cwd(), githubToken = "", repo: fullRepo, packages = true, prompts: prompts$1 = {
+packages: true,
+versions: true
+}, groups = DEFAULT_COMMIT_GROUPS } = options;
+if (!githubToken.trim()) exitWithError("GitHub token is required", "Set GITHUB_TOKEN environment variable or pass it in options");
+if (!fullRepo || !fullRepo.trim() || !fullRepo.includes("/")) exitWithError("Repository (repo) is required", "Specify the repository in 'owner/repo' format (e.g., 'octocat/hello-world')");
+const [owner, repo] = fullRepo.split("/");
+if (!owner || !repo) exitWithError(`Invalid repo format: "${fullRepo}"`, "Expected format: \"owner/repo\" (e.g., \"octocat/hello-world\")");
+return {
+packages: typeof packages === "object" && !Array.isArray(packages) ? {
+exclude: packages.exclude ?? [],
+include: packages.include ?? [],
+excludePrivate: packages.excludePrivate ?? false
+} : packages,
+prompts: {
+packages: prompts$1?.packages ?? true,
+versions: prompts$1?.versions ?? true
+},
+workspaceRoot,
+githubToken,
+owner,
+repo,
+groups
+};
+}
+async function normalizeReleaseOptions(options) {
+const normalized = normalizeSharedOptions(options);
+let defaultBranch = options.branch?.default?.trim();
+const releaseBranch = options.branch?.release?.trim() ?? "release/next";
+if (defaultBranch == null || defaultBranch === "") {
+defaultBranch = await getDefaultBranch(normalized.workspaceRoot);
+if (!defaultBranch) exitWithError("Could not determine default branch", "Please specify the default branch in options");
+}
+if (defaultBranch === releaseBranch) exitWithError(`Default branch and release branch cannot be the same: "${defaultBranch}"`, "Specify different branches for default and release");
+const availableBranches = await getAvailableBranches(normalized.workspaceRoot);
+if (!availableBranches.includes(defaultBranch)) exitWithError(`Default branch "${defaultBranch}" does not exist in the repository`, `Available branches: ${availableBranches.join(", ")}`);
+logger.verbose(`Using default branch: ${farver.green(defaultBranch)}`);
+return {
+...normalized,
+branch: {
+release: releaseBranch,
+default: defaultBranch
+},
+safeguards: options.safeguards ?? true,
+globalCommitMode: options.globalCommitMode ?? "dependencies",
+pullRequest: {
+title: options.pullRequest?.title ?? "chore: release new version",
+body: options.pullRequest?.body ?? DEFAULT_PR_BODY_TEMPLATE
+},
+changelog: { enabled: options.changelog?.enabled ?? true }
+};
+}
+
 //#endregion
 //#region src/release.ts
 async function release(options) {
@@ -1140,11 +1417,11 @@ async function release(options) {
 logger.item(` ${farver.gray("→")} ${farver.gray(pkg.path)}`);
 }
 logger.emptyLine();
-const
-const globalCommitsPerPackage = await getGlobalCommitsPerPackage(workspaceRoot,
+const groupedPackageCommits = await getWorkspacePackageGroupedCommits(workspaceRoot, workspacePackages);
+const globalCommitsPerPackage = await getGlobalCommitsPerPackage(workspaceRoot, groupedPackageCommits, workspacePackages, normalizedOptions.globalCommitMode);
 const { allUpdates, applyUpdates } = await calculateAndPrepareVersionUpdates({
 workspacePackages,
-packageCommits,
+packageCommits: groupedPackageCommits,
 workspaceRoot,
 showPrompt: options.prompts?.versions !== false,
 globalCommitsPerPackage
@@ -1153,11 +1430,14 @@ async function release(options) {
 logger.section("🔄 Version Updates");
 logger.item(`Updating ${allUpdates.length} packages (including dependents)`);
 for (const update of allUpdates) logger.item(`${update.package.name}: ${update.currentVersion} → ${update.newVersion}`);
-const
-workspaceRoot,
+const githubClient = createGitHubClient({
 owner: normalizedOptions.owner,
 repo: normalizedOptions.repo,
-githubToken: normalizedOptions.githubToken
+githubToken: normalizedOptions.githubToken
+});
+const prOps = await orchestrateReleasePullRequest({
+workspaceRoot,
+githubClient,
 releaseBranch: normalizedOptions.branch.release,
 defaultBranch: normalizedOptions.branch.default,
 pullRequestTitle: options.pullRequest?.title,
@@ -1165,12 +1445,41 @@ async function release(options) {
 });
 await prOps.prepareBranch();
 await applyUpdates();
+if (normalizedOptions.changelog.enabled) {
+logger.step("Updating changelogs");
+const changelogPromises = allUpdates.map((update) => {
+const pkgCommits = groupedPackageCommits.get(update.package.name) || [];
+const globalCommits = globalCommitsPerPackage.get(update.package.name) || [];
+const allCommits = [...pkgCommits, ...globalCommits];
+if (allCommits.length === 0) {
+logger.verbose(`No commits for ${update.package.name}, skipping changelog`);
+return Promise.resolve();
+}
+logger.verbose(`Updating changelog for ${farver.cyan(update.package.name)}`);
+return updateChangelog({
+normalizedOptions: {
+...normalizedOptions,
+workspaceRoot
+},
+githubClient,
+workspacePackage: update.package,
+version: update.newVersion,
+previousVersion: update.currentVersion !== "0.0.0" ? update.currentVersion : void 0,
+commits: allCommits,
+date: (/* @__PURE__ */ new Date()).toISOString().split("T")[0]
+});
+}).filter((p) => p != null);
+const updates = await Promise.all(changelogPromises);
+logger.success(`Updated ${updates.length} changelog(s)`);
+}
 if (!await prOps.syncChanges(true)) if (prOps.doesReleasePRExist && prOps.existingPullRequest) {
 logger.item("No updates needed, PR is already up to date");
+const { pullRequest: pullRequest$1, created: created$1 } = await prOps.syncPullRequest(allUpdates);
+await prOps.cleanup();
 return {
 updates: allUpdates,
-prUrl:
-created:
+prUrl: pullRequest$1?.html_url,
+created: created$1
 };
 } else {
 logger.error("No changes to commit, and no existing PR. Nothing to do.");
@@ -1188,39 +1497,10 @@ async function release(options) {
 created
 };
 }
-async function
-const normalized = normalizeSharedOptions(options);
-let defaultBranch = options.branch?.default?.trim();
-const releaseBranch = options.branch?.release?.trim() ?? "release/next";
-if (defaultBranch == null || defaultBranch === "") {
-defaultBranch = await getDefaultBranch(normalized.workspaceRoot);
-if (!defaultBranch) exitWithError("Could not determine default branch", "Please specify the default branch in options");
-}
-if (defaultBranch === releaseBranch) exitWithError(`Default branch and release branch cannot be the same: "${defaultBranch}"`, "Specify different branches for default and release");
-const availableBranches = await getAvailableBranches(normalized.workspaceRoot);
-if (!availableBranches.includes(defaultBranch)) exitWithError(`Default branch "${defaultBranch}" does not exist in the repository`, `Available branches: ${availableBranches.join(", ")}`);
-logger.verbose(`Using default branch: ${farver.green(defaultBranch)}`);
-return {
-...normalized,
-branch: {
-release: releaseBranch,
-default: defaultBranch
-},
-safeguards: options.safeguards ?? true,
-globalCommitMode: options.globalCommitMode ?? "dependencies",
-pullRequest: options.pullRequest,
-changelog: { enabled: options.changelog?.enabled ?? true }
-};
-}
-async function orchestrateReleasePullRequest({ workspaceRoot, owner, repo, githubToken, releaseBranch, defaultBranch, pullRequestTitle, pullRequestBody }) {
+async function orchestrateReleasePullRequest({ workspaceRoot, githubClient, releaseBranch, defaultBranch, pullRequestTitle, pullRequestBody }) {
 const currentBranch = await getCurrentBranch(workspaceRoot);
 if (currentBranch !== defaultBranch) exitWithError(`Current branch is '${currentBranch}'. Please switch to the default branch '${defaultBranch}' before proceeding.`, `git checkout ${defaultBranch}`);
-const existingPullRequest = await getExistingPullRequest(
-owner,
-repo,
-branch: releaseBranch,
-githubToken
-});
+const existingPullRequest = await githubClient.getExistingPullRequest(releaseBranch);
 const doesReleasePRExist = !!existingPullRequest;
 if (doesReleasePRExist) logger.item("Found existing release pull request");
 else logger.item("Will create new pull request");
@@ -1244,7 +1524,6 @@ async function orchestrateReleasePullRequest({ workspaceRoot, owner, repo, githu
 const isBranchAhead = await isBranchAheadOfRemote(releaseBranch, workspaceRoot);
 if (!hasCommitted && !isBranchAhead) {
 logger.item("No changes to commit and branch is in sync with remote");
-await checkoutBranch(defaultBranch, workspaceRoot);
 return false;
 }
 logger.step("Pushing changes to remote");
@@ -1253,16 +1532,13 @@ async function orchestrateReleasePullRequest({ workspaceRoot, owner, repo, githu
 },
 async syncPullRequest(updates) {
 const prTitle = existingPullRequest?.title || pullRequestTitle || "chore: update package versions";
-const prBody =
-const pullRequest = await upsertPullRequest({
-owner,
-repo,
+const prBody = generatePullRequestBody(updates, pullRequestBody);
+const pullRequest = await githubClient.upsertPullRequest({
 pullNumber: existingPullRequest?.number,
 title: prTitle,
 body: prBody,
 head: releaseBranch,
-base: defaultBranch
-githubToken
+base: defaultBranch
 });
 logger.success(`${doesReleasePRExist ? "Updated" : "Created"} pull request: ${pullRequest?.html_url}`);
 return {
@@ -1277,4 +1553,8 @@ async function orchestrateReleasePullRequest({ workspaceRoot, owner, repo, githu
 }
 
 //#endregion
-
+//#region src/verify.ts
+function verify(_options) {}
+
+//#endregion
+export { publish, release, verify };

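
Aside (illustration only, not an export of this package): the new updateDependency helper in updatePackageJson above rewrites regular and dev dependencies to a caret range and peer dependencies to an open range capped below the next major, skipping workspace:* specifiers. The range rule in isolation, with a hypothetical helper name:

  // Restates the version-range logic of updateDependency; rangeFor() is illustrative only.
  function rangeFor(depVersion: string, isPeerDependency: boolean): string {
    if (isPeerDependency) {
      const major = Number(depVersion.split(".")[0]);
      return `>=${depVersion} <${major + 1}.0.0`; // e.g. "1.3.0" → ">=1.3.0 <2.0.0"
    }
    return `^${depVersion}`; // e.g. "1.3.0" → "^1.3.0"
  }
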
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@ucdjs/release-scripts",
-"version": "0.1.0-beta.
+"version": "0.1.0-beta.16",
 "description": "@ucdjs release scripts",
 "type": "module",
 "license": "MIT",
@@ -13,7 +13,8 @@
 "#versioning/*": "./src/versioning/*.ts",
 "#shared/*": "./src/shared/*.ts",
 "#release": "./src/release.ts",
-"#publish": "./src/publish.ts"
+"#publish": "./src/publish.ts",
+"#verify": "./src/verify.ts"
 },
 "exports": {
 ".": "./dist/index.mjs",
@@ -27,7 +28,7 @@
 ],
 "dependencies": {
 "@luxass/utils": "2.7.2",
-"commit-parser": "
+"commit-parser": "1.3.0",
 "farver": "1.0.0-beta.1",
 "mri": "1.2.0",
 "prompts": "2.4.2",
@@ -41,7 +42,8 @@
 "eta": "4.0.1",
 "tsdown": "0.16.0",
 "typescript": "5.9.3",
-"vitest": "4.0.4"
+"vitest": "4.0.4",
+"vitest-testdirs": "4.3.0"
 },
 "scripts": {
 "build": "tsdown",