split-by-codeowners 1.0.5 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -2
- package/action.yml +5 -0
- package/dist/index.js +162 -136
- package/dist/index.js.map +1 -1
- package/dist-cli/index.js +157 -136
- package/dist-cli/index.js.map +1 -1
- package/package.json +1 -1
package/README.md
CHANGED

@@ -56,6 +56,7 @@ npx split-by-codeowners --create-prs --base-branch main
 
 | Name | Required | Default | Description |
 | -------------------- | -------- | ------------------------------------- | ----------------------------------------------------------------------------- |
+| `repo_path` | no | `.` | Repo root path relative to `GITHUB_WORKSPACE` |
 | `codeowners_path` | no | `CODEOWNERS` | Path to CODEOWNERS file |
 | `base_ref` | no | `""` | Base ref for changed-files discovery (currently workspace-focused; see notes) |
 | `include_unowned` | no | `"true"` | Include files with no owners in a special bucket |
@@ -66,7 +67,7 @@ npx split-by-codeowners --create-prs --base-branch main
 | `bucket_prefix` | no | `bucket` | Patch file prefix |
 | `dry_run` | no | `"false"` | Compute buckets but don’t write patches |
 | `cleanup_patches` | no | `"false"` | Delete `patch_dir` after a successful run |
-| `create_prs` | no | `"
+| `create_prs` | no | `"true"` | Create/update one PR per bucket |
 | `github_token` | no | `""` | Token used for pushing branches + GitHub API (defaults to env `GITHUB_TOKEN`) |
 | `base_branch` | no | `""` | Base branch for PRs (defaults to repo default branch) |
 | `branch_prefix` | no | `codemods/` | Prefix for created branches |
@@ -90,7 +91,7 @@ npx split-by-codeowners --create-prs --base-branch main
         id: split
         uses: anatoliisf/split-by-codeowners@v1
         with:
-
+          draft: "true"
 
       - name: Use matrix
         run: echo '${{ steps.split.outputs.matrix_json }}'
@@ -120,6 +121,7 @@ npx split-by-codeowners --help
 
 #### Common
 
+- **`--repo-path <path>`**: Repo root path (relative to current working directory)
 - **`--codeowners <path>`**: Path to CODEOWNERS file (default: `CODEOWNERS`)
 - **`--exclude <file|->`**: File containing newline-separated glob patterns to exclude, or `-` to read from stdin
 - **`--include-unowned <true|false>`**: Include files with no owners in an `__UNOWNED__` bucket (default: `true`)
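A note on the bucket names documented above: the `__UNOWNED__` bucket and the owner-derived bucket keys come from the bucketizer visible in the compiled `dist/index.js` diff below. A minimal sketch of that derivation, with the normalization copied from the compiled code (the helper name `bucketKey` is ours):

// Sketch only: owners are sorted for a stable key, then characters that are
// awkward in branch and file names are normalized away.
function bucketKey(owners, unownedBucketKey = "__UNOWNED__") {
  const sorted = (owners ?? []).slice().sort();
  if (sorted.length === 0) return unownedBucketKey;
  return sorted
    .join("|")
    .replaceAll("@", "")   // "@alice" -> "alice"
    .replaceAll("/", "-")  // "org/team-a" -> "org-team-a"
    .replaceAll(" ", "");
}

// Example: bucketKey(["@org/team-a", "@alice"]) === "alice|org-team-a"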
package/action.yml
CHANGED

@@ -6,6 +6,11 @@ branding:
   color: "blue"
 
 inputs:
+  repo_path:
+    description: "Relative path under GITHUB_WORKSPACE to the repo root."
+    required: false
+    default: "."
+
   codeowners_path:
     description: "Path to CODEOWNERS. Defaults to CODEOWNERS at repo root."
    required: false
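The new `repo_path` input is resolved by the compiled entry point shown at the end of the `dist/index.js` diff below; condensed, with the same fallback order (the helper name `resolveRepoPath` is ours):

const path = require("node:path");

// Absolute paths pass through; relative paths resolve against GITHUB_WORKSPACE,
// falling back to the process working directory when the variable is unset.
function resolveRepoPath(repoPathInput = ".", workspace = process.env.GITHUB_WORKSPACE || "") {
  return path.isAbsolute(repoPathInput)
    ? repoPathInput
    : path.resolve(workspace || process.cwd(), repoPathInput);
}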
package/dist/index.js
CHANGED

@@ -27482,118 +27482,140 @@ function ensureDirExists(dir) {
     (0, buckets_1.ensureDir)(dir);
 }
 async function runSplit(config, logger) {
-
-
-
-
-
-
-    logger.info(`Changed files: ${changed.length} (after excludes: ${filtered.length})`);
-    if (!filtered.length) {
-        return { buckets: [], matrix: { include: [] }, prs: [] };
-    }
-    // 2) parse CODEOWNERS + bucketize by owners-set
-    const rules = (0, codeowners_1.parseCodeowners)(config.codeownersPath);
-    const bucketsMap = new Map();
-    for (const file of filtered) {
-        const { owners, rule } = (0, codeowners_1.ownersForFile)(file, rules);
-        const sortedOwners = (owners ?? []).slice().sort();
-        const isUnowned = sortedOwners.length === 0;
-        if (isUnowned && !config.includeUnowned)
-            continue;
-        const key = isUnowned
-            ? config.unownedBucketKey
-            : sortedOwners.join("|").replaceAll("@", "").replaceAll("/", "-").replaceAll(" ", "");
-        const existing = bucketsMap.get(key);
-        if (!existing) {
-            bucketsMap.set(key, {
-                key,
-                owners: sortedOwners,
-                files: [{ file, owners: sortedOwners, rule }]
-            });
-        }
-        else {
-            existing.files.push({ file, owners: sortedOwners, rule });
+    const originalCwd = process.cwd();
+    let resolvedRepoPath;
+    if (config.repoPath) {
+        resolvedRepoPath = node_path_1.default.resolve(originalCwd, config.repoPath);
+        if (!node_fs_1.default.existsSync(resolvedRepoPath)) {
+            throw new Error(`repo_path does not exist: ${resolvedRepoPath}`);
         }
+        process.chdir(resolvedRepoPath);
+        logger.info(`Using repo_path: ${resolvedRepoPath}`);
     }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try {
+        if (config.createPrs && config.dryRun) {
+            throw new Error("create_prs=true requires dry_run=false (we need patch files to create bucket branches/PRs).");
+        }
+        // 1) discover + filter changed files
+        const changed = (0, git_1.getChangedFiles)(config.baseRef);
+        const filtered = (0, buckets_1.applyExcludes)(changed, config.excludePatterns);
+        logger.info(`Changed files: ${changed.length} (after excludes: ${filtered.length})`);
+        if (!filtered.length) {
+            return { buckets: [], matrix: { include: [] }, prs: [] };
+        }
+        // 2) parse CODEOWNERS + bucketize by owners-set
+        const rules = (0, codeowners_1.parseCodeowners)(config.codeownersPath);
+        const bucketsMap = new Map();
+        for (const file of filtered) {
+            const { owners, rule } = (0, codeowners_1.ownersForFile)(file, rules);
+            const sortedOwners = (owners ?? []).slice().sort();
+            const isUnowned = sortedOwners.length === 0;
+            if (isUnowned && !config.includeUnowned)
+                continue;
+            const key = isUnowned
+                ? config.unownedBucketKey
+                : sortedOwners.join("|").replaceAll("@", "").replaceAll("/", "-").replaceAll(" ", "");
+            const existing = bucketsMap.get(key);
+            if (!existing) {
+                bucketsMap.set(key, {
+                    key,
+                    owners: sortedOwners,
+                    files: [{ file, owners: sortedOwners, rule }]
+                });
             }
+            else {
+                existing.files.push({ file, owners: sortedOwners, rule });
+            }
+        }
+        const buckets = [...bucketsMap.values()].sort((a, b) => a.key.localeCompare(b.key));
+        if (buckets.length > config.maxBuckets) {
+            throw new Error(`Too many buckets: ${buckets.length} > max_buckets=${config.maxBuckets}`);
+        }
+        // 3) write per-bucket patches
+        if (!config.dryRun) {
+            ensureDirExists(config.patchDir);
+            buckets.forEach((b, idx) => {
+                const patchPath = node_path_1.default.posix.join(config.patchDir.replaceAll("\\", "/"), `${config.bucketPrefix}-${idx + 1}.patch`);
+                const paths = b.files.map((f) => f.file);
+                logger.info(`Writing ${patchPath} (${paths.length} files) for bucket=${b.key}`);
+                (0, git_1.writePatchForPaths)(patchPath, paths);
+            });
         }
         else {
-            (
-        }
-        const
-
-
-
-
-
-
-
-
-
-
-
-
-            try {
-                (0, git_1.applyPatch)(patchPath, worktreeDir);
-                const committed = (0, git_1.commitAllStaged)(config.commitMessage, worktreeDir);
-                if (!committed) {
-                    logger.warn(`No staged changes for bucket=${b.key}; skipping push/PR.`);
-                    continue;
+            logger.info("dry_run=true; not generating patches.");
+        }
+        const matrix = (0, buckets_1.toMatrix)(buckets, config.patchDir, config.bucketPrefix);
+        // 4) optionally create PRs (worktrees so we don't disturb the current working tree)
+        let prs = undefined;
+        if (config.createPrs) {
+            const token = config.githubToken || process.env.GITHUB_TOKEN || process.env.GH_TOKEN || "";
+            const repo = config.repo ?? (0, git_1.parseGitHubRemote)((0, git_1.getRemoteUrl)(config.remoteName));
+            const isGitHubActions = process.env.GITHUB_ACTIONS === "true";
+            // Auth mode selection:
+            // - In GitHub Actions: ALWAYS use token-based API (gh may not be installed/auth'd).
+            // - Locally: ALWAYS use gh CLI for best DevX (no token-based local mode).
+            if (isGitHubActions) {
+                if (!token) {
+                    throw new Error("Missing GitHub token (set github_token input or GITHUB_TOKEN / GH_TOKEN env var)");
                 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            }
+            else {
+                (0, ghcli_1.assertGhAuthenticated)(process.cwd());
+            }
+            const useGhCli = !isGitHubActions;
+            const octokit = useGhCli ? null : (0, github_1.getOctokit)(token);
+            const baseBranch = config.baseBranch || (useGhCli ? (0, ghcli_1.getDefaultBranchViaGh)(process.cwd()) : await (0, github_1.getDefaultBranch)(octokit, repo));
+            const baseRef = "HEAD";
+            ensureDirExists((0, git_1.worktreeBaseDir)());
+            prs = [];
+            for (let i = 0; i < buckets.length; i++) {
+                const b = buckets[i];
+                const patchPath = node_path_1.default.posix.join(config.patchDir.replaceAll("\\", "/"), `${config.bucketPrefix}-${i + 1}.patch`);
+                const branch = `${config.branchPrefix}${b.key}`.replaceAll(" ", "");
+                const worktreeDir = (0, git_1.tempDirForBucket)(b.key);
+                logger.info(`Creating PR for bucket=${b.key} on branch=${branch}`);
+                (0, git_1.worktreeAdd)(branch, baseRef, worktreeDir);
+                try {
+                    (0, git_1.applyPatch)(patchPath, worktreeDir);
+                    const committed = (0, git_1.commitAllStaged)(config.commitMessage, worktreeDir);
+                    if (!committed) {
+                        logger.warn(`No staged changes for bucket=${b.key}; skipping push/PR.`);
+                        continue;
+                    }
+                    (0, git_1.pushBranch)(config.remoteName, branch, worktreeDir);
+                    const ownersStr = b.owners.length ? b.owners.join(", ") : "(unowned)";
+                    const filesStr = b.files.map(f => `- ${f.file}`).join("\n");
+                    const bucketInfo = formatTemplate("Automated changes bucketed by CODEOWNERS.\n\nOwners: {owners}\nBucket key: {bucket_key}\n\nFiles:\n{files}\n", { owners: ownersStr, bucket_key: b.key, files: filesStr });
+                    const title = formatTemplate(config.prTitle, { owners: ownersStr, bucket_key: b.key });
+                    let body;
+                    if (config.prBodyMode === "none") {
+                        body = undefined;
+                    }
+                    else if (config.prBodyMode === "custom") {
+                        body = formatTemplate(config.prBody, { owners: ownersStr, bucket_key: b.key, files: filesStr });
+                    }
+                    else {
+                        const template = (0, pr_template_1.readPrTemplate)(worktreeDir, config.prTemplatePath) ?? "";
+                        body =
+                            config.prBodyMode === "template_with_bucket"
+                                ? (template ? template.trimEnd() + "\n\n---\n\n" + bucketInfo : bucketInfo)
+                                : (template || bucketInfo);
+                    }
+                    const pr = useGhCli
+                        ? (() => {
+                            return (0, ghcli_1.upsertPullRequestViaGh)({
+                                cwd: worktreeDir,
+                                base: baseBranch,
+                                head: branch,
+                                title,
+                                body: body ?? "",
+                                draft: config.draft,
+                                bucketKey: b.key
+                            });
+                        })()
+                        : await (0, github_1.upsertPullRequest)({
+                            octokit: octokit,
+                            repo,
                             base: baseBranch,
                             head: branch,
                             title,
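The hunk above introduces the per-bucket PR loop; the next hunk completes it with `prs.push(pr)` and a `finally` that removes the worktree. Condensed to its control flow, it looks roughly like this (a sketch only: logging and PR-body templating are elided, and the package's internal git/GitHub helpers are passed in as parameters rather than imported):

const path = require("node:path");

// Sketch of the per-bucket PR loop: one throwaway worktree per bucket, with
// cleanup guaranteed by finally even when a bucket is skipped or fails.
async function createBucketPrs(buckets, cfg, git, upsertPullRequest) {
  const prs = [];
  for (let i = 0; i < buckets.length; i++) {
    const b = buckets[i];
    const patchPath = path.posix.join(cfg.patchDir, `${cfg.bucketPrefix}-${i + 1}.patch`);
    const branch = `${cfg.branchPrefix}${b.key}`.replaceAll(" ", "");
    const worktreeDir = git.tempDirForBucket(b.key);
    git.worktreeAdd(branch, "HEAD", worktreeDir);
    try {
      git.applyPatch(patchPath, worktreeDir);
      if (!git.commitAllStaged(cfg.commitMessage, worktreeDir)) continue; // nothing to push
      git.pushBranch(cfg.remoteName, branch, worktreeDir);
      prs.push(await upsertPullRequest({ base: cfg.baseBranch, head: branch, bucketKey: b.key }));
    } finally {
      git.worktreeRemove(worktreeDir);
    }
  }
  return prs;
}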
@@ -27601,41 +27623,35 @@ async function runSplit(config, logger) {
                             draft: config.draft,
                             bucketKey: b.key
                         });
-
-                        :
-
-
-
-
-
-
-
-
-
-
+                    prs.push(pr);
+                    logger.info(`PR: ${pr.url}`);
+                }
+                finally {
+                    (0, git_1.worktreeRemove)(worktreeDir);
+                }
+            }
+        }
+        if (config.cleanupPatches && !config.dryRun) {
+            const cwd = process.cwd();
+            const abs = node_path_1.default.resolve(cwd, config.patchDir);
+            const safePrefix = cwd.endsWith(node_path_1.default.sep) ? cwd : cwd + node_path_1.default.sep;
+            if (!abs.startsWith(safePrefix)) {
+                throw new Error(`Refusing to delete patch_dir outside repo: ${abs}`);
             }
-
-        (
+            if (abs === cwd) {
+                throw new Error("Refusing to delete patch_dir equal to repo root.");
+            }
+            if (node_fs_1.default.existsSync(abs)) {
+                logger.info(`Cleaning up patches dir: ${config.patchDir}`);
+                node_fs_1.default.rmSync(abs, { recursive: true, force: true });
             }
         }
+        return { buckets, matrix, prs };
     }
-
-
-
-    const safePrefix = cwd.endsWith(node_path_1.default.sep) ? cwd : cwd + node_path_1.default.sep;
-    if (!abs.startsWith(safePrefix)) {
-        throw new Error(`Refusing to delete patch_dir outside repo: ${abs}`);
-    }
-    if (abs === cwd) {
-        throw new Error("Refusing to delete patch_dir equal to repo root.");
-    }
-    if (node_fs_1.default.existsSync(abs)) {
-        logger.info(`Cleaning up patches dir: ${config.patchDir}`);
-        node_fs_1.default.rmSync(abs, { recursive: true, force: true });
-    }
+    finally {
+        if (resolvedRepoPath)
+            process.chdir(originalCwd);
     }
-    return { buckets, matrix, prs };
 }
 
 
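The `cleanup_patches` guard added in the hunk above deserves a callout: before `rmSync`, the resolved directory must sit strictly inside the repo root and must not be the root itself. The same checks as a standalone sketch (the helper name `cleanupPatchDir` is ours):

const path = require("node:path");
const fs = require("node:fs");

// Refuse to delete anything outside the repo root, or the root itself.
function cleanupPatchDir(patchDir, cwd = process.cwd()) {
  const abs = path.resolve(cwd, patchDir);
  const safePrefix = cwd.endsWith(path.sep) ? cwd : cwd + path.sep;
  if (!abs.startsWith(safePrefix)) {
    throw new Error(`Refusing to delete patch_dir outside repo: ${abs}`);
  }
  if (abs === cwd) {
    throw new Error("Refusing to delete patch_dir equal to repo root.");
  }
  if (fs.existsSync(abs)) {
    fs.rmSync(abs, { recursive: true, force: true });
  }
}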
@@ -28156,13 +28172,23 @@ var __importStar = (this && this.__importStar) || (function () {
     return result;
 };
 })();
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 const core = __importStar(__nccwpck_require__(7484));
+const node_path_1 = __importDefault(__nccwpck_require__(6760));
 const buckets_1 = __nccwpck_require__(4140);
 const app_1 = __nccwpck_require__(168);
 async function run() {
     try {
+        const repoPathInput = core.getInput("repo_path") || ".";
+        const workspace = process.env.GITHUB_WORKSPACE || "";
+        const repoPath = repoPathInput
+            ? (node_path_1.default.isAbsolute(repoPathInput) ? repoPathInput : node_path_1.default.resolve(workspace || process.cwd(), repoPathInput))
+            : undefined;
         const cfg = {
+            repoPath,
             codeownersPath: core.getInput("codeowners_path") || "CODEOWNERS",
             baseRef: core.getInput("base_ref") || "",
             includeUnowned: (0, buckets_1.parseBool)(core.getInput("include_unowned")),