nugit-cli 0.0.1-alpha
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +30 -0
- package/src/api-client.js +182 -0
- package/src/auth-token.js +14 -0
- package/src/cli-output.js +228 -0
- package/src/git-info.js +60 -0
- package/src/github-device-flow.js +64 -0
- package/src/github-pr-social.js +126 -0
- package/src/github-rest.js +212 -0
- package/src/nugit-stack.js +289 -0
- package/src/nugit-start.js +211 -0
- package/src/nugit.js +829 -0
- package/src/open-browser.js +21 -0
- package/src/split-view/run-split.js +181 -0
- package/src/split-view/split-git.js +88 -0
- package/src/split-view/split-ink.js +104 -0
- package/src/stack-discover.js +284 -0
- package/src/stack-discovery-config.js +91 -0
- package/src/stack-extra-commands.js +353 -0
- package/src/stack-graph.js +214 -0
- package/src/stack-helpers.js +58 -0
- package/src/stack-propagate.js +422 -0
- package/src/stack-view/fetch-pr-data.js +126 -0
- package/src/stack-view/ink-app.js +421 -0
- package/src/stack-view/loader.js +101 -0
- package/src/stack-view/open-url.js +18 -0
- package/src/stack-view/prompt-line.js +47 -0
- package/src/stack-view/run-stack-view.js +366 -0
- package/src/stack-view/static-render.js +98 -0
- package/src/token-store.js +45 -0
- package/src/user-config.js +169 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { spawn } from "child_process";

/**
 * Open a URL in the default browser (best-effort; no-op failure is ok for headless).
 *
 * Fix: the spawned child now gets a no-op "error" listener. Without one, a
 * missing launcher binary (e.g. no `xdg-open` on a headless Linux box) emits
 * an unhandled "error" event on the ChildProcess and crashes the whole
 * process — the opposite of the best-effort contract documented above.
 *
 * @param {string} url
 */
export function openInBrowser(url) {
  /**
   * Spawn a detached launcher and swallow both sync and async failures.
   * @param {string} cmd
   * @param {string[]} args
   */
  const launch = (cmd, args) => {
    try {
      const c = spawn(cmd, args, { detached: true, stdio: "ignore" });
      // Best-effort: ignore launch failures (ENOENT, permissions, ...).
      c.on("error", () => {});
      c.unref();
    } catch {
      /* best-effort: ignore synchronous spawn failures */
    }
  };
  const platform = process.platform;
  if (platform === "darwin") {
    launch("open", [url]);
    return;
  }
  if (platform === "win32") {
    // The empty "" is the window title; without it `start` would treat the
    // URL as the title argument.
    launch("cmd", ["/c", "start", "", url]);
    return;
  }
  // Linux/BSD and anything else: defer to xdg-open.
  launch("xdg-open", [url]);
}
|
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
import React from "react";
|
|
2
|
+
import { render } from "ink";
|
|
3
|
+
import { getPull, createPullRequest } from "../api-client.js";
|
|
4
|
+
import { githubPostIssueComment } from "../github-pr-social.js";
|
|
5
|
+
import {
|
|
6
|
+
readStackFile,
|
|
7
|
+
writeStackFile,
|
|
8
|
+
validateStackDoc,
|
|
9
|
+
stackEntryFromGithubPull
|
|
10
|
+
} from "../nugit-stack.js";
|
|
11
|
+
import { appendStackHistory } from "../stack-graph.js";
|
|
12
|
+
import {
|
|
13
|
+
assertCleanWorkingTree,
|
|
14
|
+
gitExec,
|
|
15
|
+
gitFetchRefs,
|
|
16
|
+
listChangedFilesBetween,
|
|
17
|
+
commitLayerFromPaths,
|
|
18
|
+
gitPushBranch
|
|
19
|
+
} from "./split-git.js";
|
|
20
|
+
import { SplitInkApp } from "./split-ink.js";
|
|
21
|
+
|
|
22
|
+
/**
 * Interactively split an existing PR into a stack of layered PRs.
 *
 * Flow: verify a clean working tree, load the PR, refuse fork PRs, fetch
 * base/head, diff them, run the Ink layer-assignment UI, then create one
 * branch + commit + PR per layer (each PR based on the previous one),
 * update `.nugit/stack.json` if present, append a history record, and
 * leave a comment on the original PR.
 *
 * @param {object} ctx
 * @param {string} ctx.root
 * @param {string} ctx.owner
 * @param {string} ctx.repo
 * @param {number} ctx.prNumber
 * @param {boolean} [ctx.dryRun]
 * @param {string} [ctx.remote]
 */
export async function runSplitCommand(ctx) {
  const { root, owner, repo, prNumber, dryRun = false, remote = "origin" } = ctx;
  // Refuse to run with uncommitted changes: we will checkout/branch below.
  assertCleanWorkingTree(root);
  const pull = await getPull(owner, repo, prNumber);
  const headRepo =
    pull.head && typeof pull.head === "object" && pull.head.repo && typeof pull.head.repo === "object"
      ? String(pull.head.repo.full_name || "")
      : "";
  const here = `${owner}/${repo}`.toLowerCase();
  // Fork PRs live in another repo; we can't push layer branches there.
  if (headRepo && headRepo.toLowerCase() !== here) {
    throw new Error(
      `nugit split does not support fork PRs in v1 (head repo ${headRepo}; expected ${owner}/${repo})`
    );
  }
  const baseBranch = pull.base.ref;
  const headBranch = pull.head.ref;
  gitFetchRefs(root, remote, baseBranch, headBranch);
  const baseRef = `${remote}/${baseBranch}`;
  const headRef = `${remote}/${headBranch}`;
  const files = listChangedFilesBetween(root, baseRef, headRef);
  if (!files.length) {
    throw new Error("No file changes between merge-base of base and head");
  }

  // The Ink app writes its result into exitPayload before exiting.
  const exitPayload = { next: null };
  const { waitUntilExit } = render(React.createElement(SplitInkApp, { files, exitPayload }));
  await waitUntilExit();

  const next = exitPayload.next;
  if (!next || next.type !== "confirm") {
    console.error("Split cancelled.");
    // Best-effort return to the base branch; failure here is non-fatal.
    try {
      gitExec(root, ["checkout", baseBranch]);
    } catch {
      /* ignore */
    }
    return;
  }
  const { byLayer, layerCount } = next;
  // Every layer must receive at least one file, or a PR would be empty.
  for (let L = 0; L < layerCount; L++) {
    if (!byLayer[L]?.length) {
      throw new Error(`Layer ${L} has no files — assign every changed file to a layer`);
    }
  }

  // Create one branch per layer; each layer is committed on top of the
  // previous layer's branch so the stack builds cumulatively.
  const prefix = `nugit-split/pr-${prNumber}`;
  /** @type {string[]} */
  const newBranches = [];
  let startRef = baseRef;
  for (let i = 0; i < layerCount; i++) {
    const b = `${prefix}-L${i}`;
    const did = commitLayerFromPaths(
      root,
      remote,
      b,
      startRef,
      headRef,
      byLayer[i],
      `nugit split: PR #${prNumber} layer ${i + 1}/${layerCount}`
    );
    if (!did) {
      throw new Error(`No commit produced for layer ${i}`);
    }
    newBranches.push(b);
    startRef = b;
  }

  if (dryRun) {
    console.error("Dry-run: branches (not pushed):", newBranches.join(", "));
    gitExec(root, ["checkout", baseBranch]);
    return;
  }

  for (const b of newBranches) {
    gitPushBranch(root, remote, b);
  }

  // Open one PR per layer; each PR's base is the previous layer's branch
  // (the first PR targets the original base branch).
  /** @type {number[]} */
  const newPrNumbers = [];
  let prevBase = baseBranch;
  for (let i = 0; i < newBranches.length; i++) {
    const title =
      pull.title != null
        ? `[split ${i + 1}/${newBranches.length}] ${pull.title}`
        : `Split of #${prNumber} (${i + 1}/${newBranches.length})`;
    const created = await createPullRequest(owner, repo, {
      title,
      head: newBranches[i],
      base: prevBase,
      body: `Split from #${prNumber} (nugit split layer ${i + 1}).\n\nOriginal: ${pull.html_url || ""}`
    });
    const num = /** @type {{ number?: number }} */ (created).number;
    if (typeof num !== "number") {
      throw new Error("GitHub did not return PR number");
    }
    newPrNumbers.push(num);
    prevBase = newBranches[i];
  }

  // If the split PR is tracked in the local stack file, replace its entry
  // with the new PRs (at the same position) and renumber positions.
  /** @type {Record<string, unknown> | null} */
  let docForHistory = null;
  const doc = readStackFile(root);
  if (doc) {
    validateStackDoc(doc);
    const idx = doc.prs.findIndex((p) => p.pr_number === prNumber);
    if (idx >= 0) {
      doc.prs.splice(idx, 1);
      const insertAt = idx;
      for (let i = 0; i < newPrNumbers.length; i++) {
        // Re-fetch each new PR so the stack entry reflects server state.
        const p2 = await getPull(owner, repo, newPrNumbers[i]);
        doc.prs.splice(insertAt + i, 0, stackEntryFromGithubPull(p2, insertAt + i));
      }
      for (let j = 0; j < doc.prs.length; j++) {
        doc.prs[j].position = j;
      }
      writeStackFile(root, doc);
      docForHistory = doc;
    } else {
      console.error(
        `Warning: PR #${prNumber} not in .nugit/stack.json — local stack file left unchanged.`
      );
    }
  } else {
    console.error("No .nugit/stack.json — skipped local stack file update.");
  }

  appendStackHistory(root, {
    action: "split",
    repo_full_name: `${owner}/${repo}`,
    tip_pr_number: newPrNumbers[newPrNumbers.length - 1],
    head_branch: newBranches[newBranches.length - 1],
    // Only attach a snapshot when we actually rewrote the stack file.
    ...(docForHistory ? { snapshot: docForHistory } : {}),
    from_pr: prNumber,
    new_prs: newPrNumbers
  });

  // Leave a breadcrumb on the original PR pointing at the new stack.
  await githubPostIssueComment(
    owner,
    repo,
    prNumber,
    `This PR was split into: ${newPrNumbers.map((n) => `#${n}`).join(", ")}. You can close this PR when the new stack is ready.`
  );

  // Best-effort return to the base branch; failure here is non-fatal.
  try {
    gitExec(root, ["checkout", baseBranch]);
  } catch {
    /* ignore */
  }

  console.error(`Split complete. New PRs: ${newPrNumbers.join(", ")}`);
}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import { execFileSync } from "child_process";
|
|
2
|
+
|
|
3
|
+
/**
 * Run a git command in `root` and return its trimmed stdout.
 *
 * On failure, throws a single Error carrying the child's stderr (or stdout,
 * or the original message), truncated to 800 characters.
 *
 * Fix: with `{ stdio: "inherit" }` (as `gitPushBranch` passes),
 * `execFileSync` returns `null` because stdout is not captured; the previous
 * code called `.trim()` on that `null` and threw a TypeError on every push.
 * Non-string output is now normalized to "".
 *
 * @param {string} root
 * @param {string[]} args
 * @param {{ stdio?: string }} [io]
 * @returns {string} trimmed stdout, or "" when stdout was not captured
 */
export function gitExec(root, args, io) {
  try {
    const out = execFileSync("git", args, {
      cwd: root,
      encoding: "utf8",
      stdio: io?.stdio || "pipe",
      maxBuffer: 20 * 1024 * 1024
    });
    // execFileSync returns null when stdout is inherited rather than piped.
    return typeof out === "string" ? out.trim() : "";
  } catch (e) {
    const err = /** @type {{ stderr?: Buffer, stdout?: Buffer, message?: string }} */ (e);
    const msg =
      (err.stderr && err.stderr.toString()) ||
      (err.stdout && err.stdout.toString()) ||
      err.message ||
      String(e);
    throw new Error(msg.trim().slice(0, 800));
  }
}
|
|
26
|
+
|
|
27
|
+
/**
 * Fetch both the base and head branches from the given remote so that
 * `remote/<branch>` refs are available locally.
 * @param {string} root
 * @param {string} remote
 * @param {string} baseBranch
 * @param {string} headBranch
 */
export function gitFetchRefs(root, remote, baseBranch, headBranch) {
  const fetchArgs = ["fetch", remote, baseBranch, headBranch];
  gitExec(root, fetchArgs, { stdio: "pipe" });
}
|
|
36
|
+
|
|
37
|
+
/**
 * List file paths changed between the merge-base of two refs and the head
 * (three-dot diff), one path per array element, blanks removed.
 * @param {string} root
 * @param {string} baseRef e.g. origin/main
 * @param {string} headRef e.g. origin/feat
 * @returns {string[]}
 */
export function listChangedFilesBetween(root, baseRef, headRef) {
  const range = `${baseRef}...${headRef}`;
  const raw = gitExec(root, ["diff", "--name-only", range]);
  const names = [];
  for (const line of raw.split("\n")) {
    const name = line.trim();
    if (name) {
      names.push(name);
    }
  }
  return names;
}
|
|
46
|
+
|
|
47
|
+
/**
 * Throw unless `git status --porcelain` reports no pending changes.
 * @param {string} root
 * @throws {Error} when the working tree has uncommitted changes
 */
export function assertCleanWorkingTree(root) {
  const status = gitExec(root, ["status", "--porcelain"]);
  if (status !== "") {
    throw new Error("Working tree is not clean; commit or stash before nugit split.");
  }
}
|
|
56
|
+
|
|
57
|
+
/**
 * Materialize one split layer: (re)create `branchName` at `startRef`,
 * check out the layer's paths from `headRef`, and commit the result.
 *
 * @param {string} root
 * @param {string} remote (currently unused; kept for interface stability)
 * @param {string} branchName local branch to create
 * @param {string} startRef branch or commit to start from
 * @param {string} headRef where to checkout paths from (e.g. origin/feat)
 * @param {string[]} paths
 * @param {string} message
 * @returns {boolean} true if a commit was created, false if the layer was empty
 */
export function commitLayerFromPaths(root, remote, branchName, startRef, headRef, paths, message) {
  // Deduplicate and drop empty entries before touching git.
  const layerPaths = [...new Set(paths)].filter(Boolean);
  // -B: create the branch, or reset it if it already exists.
  gitExec(root, ["checkout", "-B", branchName, startRef]);
  if (layerPaths.length) {
    gitExec(root, ["checkout", headRef, "--", ...layerPaths]);
  }
  gitExec(root, ["add", "-A"]);
  const pending = gitExec(root, ["status", "--porcelain"]);
  if (pending === "") {
    // Nothing staged — this layer produced no diff against startRef.
    return false;
  }
  gitExec(root, ["commit", "-m", message]);
  return true;
}
|
|
80
|
+
|
|
81
|
+
/**
 * Push a branch to the remote and set its upstream (`-u`), streaming git's
 * own progress output to the terminal.
 * @param {string} root
 * @param {string} remote
 * @param {string} branchName
 */
export function gitPushBranch(root, remote, branchName) {
  const pushArgs = ["push", "-u", remote, branchName];
  gitExec(root, pushArgs, { stdio: "inherit" });
}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import React, { useMemo, useState } from "react";
|
|
2
|
+
import { Box, Text, useApp, useInput } from "ink";
|
|
3
|
+
import chalk from "chalk";
|
|
4
|
+
|
|
5
|
+
/**
 * Interactive Ink UI for assigning changed files to split layers.
 *
 * Keys: j/k or arrows move the cursor; digits assign the highlighted file
 * to a layer; +/- grow/shrink the layer count (2–9); c confirms; q/Esc
 * cancels. The result is communicated by writing into `exitPayload.next`
 * before calling `exit()`.
 *
 * @param {object} props
 * @param {string[]} props.files
 * @param {{ next: null | Record<string, unknown> }} props.exitPayload
 */
export function SplitInkApp({ files, exitPayload }) {
  const { exit } = useApp();
  // Index of the currently highlighted file.
  const [fileIdx, setFileIdx] = useState(0);
  // Number of layers to split into (kept within 2..9 by the key handlers).
  const [layerCount, setLayerCount] = useState(2);
  // file path -> layer index; every file starts in layer 0.
  const [assign, setAssign] = useState(() => {
    /** @type {Record<string, number>} */
    const a = {};
    for (const f of files) {
      a[f] = 0;
    }
    return a;
  });

  // Clamp the cursor in case it outruns the file list.
  const safeIdx = files.length ? Math.min(fileIdx, files.length - 1) : 0;
  const current = files[safeIdx] || "";

  // Group files by assigned layer; assignments outside 0..layerCount-1
  // (possible after shrinking the layer count) are clamped into range.
  const byLayer = useMemo(() => {
    /** @type {string[][]} */
    const buckets = Array.from({ length: layerCount }, () => []);
    for (const f of files) {
      const L = Math.min(layerCount - 1, Math.max(0, assign[f] ?? 0));
      buckets[L].push(f);
    }
    return buckets;
  }, [files, assign, layerCount]);

  useInput((input, key) => {
    // q / Esc: cancel.
    if (input === "q" || key.escape) {
      exitPayload.next = { type: "cancel" };
      exit();
      return;
    }
    // c: confirm with a snapshot of the current assignment.
    if (input === "c") {
      exitPayload.next = {
        type: "confirm",
        layerCount,
        assignment: { ...assign },
        byLayer
      };
      exit();
      return;
    }
    // j / down: move cursor down (clamped to the last file).
    if (input === "j" || key.downArrow) {
      setFileIdx((i) => Math.min(i + 1, Math.max(0, files.length - 1)));
      return;
    }
    // k / up: move cursor up (clamped to 0).
    if (input === "k" || key.upArrow) {
      setFileIdx((i) => Math.max(i - 1, 0));
      return;
    }
    // + (or its unshifted =): add a layer, up to 9.
    if (input === "+" || input === "=") {
      setLayerCount((n) => Math.min(9, n + 1));
      return;
    }
    // -: remove a layer, down to 2.
    if (input === "-") {
      setLayerCount((n) => Math.max(2, n - 1));
      return;
    }
    // Digit 0..layerCount-1: assign the highlighted file to that layer.
    const d = Number.parseInt(input, 10);
    if (Number.isInteger(d) && d >= 0 && d < layerCount && current) {
      setAssign((prev) => ({ ...prev, [current]: d }));
    }
  });

  if (!files.length) {
    return React.createElement(Text, { color: "red" }, "No files to split.");
  }

  return React.createElement(
    Box,
    { flexDirection: "column", padding: 1 },
    React.createElement(Text, { color: "cyan", bold: true }, "nugit split"),
    React.createElement(
      Text,
      { dimColor: true },
      `Layers: ${layerCount} (+/-) | assign file to layer 0–${layerCount - 1} (digit) | c confirm | q cancel`
    ),
    // NOTE(review): `marginTop` is passed to Text here, but margins are Box
    // layout props in Ink — confirm this spacing actually renders.
    React.createElement(Text, { marginTop: 1 }, chalk.bold("Files:")),
    // Only the first 18 files are listed; paths truncated to 72 chars.
    ...files.slice(0, 18).map((f, i) =>
      React.createElement(
        Text,
        { key: f },
        `${i === safeIdx ? "▶" : " "} [L${assign[f] ?? 0}] ${f.slice(0, 72)}`
      )
    ),
    React.createElement(Text, { marginTop: 1, color: "magenta" }, "Preview by layer:"),
    // One summary line per layer, truncated to 100 chars.
    ...byLayer.map((bucket, li) =>
      React.createElement(
        Text,
        { key: String(li) },
        chalk.yellow(`L${li}: `) + chalk.dim(bucket.join(", ").slice(0, 100) || "(empty)")
      )
    )
  );
}
|
|
@@ -0,0 +1,284 @@
|
|
|
1
|
+
import { githubListOpenPulls } from "./github-rest.js";
|
|
2
|
+
import { getGithubContents, decodeGithubFileContent, getPull } from "./api-client.js";
|
|
3
|
+
import { validateStackDoc } from "./nugit-stack.js";
|
|
4
|
+
|
|
5
|
+
/**
 * Stack tip PR # from layer.tip, else top entry by position in doc.prs.
 * Returns null when neither source yields a PR number >= 1.
 * @param {Record<string, unknown>} doc
 * @returns {number | null}
 */
export function stackTipPrNumber(doc) {
  // Preferred source: an explicit layer.tip.pr_number.
  const layer = doc.layer;
  if (layer && typeof layer === "object") {
    const tip = /** @type {{ tip?: { pr_number?: number } }} */ (layer).tip;
    if (tip && typeof tip === "object" && typeof tip.pr_number === "number" && tip.pr_number >= 1) {
      return tip.pr_number;
    }
  }
  // Fallback: the highest-position entry in doc.prs.
  const entries = Array.isArray(doc.prs) ? doc.prs : [];
  if (entries.length === 0) {
    return null;
  }
  const byPosition = entries.slice().sort((x, y) => {
    const px = /** @type {{ position?: number }} */ (x).position ?? 0;
    const py = /** @type {{ position?: number }} */ (y).position ?? 0;
    return px - py;
  });
  const top = byPosition.at(-1);
  const candidate =
    top && typeof top === "object" ? /** @type {{ pr_number?: number }} */ (top).pr_number : undefined;
  return typeof candidate === "number" && candidate >= 1 ? candidate : null;
}
|
|
31
|
+
|
|
32
|
+
/**
 * Case-insensitively compare the doc's repo_full_name against owner/repo.
 * @param {Record<string, unknown>} doc
 * @param {string} owner
 * @param {string} repo
 * @returns {boolean}
 */
export function docRepoMatches(doc, owner, repo) {
  const expected = `${owner}/${repo}`.toLowerCase();
  const actual = String(doc.repo_full_name || "").toLowerCase();
  return actual === expected;
}
|
|
41
|
+
|
|
42
|
+
/**
 * Run `fn` over every item with at most `concurrency` calls in flight.
 * Items are claimed from a shared cursor, so each index is processed once.
 * @template T
 * @param {T[]} items
 * @param {number} concurrency
 * @param {(item: T, index: number) => Promise<void>} fn
 */
async function forEachPool(items, concurrency, fn) {
  let cursor = 0;
  const worker = async () => {
    while (true) {
      const i = cursor;
      cursor += 1;
      if (i >= items.length) {
        break;
      }
      await fn(items[i], i);
    }
  };
  // Never spin up more workers than there are items.
  const poolSize = Math.min(concurrency, items.length);
  const pool = [];
  for (let w = 0; w < poolSize; w++) {
    pool.push(worker());
  }
  await Promise.all(pool);
}
|
|
61
|
+
|
|
62
|
+
/**
 * Load and validate `.nugit/stack.json` from a repo at the given ref.
 * Any failure (missing file, bad JSON, failed validation, repo mismatch)
 * yields null rather than throwing.
 * @param {string} owner
 * @param {string} repo
 * @param {string} ref
 * @returns {Promise<Record<string, unknown> | null>}
 */
async function tryLoadStackDocAtRef(owner, repo, ref) {
  try {
    const contents = await getGithubContents(owner, repo, ".nugit/stack.json", ref);
    const raw = decodeGithubFileContent(contents);
    if (!raw) {
      return null;
    }
    const parsed = JSON.parse(raw);
    validateStackDoc(parsed);
    if (!docRepoMatches(parsed, owner, repo)) {
      return null;
    }
    return /** @type {Record<string, unknown>} */ (parsed);
  } catch {
    // Treat every failure mode as "no stack file at this ref".
    return null;
  }
}
|
|
85
|
+
|
|
86
|
+
/**
 * If this is a propagated prefix, try to load the full tip stack doc.
 * A doc is a "prefix" when layer.stack_size claims more PRs than doc.prs
 * holds; in that case the complete doc should exist on the tip's branch.
 * Falls back to the original doc whenever expansion is not possible.
 * @param {string} owner
 * @param {string} repo
 * @param {Record<string, unknown>} doc
 */
async function maybeExpandPrefixDoc(owner, repo, doc) {
  const layer =
    doc.layer && typeof doc.layer === "object"
      ? /** @type {Record<string, unknown>} */ (doc.layer)
      : null;
  if (!layer) {
    return doc;
  }
  const localPrs = Array.isArray(doc.prs) ? doc.prs : [];
  const declaredSize = layer.stack_size;
  const sizeIsUsable = typeof declaredSize === "number" && Number.isFinite(declaredSize);
  const tipInfo =
    layer.tip && typeof layer.tip === "object"
      ? /** @type {Record<string, unknown>} */ (layer.tip)
      : null;
  const tipBranch =
    tipInfo && typeof tipInfo.head_branch === "string" ? tipInfo.head_branch.trim() : "";
  // Only expand when the doc claims to be a strict prefix and names a tip branch.
  if (!sizeIsUsable || declaredSize <= localPrs.length || tipBranch === "") {
    return doc;
  }
  const tipDoc = await tryLoadStackDocAtRef(owner, repo, tipBranch);
  if (!tipDoc) {
    return doc;
  }
  const tipPrs = Array.isArray(tipDoc.prs) ? tipDoc.prs : [];
  // Prefer the tip doc only when it genuinely carries more entries.
  return tipPrs.length > localPrs.length ? tipDoc : doc;
}
|
|
116
|
+
|
|
117
|
+
/**
 * Scan open PRs; any head with committed `.nugit/stack.json` counts. Deduplicate by stack tip PR # (layer.tip or top of prs).
 *
 * Returns a summary object with one entry per distinct stack, each carrying
 * its ordered PR rows plus ready-made `nugit stack fetch`/`view` commands.
 *
 * @param {string} owner
 * @param {string} repo
 * @param {{
 *   maxOpenPrs?: number,
 *   listPerPage?: number,
 *   enrich?: boolean,
 *   fetchConcurrency?: number,
 *   onProgress?: (msg: string) => void
 * }} [opts]
 */
export async function discoverStacksInRepo(owner, repo, opts = {}) {
  const maxOpenPrs = opts.maxOpenPrs ?? 500;
  // GitHub caps per_page at 100.
  const listPerPage = Math.min(100, Math.max(1, opts.listPerPage ?? 100));
  const fetchConc = Math.max(1, Math.min(32, opts.fetchConcurrency ?? 8));
  const enrich = opts.enrich !== false;
  const onProgress = typeof opts.onProgress === "function" ? opts.onProgress : null;

  // Phase 1: page through open PRs, honoring the maxOpenPrs cap.
  /** @type {unknown[]} */
  const allPulls = [];
  let page = 1;
  let truncated = false;
  for (;;) {
    if (maxOpenPrs > 0 && allPulls.length >= maxOpenPrs) {
      truncated = true;
      break;
    }
    const pulls = await githubListOpenPulls(owner, repo, page, listPerPage);
    if (!Array.isArray(pulls) || pulls.length === 0) {
      break;
    }
    for (const p of pulls) {
      if (maxOpenPrs > 0 && allPulls.length >= maxOpenPrs) {
        truncated = true;
        break;
      }
      allPulls.push(p);
    }
    if (truncated) {
      break;
    }
    if (onProgress) {
      onProgress(`listed open PRs: ${allPulls.length}`);
    }
    // A short page means we've reached the end of the listing.
    if (pulls.length < listPerPage) {
      break;
    }
    page += 1;
  }

  // Phase 2: concurrently probe each PR head for a committed stack.json.
  // rowSlots is indexed by PR position so concurrent writes never collide.
  /** @type {({ doc: Record<string, unknown>, discoveredFromPr: number, headRef: string } | null)[]} */
  const rowSlots = Array(allPulls.length).fill(null);

  let checkedHeads = 0;
  await forEachPool(allPulls, fetchConc, async (pull, i) => {
    const p = pull && typeof pull === "object" ? /** @type {Record<string, unknown>} */ (pull) : {};
    const head = p.head && typeof p.head === "object" ? /** @type {Record<string, unknown>} */ (p.head) : {};
    const ref = typeof head.ref === "string" ? head.ref : "";
    const num = p.number;
    if (!ref || typeof num !== "number") {
      return;
    }
    const doc = await tryLoadStackDocAtRef(owner, repo, ref);
    checkedHeads += 1;
    // Progress is reported every 10 heads to keep output quiet.
    if (onProgress && (checkedHeads % 10 === 0 || checkedHeads === allPulls.length)) {
      onProgress(`checked stack.json on PR heads: ${checkedHeads}/${allPulls.length}`);
    }
    if (!doc) {
      return;
    }
    rowSlots[i] = { doc, discoveredFromPr: num, headRef: ref };
  });

  // Phase 3: deduplicate by tip PR number. When several PRs carry docs for
  // the same stack, keep the doc with the most entries; on a tie, prefer the
  // doc discovered on the tip PR itself.
  const found = rowSlots.filter(Boolean);
  /** @type {Map<number, { doc: Record<string, unknown>, discoveredFromPr: number, headRef: string }>} */
  const byTip = new Map();

  for (const row of found) {
    if (!row) {
      continue;
    }
    const expandedDoc = await maybeExpandPrefixDoc(owner, repo, row.doc);
    const tip = stackTipPrNumber(expandedDoc);
    if (tip == null) {
      continue;
    }
    const prev = byTip.get(tip);
    const score = Array.isArray(expandedDoc.prs) ? expandedDoc.prs.length : 0;
    const prevScore = prev ? (Array.isArray(prev.doc.prs) ? prev.doc.prs.length : 0) : -1;
    if (!prev || score > prevScore || (score === prevScore && row.discoveredFromPr === tip)) {
      byTip.set(tip, {
        doc: expandedDoc,
        discoveredFromPr: row.discoveredFromPr,
        headRef: row.headRef
      });
    }
  }

  // Phase 4: build the output rows, ordered by tip PR number.
  const repoFull = `${owner}/${repo}`;
  /** @type {import("./stack-discover.js").DiscoveredStack[]} */
  const stacks = [];

  for (const [tipPr, meta] of [...byTip.entries()].sort((a, b) => a[0] - b[0])) {
    const doc = meta.doc;
    const prs = Array.isArray(doc.prs) ? doc.prs : [];
    const sorted = [...prs].sort(
      (a, b) =>
        (/** @type {{ position?: number }} */ (a).position ?? 0) -
        (/** @type {{ position?: number }} */ (b).position ?? 0)
    );

    /** @type {{ pr_number: number, position: number, title?: string, html_url?: string, head_branch?: string }[]} */
    let prRows = sorted.map((entry) => {
      const e = entry && typeof entry === "object" ? /** @type {Record<string, unknown>} */ (entry) : {};
      return {
        pr_number: /** @type {number} */ (e.pr_number),
        position: /** @type {number} */ (e.position),
        head_branch: typeof e.head_branch === "string" ? e.head_branch : undefined
      };
    });

    // Optionally enrich each row with live title/html_url from the API.
    if (enrich) {
      if (onProgress) {
        onProgress(`loading PR titles for stack tip #${tipPr}`);
      }
      await forEachPool(prRows, fetchConc, async (row) => {
        try {
          const g = await getPull(owner, repo, row.pr_number);
          row.title = typeof g.title === "string" ? g.title : undefined;
          row.html_url = typeof g.html_url === "string" ? g.html_url : undefined;
        } catch {
          /* keep without title */
        }
      });
    }

    // Prefer the tip's head_branch recorded in the doc; fall back to the
    // branch the stack file was actually discovered on.
    const tipEntry = sorted.find((e) => {
      const o = e && typeof e === "object" ? /** @type {{ pr_number?: number }} */ (e) : {};
      return o.pr_number === tipPr;
    });
    const tipObj = tipEntry && typeof tipEntry === "object" ? /** @type {Record<string, unknown>} */ (tipEntry) : {};
    const tipHeadBranch =
      typeof tipObj.head_branch === "string"
        ? tipObj.head_branch
        : meta.headRef;

    stacks.push({
      tip_pr_number: tipPr,
      created_by: String(doc.created_by || ""),
      discovered_from_pr: meta.discoveredFromPr,
      pr_count: prRows.length,
      prs: prRows,
      tip_head_branch: tipHeadBranch,
      fetch_command: `nugit stack fetch --repo ${repoFull} --ref ${tipHeadBranch}`,
      view_command: `nugit stack view --repo ${repoFull} --ref ${tipHeadBranch}`
    });
  }

  return {
    repo_full_name: repoFull,
    scanned_open_prs: allPulls.length,
    open_prs_truncated: truncated,
    stacks_found: stacks.length,
    stacks
  };
}
|