newpr 0.6.5 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/history/store.ts +25 -0
- package/src/stack/balance.ts +128 -0
- package/src/stack/coupling.test.ts +158 -0
- package/src/stack/coupling.ts +135 -0
- package/src/stack/delta.test.ts +223 -0
- package/src/stack/delta.ts +264 -0
- package/src/stack/execute.test.ts +176 -0
- package/src/stack/execute.ts +194 -0
- package/src/stack/feasibility.test.ts +185 -0
- package/src/stack/feasibility.ts +286 -0
- package/src/stack/integration.test.ts +266 -0
- package/src/stack/merge-groups.test.ts +97 -0
- package/src/stack/merge-groups.ts +87 -0
- package/src/stack/partition.test.ts +233 -0
- package/src/stack/partition.ts +273 -0
- package/src/stack/plan.test.ts +154 -0
- package/src/stack/plan.ts +139 -0
- package/src/stack/pr-title.ts +64 -0
- package/src/stack/publish.ts +96 -0
- package/src/stack/split.ts +173 -0
- package/src/stack/types.ts +202 -0
- package/src/stack/verify.test.ts +137 -0
- package/src/stack/verify.ts +201 -0
- package/src/web/client/components/FeasibilityAlert.tsx +64 -0
- package/src/web/client/components/InputScreen.tsx +100 -89
- package/src/web/client/components/ResultsScreen.tsx +10 -2
- package/src/web/client/components/StackGroupCard.tsx +171 -0
- package/src/web/client/components/StackWarnings.tsx +135 -0
- package/src/web/client/hooks/useStack.ts +301 -0
- package/src/web/client/panels/StackPanel.tsx +289 -0
- package/src/web/server/routes.ts +114 -0
- package/src/web/server/stack-manager.ts +580 -0
- package/src/web/server.ts +15 -0
- package/src/web/styles/built.css +1 -1
|
@@ -0,0 +1,273 @@
|
|
|
1
|
+
import type { FileGroup } from "../types/output.ts";
|
|
2
|
+
import type { PrCommit } from "../types/github.ts";
|
|
3
|
+
import type { LlmClient } from "../llm/client.ts";
|
|
4
|
+
import type { PartitionResult, ReattributedFile, StackWarning } from "./types.ts";
|
|
5
|
+
|
|
6
|
+
// One changed file plus a short, human-readable summary of its diff,
// used to give the LLM context when resolving ambiguous ownership.
interface FileSummaryInput {
  // Repo-relative file path.
  path: string;
  // Change status string (presumably a git status letter like "A"/"M"/"D" — TODO confirm against caller).
  status: string;
  // One-line description of what changed in this file.
  summary: string;
}

// Input to ambiguity detection: candidate groups (whose file sets may
// overlap) and the full set of files changed in the PR.
export interface PartitionInput {
  // Candidate groups; each claims a set of files, possibly overlapping.
  groups: FileGroup[];
  // Every file changed in the PR — the partition must account for all of them.
  changed_files: string[];
}

// Classification of changed files by how many groups claim them.
export interface AmbiguityReport {
  // Files claimed by exactly one group: path -> owning group name.
  exclusive: Map<string, string>;
  // Files claimed by two or more groups, with the claiming group names.
  ambiguous: Array<{ path: string; groups: string[] }>;
  // Changed files not claimed by any group.
  unassigned: string[];
}
|
|
22
|
+
|
|
23
|
+
export function detectAmbiguousPaths(input: PartitionInput): AmbiguityReport {
|
|
24
|
+
const { groups, changed_files } = input;
|
|
25
|
+
|
|
26
|
+
const pathToGroups = new Map<string, string[]>();
|
|
27
|
+
|
|
28
|
+
for (const group of groups) {
|
|
29
|
+
for (const file of group.files) {
|
|
30
|
+
const existing = pathToGroups.get(file) ?? [];
|
|
31
|
+
existing.push(group.name);
|
|
32
|
+
pathToGroups.set(file, existing);
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
const exclusive = new Map<string, string>();
|
|
37
|
+
const ambiguous: Array<{ path: string; groups: string[] }> = [];
|
|
38
|
+
const unassigned: string[] = [];
|
|
39
|
+
|
|
40
|
+
for (const file of changed_files) {
|
|
41
|
+
const groups = pathToGroups.get(file);
|
|
42
|
+
if (!groups || groups.length === 0) {
|
|
43
|
+
unassigned.push(file);
|
|
44
|
+
} else if (groups.length === 1) {
|
|
45
|
+
exclusive.set(file, groups[0]!);
|
|
46
|
+
} else {
|
|
47
|
+
ambiguous.push({ path: file, groups });
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
return { exclusive, ambiguous, unassigned };
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
export function buildStackPartitionPrompt(
|
|
55
|
+
ambiguous: Array<{ path: string; groups: string[] }>,
|
|
56
|
+
unassigned: string[],
|
|
57
|
+
groups: FileGroup[],
|
|
58
|
+
fileSummaries: FileSummaryInput[],
|
|
59
|
+
commits: PrCommit[],
|
|
60
|
+
): { system: string; user: string } {
|
|
61
|
+
const groupDescriptions = groups
|
|
62
|
+
.map((g) => `- "${g.name}" (${g.type}): ${g.description}`)
|
|
63
|
+
.join("\n");
|
|
64
|
+
|
|
65
|
+
const ambiguousSection = ambiguous.length > 0
|
|
66
|
+
? `\n\nAmbiguous files (appear in multiple groups):\n${ambiguous
|
|
67
|
+
.map((a) => `- ${a.path} → in groups: ${a.groups.join(", ")}`)
|
|
68
|
+
.join("\n")}`
|
|
69
|
+
: "";
|
|
70
|
+
|
|
71
|
+
const unassignedSection = unassigned.length > 0
|
|
72
|
+
? `\n\nUnassigned files (not in any group):\n${unassigned.map((f) => `- ${f}`).join("\n")}`
|
|
73
|
+
: "";
|
|
74
|
+
|
|
75
|
+
const fileSummarySection = fileSummaries.length > 0
|
|
76
|
+
? `\n\nFile summaries:\n${fileSummaries.map((f) => `- ${f.path}: ${f.summary}`).join("\n")}`
|
|
77
|
+
: "";
|
|
78
|
+
|
|
79
|
+
const commitSection = commits.length > 0
|
|
80
|
+
? `\n\nCommit history:\n${commits.map((c) => `- ${c.sha.substring(0, 7)} ${c.message}`).join("\n")}`
|
|
81
|
+
: "";
|
|
82
|
+
|
|
83
|
+
return {
|
|
84
|
+
system: `You are a code organization expert. Your task is to assign each file to EXACTLY ONE group for PR stacking.
|
|
85
|
+
|
|
86
|
+
Rules:
|
|
87
|
+
1. Each file must be assigned to exactly one group
|
|
88
|
+
2. Do not change files that are already exclusively assigned
|
|
89
|
+
3. For ambiguous files, choose the group where the file's changes are most relevant
|
|
90
|
+
4. For unassigned files, assign them to the most appropriate existing group
|
|
91
|
+
5. You may create a "Shared Foundation" group ONLY if files truly don't fit any existing group
|
|
92
|
+
6. Respond ONLY with a JSON object
|
|
93
|
+
|
|
94
|
+
Response format:
|
|
95
|
+
{
|
|
96
|
+
"assignments": [
|
|
97
|
+
{ "path": "file.ts", "group": "group-name", "reason": "brief reason" }
|
|
98
|
+
],
|
|
99
|
+
"shared_foundation": null
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
If creating a Shared Foundation group:
|
|
103
|
+
{
|
|
104
|
+
"assignments": [...],
|
|
105
|
+
"shared_foundation": { "name": "Shared Foundation", "description": "Common infrastructure changes", "files": [...] }
|
|
106
|
+
}`,
|
|
107
|
+
user: `Groups:\n${groupDescriptions}${ambiguousSection}${unassignedSection}${fileSummarySection}${commitSection}\n\nAssign each ambiguous/unassigned file to exactly one group.`,
|
|
108
|
+
};
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
export async function partitionGroups(
|
|
112
|
+
client: LlmClient,
|
|
113
|
+
groups: FileGroup[],
|
|
114
|
+
changedFiles: string[],
|
|
115
|
+
fileSummaries: FileSummaryInput[],
|
|
116
|
+
commits: PrCommit[],
|
|
117
|
+
): Promise<PartitionResult> {
|
|
118
|
+
const report = detectAmbiguousPaths({ groups, changed_files: changedFiles });
|
|
119
|
+
|
|
120
|
+
if (report.ambiguous.length === 0 && report.unassigned.length === 0) {
|
|
121
|
+
return {
|
|
122
|
+
ownership: report.exclusive,
|
|
123
|
+
reattributed: [],
|
|
124
|
+
warnings: [],
|
|
125
|
+
structured_warnings: [],
|
|
126
|
+
};
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
const prompt = buildStackPartitionPrompt(
|
|
130
|
+
report.ambiguous,
|
|
131
|
+
report.unassigned,
|
|
132
|
+
groups,
|
|
133
|
+
fileSummaries,
|
|
134
|
+
commits,
|
|
135
|
+
);
|
|
136
|
+
|
|
137
|
+
const response = await client.complete(prompt.system, prompt.user);
|
|
138
|
+
const parsed = parsePartitionResponse(response.content, report, groups);
|
|
139
|
+
|
|
140
|
+
return parsed;
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
function parsePartitionResponse(
|
|
144
|
+
raw: string,
|
|
145
|
+
report: AmbiguityReport,
|
|
146
|
+
groups: FileGroup[],
|
|
147
|
+
): PartitionResult {
|
|
148
|
+
const jsonStr = extractJson(raw);
|
|
149
|
+
const parsed: unknown = JSON.parse(jsonStr);
|
|
150
|
+
|
|
151
|
+
if (!parsed || typeof parsed !== "object") {
|
|
152
|
+
throw new Error("Expected JSON object for partition response");
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
const data = parsed as Record<string, unknown>;
|
|
156
|
+
const assignments = data.assignments;
|
|
157
|
+
|
|
158
|
+
if (!Array.isArray(assignments)) {
|
|
159
|
+
throw new Error("Expected 'assignments' array in partition response");
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
const ownership = new Map(report.exclusive);
|
|
163
|
+
const reattributed: ReattributedFile[] = [];
|
|
164
|
+
const warnings: string[] = [];
|
|
165
|
+
const structuredWarnings: StackWarning[] = [];
|
|
166
|
+
|
|
167
|
+
const validGroupNames = new Set(groups.map((g) => g.name));
|
|
168
|
+
|
|
169
|
+
for (const item of assignments) {
|
|
170
|
+
const entry = item as Record<string, unknown>;
|
|
171
|
+
const path = String(entry.path ?? "");
|
|
172
|
+
const group = String(entry.group ?? "");
|
|
173
|
+
const reason = String(entry.reason ?? "");
|
|
174
|
+
|
|
175
|
+
if (!path || !group) {
|
|
176
|
+
warnings.push(`Invalid assignment entry: ${JSON.stringify(item)}`);
|
|
177
|
+
continue;
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
if (!validGroupNames.has(group)) {
|
|
181
|
+
warnings.push(`Unknown group "${group}" for file "${path}", skipping`);
|
|
182
|
+
structuredWarnings.push({
|
|
183
|
+
category: "system",
|
|
184
|
+
severity: "warn",
|
|
185
|
+
title: "Invalid group in AI response",
|
|
186
|
+
message: `"${path}" was assigned to unknown group "${group}" — skipped`,
|
|
187
|
+
});
|
|
188
|
+
continue;
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
const ambiguousEntry = report.ambiguous.find((a) => a.path === path);
|
|
192
|
+
const isUnassigned = report.unassigned.includes(path);
|
|
193
|
+
|
|
194
|
+
if (ambiguousEntry) {
|
|
195
|
+
reattributed.push({
|
|
196
|
+
path,
|
|
197
|
+
from_groups: ambiguousEntry.groups,
|
|
198
|
+
to_group: group,
|
|
199
|
+
reason,
|
|
200
|
+
});
|
|
201
|
+
} else if (isUnassigned) {
|
|
202
|
+
reattributed.push({
|
|
203
|
+
path,
|
|
204
|
+
from_groups: [],
|
|
205
|
+
to_group: group,
|
|
206
|
+
reason,
|
|
207
|
+
});
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
ownership.set(path, group);
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
const stillUnassigned = report.unassigned.filter((p) => !ownership.has(p));
|
|
214
|
+
const stillAmbiguous = report.ambiguous.filter((a) => !ownership.has(a.path));
|
|
215
|
+
|
|
216
|
+
const fallbackGroup = groups[groups.length - 1]?.name;
|
|
217
|
+
|
|
218
|
+
if (stillUnassigned.length > 0 && fallbackGroup) {
|
|
219
|
+
for (const path of stillUnassigned) {
|
|
220
|
+
ownership.set(path, fallbackGroup);
|
|
221
|
+
reattributed.push({ path, from_groups: [], to_group: fallbackGroup, reason: "LLM did not assign; fallback to last group" });
|
|
222
|
+
}
|
|
223
|
+
warnings.push(`Files force-assigned to "${fallbackGroup}" (LLM missed): ${stillUnassigned.join(", ")}`);
|
|
224
|
+
structuredWarnings.push({
|
|
225
|
+
category: "assignment",
|
|
226
|
+
severity: "warn",
|
|
227
|
+
title: `${stillUnassigned.length} file(s) auto-assigned to "${fallbackGroup}"`,
|
|
228
|
+
message: "AI did not assign these files — they were placed in the last group as fallback",
|
|
229
|
+
details: stillUnassigned,
|
|
230
|
+
});
|
|
231
|
+
}
|
|
232
|
+
if (stillAmbiguous.length > 0 && fallbackGroup) {
|
|
233
|
+
const paths = stillAmbiguous.map((a) => a.path);
|
|
234
|
+
for (const a of stillAmbiguous) {
|
|
235
|
+
ownership.set(a.path, fallbackGroup);
|
|
236
|
+
reattributed.push({ path: a.path, from_groups: a.groups, to_group: fallbackGroup, reason: "LLM did not resolve ambiguity; fallback to last group" });
|
|
237
|
+
}
|
|
238
|
+
warnings.push(`Files force-assigned to "${fallbackGroup}" (LLM missed): ${paths.join(", ")}`);
|
|
239
|
+
structuredWarnings.push({
|
|
240
|
+
category: "assignment",
|
|
241
|
+
severity: "warn",
|
|
242
|
+
title: `${paths.length} ambiguous file(s) auto-assigned to "${fallbackGroup}"`,
|
|
243
|
+
message: "AI did not resolve which group these files belong to — placed in last group",
|
|
244
|
+
details: paths,
|
|
245
|
+
});
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
let sharedFoundation: FileGroup | undefined;
|
|
249
|
+
if (data.shared_foundation && typeof data.shared_foundation === "object") {
|
|
250
|
+
const sf = data.shared_foundation as Record<string, unknown>;
|
|
251
|
+
sharedFoundation = {
|
|
252
|
+
name: String(sf.name ?? "Shared Foundation"),
|
|
253
|
+
type: "chore",
|
|
254
|
+
description: String(sf.description ?? "Common infrastructure changes"),
|
|
255
|
+
files: Array.isArray(sf.files) ? sf.files.map(String) : [],
|
|
256
|
+
};
|
|
257
|
+
validGroupNames.add(sharedFoundation.name);
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
return {
|
|
261
|
+
ownership,
|
|
262
|
+
reattributed,
|
|
263
|
+
shared_foundation_group: sharedFoundation,
|
|
264
|
+
warnings,
|
|
265
|
+
structured_warnings: structuredWarnings,
|
|
266
|
+
};
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
function extractJson(raw: string): string {
|
|
270
|
+
const codeBlockMatch = raw.match(/```(?:json)?\s*\n?([\s\S]*?)\n?\s*```/);
|
|
271
|
+
if (codeBlockMatch?.[1]) return codeBlockMatch[1].trim();
|
|
272
|
+
return raw.trim();
|
|
273
|
+
}
|
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
import { describe, test, expect, beforeAll, afterAll } from "bun:test";
|
|
2
|
+
import { mkdtempSync, rmSync, writeFileSync, mkdirSync } from "node:fs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { tmpdir } from "node:os";
|
|
5
|
+
import { createStackPlan } from "./plan.ts";
|
|
6
|
+
import { extractDeltas } from "./delta.ts";
|
|
7
|
+
import type { FileGroup } from "../types/output.ts";
|
|
8
|
+
|
|
9
|
+
// Fixture state shared by all tests in this file.
let testRepoPath: string;
// First commit (before any of the "PR" changes).
let baseSha: string;
// Tip commit after all changes.
let headSha: string;

// Build a throwaway git repo with a small, known history:
//   base:  "Initial commit"          (README.md)            -> baseSha
//    + 1:  "Add auth module"         (new src/auth.ts)
//    + 2:  "Add UI component"        (new src/ui.tsx)
//    + 3:  "Update auth with token"  (src/auth.ts modified) -> headSha
beforeAll(async () => {
  testRepoPath = mkdtempSync(join(tmpdir(), "plan-test-"));

  await Bun.$`git init ${testRepoPath}`.quiet();
  // Identity config so commits succeed in a clean CI environment.
  await Bun.$`git -C ${testRepoPath} config user.name "Test User"`.quiet();
  await Bun.$`git -C ${testRepoPath} config user.email "test@example.com"`.quiet();

  writeFileSync(join(testRepoPath, "README.md"), "initial\n");
  await Bun.$`git -C ${testRepoPath} add README.md`.quiet();
  await Bun.$`git -C ${testRepoPath} commit -m "Initial commit"`.quiet();

  baseSha = (await Bun.$`git -C ${testRepoPath} rev-parse HEAD`.quiet()).stdout.toString().trim();

  mkdirSync(join(testRepoPath, "src"), { recursive: true });
  writeFileSync(join(testRepoPath, "src", "auth.ts"), "export const auth = true;\n");
  await Bun.$`git -C ${testRepoPath} add src/auth.ts`.quiet();
  await Bun.$`git -C ${testRepoPath} commit -m "Add auth module"`.quiet();

  writeFileSync(join(testRepoPath, "src", "ui.tsx"), "export const UI = () => <div/>;\n");
  await Bun.$`git -C ${testRepoPath} add src/ui.tsx`.quiet();
  await Bun.$`git -C ${testRepoPath} commit -m "Add UI component"`.quiet();

  // Second touch of src/auth.ts — exercises multiple deltas for one file.
  writeFileSync(join(testRepoPath, "src", "auth.ts"), "export const auth = true;\nexport const token = 'abc';\n");
  await Bun.$`git -C ${testRepoPath} add src/auth.ts`.quiet();
  await Bun.$`git -C ${testRepoPath} commit -m "Update auth with token"`.quiet();

  headSha = (await Bun.$`git -C ${testRepoPath} rev-parse HEAD`.quiet()).stdout.toString().trim();
});
|
|
41
|
+
|
|
42
|
+
afterAll(() => {
|
|
43
|
+
if (testRepoPath) {
|
|
44
|
+
rmSync(testRepoPath, { recursive: true, force: true });
|
|
45
|
+
}
|
|
46
|
+
});
|
|
47
|
+
|
|
48
|
+
describe("createStackPlan", () => {
  // Happy path: two disjoint groups -> one expected tree per group,
  // with each group's files attributed correctly.
  test("creates plan with expected trees for 2 groups", async () => {
    const deltas = await extractDeltas(testRepoPath, baseSha, headSha);

    const ownership = new Map([
      ["src/auth.ts", "Auth"],
      ["src/ui.tsx", "UI"],
    ]);

    const groups: FileGroup[] = [
      { name: "Auth", type: "feature", description: "Auth changes", files: ["src/auth.ts"] },
      { name: "UI", type: "feature", description: "UI changes", files: ["src/ui.tsx"] },
    ];

    const plan = await createStackPlan({
      repo_path: testRepoPath,
      base_sha: baseSha,
      head_sha: headSha,
      deltas,
      ownership,
      group_order: ["Auth", "UI"],
      groups,
    });

    expect(plan.groups.length).toBe(2);
    expect(plan.expected_trees.size).toBe(2);
    expect(plan.expected_trees.get("Auth")).toBeDefined();
    expect(plan.expected_trees.get("UI")).toBeDefined();

    // Stack order and file attribution must follow group_order/ownership.
    expect(plan.groups[0]?.id).toBe("Auth");
    expect(plan.groups[0]?.files).toContain("src/auth.ts");
    expect(plan.groups[1]?.id).toBe("UI");
    expect(plan.groups[1]?.files).toContain("src/ui.tsx");
  });

  // Invariant: the top of the stack must reproduce the branch exactly,
  // i.e. the last group's expected tree equals HEAD's tree object.
  test("final tree matches HEAD tree (suffix propagation)", async () => {
    const deltas = await extractDeltas(testRepoPath, baseSha, headSha);

    const ownership = new Map([
      ["src/auth.ts", "Auth"],
      ["src/ui.tsx", "UI"],
    ]);

    const groups: FileGroup[] = [
      { name: "Auth", type: "feature", description: "Auth", files: ["src/auth.ts"] },
      { name: "UI", type: "feature", description: "UI", files: ["src/ui.tsx"] },
    ];

    const plan = await createStackPlan({
      repo_path: testRepoPath,
      base_sha: baseSha,
      head_sha: headSha,
      deltas,
      ownership,
      group_order: ["Auth", "UI"],
      groups,
    });

    // ^{tree} peels the commit to its root tree object.
    const headTreeResult = await Bun.$`git -C ${testRepoPath} rev-parse ${headSha}^{tree}`.quiet();
    const headTree = headTreeResult.stdout.toString().trim();

    const lastGroupId = "UI";
    const lastExpectedTree = plan.expected_trees.get(lastGroupId);

    expect(lastExpectedTree).toBe(headTree);
  });

  // Deletions must survive planning: a group that only removes a file
  // still produces a tree identical to HEAD. Uses its own scratch repo.
  test("plan with file deletion", async () => {
    const repoPath = mkdtempSync(join(tmpdir(), "plan-delete-test-"));
    try {
      await Bun.$`git init ${repoPath}`.quiet();
      await Bun.$`git -C ${repoPath} config user.name "Test"`.quiet();
      await Bun.$`git -C ${repoPath} config user.email "t@t.com"`.quiet();

      writeFileSync(join(repoPath, "a.ts"), "a\n");
      writeFileSync(join(repoPath, "b.ts"), "b\n");
      await Bun.$`git -C ${repoPath} add -A`.quiet();
      await Bun.$`git -C ${repoPath} commit -m "Init"`.quiet();
      const base = (await Bun.$`git -C ${repoPath} rev-parse HEAD`.quiet()).stdout.toString().trim();

      await Bun.$`git -C ${repoPath} rm b.ts`.quiet();
      await Bun.$`git -C ${repoPath} commit -m "Delete b.ts"`.quiet();
      const head = (await Bun.$`git -C ${repoPath} rev-parse HEAD`.quiet()).stdout.toString().trim();

      const deltas = await extractDeltas(repoPath, base, head);
      const ownership = new Map([["b.ts", "Cleanup"]]);
      const groups: FileGroup[] = [
        { name: "Cleanup", type: "chore", description: "Remove unused", files: ["b.ts"] },
      ];

      const plan = await createStackPlan({
        repo_path: repoPath,
        base_sha: base,
        head_sha: head,
        deltas,
        ownership,
        group_order: ["Cleanup"],
        groups,
      });

      const headTree = (await Bun.$`git -C ${repoPath} rev-parse ${head}^{tree}`.quiet()).stdout.toString().trim();
      expect(plan.expected_trees.get("Cleanup")).toBe(headTree);
    } finally {
      rmSync(repoPath, { recursive: true, force: true });
    }
  });
});
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
import { tmpdir } from "node:os";
import { join } from "node:path";

import type {
  DeltaEntry,
  StackPlan,
  StackGroup,
} from "./types.ts";
import type { FileGroup, GroupType } from "../types/output.ts";
|
|
7
|
+
|
|
8
|
+
// Everything needed to compute the per-group expected tree objects
// for a PR stack.
export interface PlanInput {
  // Path to the local git checkout (passed to `git -C`).
  repo_path: string;
  // Commit the stack is built on top of.
  base_sha: string;
  // Tip commit of the original branch.
  head_sha: string;
  // File-level changes between base and head, iterated in order.
  deltas: DeltaEntry[];
  // File path -> owning group id.
  ownership: Map<string, string>;
  // Group ids in stack order (index 0 = bottom of the stack).
  group_order: string[];
  // Original group metadata (name/type/description/dependencies).
  groups: FileGroup[];
}
|
|
17
|
+
|
|
18
|
+
export async function createStackPlan(input: PlanInput): Promise<StackPlan> {
|
|
19
|
+
const { repo_path, base_sha, head_sha, deltas, ownership, group_order, groups } = input;
|
|
20
|
+
|
|
21
|
+
const groupRank = new Map<string, number>();
|
|
22
|
+
group_order.forEach((gid, idx) => groupRank.set(gid, idx));
|
|
23
|
+
|
|
24
|
+
const stackGroups = buildStackGroups(groups, group_order, ownership);
|
|
25
|
+
|
|
26
|
+
const tmpIndexFiles: string[] = [];
|
|
27
|
+
const expectedTrees = new Map<string, string>();
|
|
28
|
+
|
|
29
|
+
try {
|
|
30
|
+
for (let i = 0; i < group_order.length; i++) {
|
|
31
|
+
const idxFile = `/tmp/newpr-plan-idx-${Date.now()}-${i}-${Math.random().toString(36).slice(2, 8)}`;
|
|
32
|
+
tmpIndexFiles.push(idxFile);
|
|
33
|
+
|
|
34
|
+
const readTree = await Bun.$`GIT_INDEX_FILE=${idxFile} git -C ${repo_path} read-tree ${base_sha}`.quiet().nothrow();
|
|
35
|
+
if (readTree.exitCode !== 0) {
|
|
36
|
+
throw new Error(`Failed to initialize index ${i}: ${readTree.stderr.toString().trim()}`);
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
for (const delta of deltas) {
|
|
41
|
+
const batchPerIndex = new Map<number, string[]>();
|
|
42
|
+
|
|
43
|
+
for (const change of delta.changes) {
|
|
44
|
+
const fileGroupId = ownership.get(change.path);
|
|
45
|
+
if (!fileGroupId) continue;
|
|
46
|
+
|
|
47
|
+
const fileRank = groupRank.get(fileGroupId);
|
|
48
|
+
if (fileRank === undefined) continue;
|
|
49
|
+
|
|
50
|
+
// Suffix propagation: update index[fileRank] through index[N-1]
|
|
51
|
+
for (let idxNum = fileRank; idxNum < group_order.length; idxNum++) {
|
|
52
|
+
let batch = batchPerIndex.get(idxNum);
|
|
53
|
+
if (!batch) {
|
|
54
|
+
batch = [];
|
|
55
|
+
batchPerIndex.set(idxNum, batch);
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
if (change.status === "D") {
|
|
59
|
+
batch.push(`0 ${"0".repeat(40)}\t${change.path}`);
|
|
60
|
+
} else if (change.status === "R") {
|
|
61
|
+
if (change.old_path) {
|
|
62
|
+
batch.push(`0 ${"0".repeat(40)}\t${change.old_path}`);
|
|
63
|
+
}
|
|
64
|
+
batch.push(`${change.new_mode} ${change.new_blob}\t${change.path}`);
|
|
65
|
+
} else {
|
|
66
|
+
batch.push(`${change.new_mode} ${change.new_blob}\t${change.path}`);
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
for (const [idxNum, lines] of batchPerIndex) {
|
|
72
|
+
const idxFile = tmpIndexFiles[idxNum];
|
|
73
|
+
if (!idxFile || lines.length === 0) continue;
|
|
74
|
+
|
|
75
|
+
const stdinData = lines.join("\n") + "\n";
|
|
76
|
+
const updateIdx = await Bun.$`echo ${stdinData} | GIT_INDEX_FILE=${idxFile} git -C ${repo_path} update-index --index-info`.quiet().nothrow();
|
|
77
|
+
if (updateIdx.exitCode !== 0) {
|
|
78
|
+
throw new Error(`update-index failed for index ${idxNum}: ${updateIdx.stderr.toString().trim()}`);
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
for (let i = 0; i < group_order.length; i++) {
|
|
84
|
+
const idxFile = tmpIndexFiles[i];
|
|
85
|
+
const gid = group_order[i];
|
|
86
|
+
if (!idxFile || !gid) continue;
|
|
87
|
+
|
|
88
|
+
const writeTree = await Bun.$`GIT_INDEX_FILE=${idxFile} git -C ${repo_path} write-tree`.quiet().nothrow();
|
|
89
|
+
if (writeTree.exitCode !== 0) {
|
|
90
|
+
throw new Error(`write-tree failed for index ${i}: ${writeTree.stderr.toString().trim()}`);
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
expectedTrees.set(gid, writeTree.stdout.toString().trim());
|
|
94
|
+
}
|
|
95
|
+
} finally {
|
|
96
|
+
for (const idxFile of tmpIndexFiles) {
|
|
97
|
+
try {
|
|
98
|
+
await Bun.$`rm -f ${idxFile}`.quiet().nothrow();
|
|
99
|
+
} catch {}
|
|
100
|
+
}
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
return {
|
|
104
|
+
base_sha,
|
|
105
|
+
head_sha,
|
|
106
|
+
groups: stackGroups,
|
|
107
|
+
expected_trees: expectedTrees,
|
|
108
|
+
};
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
function buildStackGroups(
|
|
112
|
+
groups: FileGroup[],
|
|
113
|
+
groupOrder: string[],
|
|
114
|
+
ownership: Map<string, string>,
|
|
115
|
+
): StackGroup[] {
|
|
116
|
+
const groupNameMap = new Map<string, FileGroup>();
|
|
117
|
+
for (const g of groups) {
|
|
118
|
+
groupNameMap.set(g.name, g);
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
return groupOrder.map((gid, idx) => {
|
|
122
|
+
const original = groupNameMap.get(gid);
|
|
123
|
+
const files: string[] = [];
|
|
124
|
+
|
|
125
|
+
for (const [path, owner] of ownership) {
|
|
126
|
+
if (owner === gid) files.push(path);
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
return {
|
|
130
|
+
id: gid,
|
|
131
|
+
name: original?.name ?? gid,
|
|
132
|
+
type: (original?.type ?? "chore") as GroupType,
|
|
133
|
+
description: original?.description ?? "",
|
|
134
|
+
files: files.sort(),
|
|
135
|
+
deps: original?.dependencies ?? [],
|
|
136
|
+
order: idx,
|
|
137
|
+
};
|
|
138
|
+
});
|
|
139
|
+
}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import type { LlmClient } from "../llm/client.ts";
|
|
2
|
+
import type { StackGroup } from "./types.ts";
|
|
3
|
+
|
|
4
|
+
export async function generatePrTitles(
|
|
5
|
+
llmClient: LlmClient,
|
|
6
|
+
groups: StackGroup[],
|
|
7
|
+
prTitle: string,
|
|
8
|
+
): Promise<Map<string, string>> {
|
|
9
|
+
const groupSummaries = groups
|
|
10
|
+
.map((g, i) => [
|
|
11
|
+
`Group ${i + 1}: "${g.name}"`,
|
|
12
|
+
` Type: ${g.type}`,
|
|
13
|
+
` Description: ${g.description}`,
|
|
14
|
+
` Files (${g.files.length}): ${g.files.slice(0, 10).join(", ")}${g.files.length > 10 ? ` ... +${g.files.length - 10} more` : ""}`,
|
|
15
|
+
].join("\n"))
|
|
16
|
+
.join("\n\n");
|
|
17
|
+
|
|
18
|
+
const system = `You generate short PR titles for stacked PRs.
|
|
19
|
+
|
|
20
|
+
Rules:
|
|
21
|
+
- Format: "type: description" — NO scope parentheses
|
|
22
|
+
- type: feat | fix | refactor | chore | docs | test | perf
|
|
23
|
+
- description: 3-6 words, imperative mood, lowercase, no period
|
|
24
|
+
- Be terse. Shorter is better. Omit filler words (add, implement, update, etc. when redundant)
|
|
25
|
+
- Each title must be unique across the set
|
|
26
|
+
|
|
27
|
+
Good: "feat: jwt token refresh", "fix: null user response", "refactor: shared validators", "chore: eslint config"
|
|
28
|
+
Bad: "feat(auth): add jwt token refresh middleware for authentication module" (too long, has scope)
|
|
29
|
+
|
|
30
|
+
Return ONLY JSON array: [{"group_id": "...", "title": "..."}]`;
|
|
31
|
+
|
|
32
|
+
const user = `Original PR: "${prTitle}"
|
|
33
|
+
|
|
34
|
+
${groupSummaries}
|
|
35
|
+
|
|
36
|
+
Generate a unique, descriptive PR title for each group. Return JSON array:
|
|
37
|
+
[{"group_id": "...", "title": "..."}]`;
|
|
38
|
+
|
|
39
|
+
const response = await llmClient.complete(system, user);
|
|
40
|
+
|
|
41
|
+
const titles = new Map<string, string>();
|
|
42
|
+
|
|
43
|
+
try {
|
|
44
|
+
const cleaned = response.content.replace(/```(?:json)?\s*/g, "").replace(/```\s*/g, "").trim();
|
|
45
|
+
const parsed = JSON.parse(cleaned) as Array<{ group_id: string; title: string }>;
|
|
46
|
+
for (const item of parsed) {
|
|
47
|
+
if (item.group_id && item.title) {
|
|
48
|
+
titles.set(item.group_id, item.title);
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
} catch {
|
|
52
|
+
for (const g of groups) {
|
|
53
|
+
titles.set(g.id, `${g.type}: ${g.description}`);
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
for (const g of groups) {
|
|
58
|
+
if (!titles.has(g.id)) {
|
|
59
|
+
titles.set(g.id, `${g.type}: ${g.description}`);
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
return titles;
|
|
64
|
+
}
|