newpr 0.6.5 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/history/store.ts +25 -0
- package/src/stack/balance.ts +128 -0
- package/src/stack/coupling.test.ts +158 -0
- package/src/stack/coupling.ts +135 -0
- package/src/stack/delta.test.ts +223 -0
- package/src/stack/delta.ts +264 -0
- package/src/stack/execute.test.ts +176 -0
- package/src/stack/execute.ts +194 -0
- package/src/stack/feasibility.test.ts +185 -0
- package/src/stack/feasibility.ts +286 -0
- package/src/stack/integration.test.ts +266 -0
- package/src/stack/merge-groups.test.ts +97 -0
- package/src/stack/merge-groups.ts +87 -0
- package/src/stack/partition.test.ts +233 -0
- package/src/stack/partition.ts +273 -0
- package/src/stack/plan.test.ts +154 -0
- package/src/stack/plan.ts +139 -0
- package/src/stack/pr-title.ts +64 -0
- package/src/stack/publish.ts +96 -0
- package/src/stack/split.ts +173 -0
- package/src/stack/types.ts +202 -0
- package/src/stack/verify.test.ts +137 -0
- package/src/stack/verify.ts +201 -0
- package/src/web/client/components/FeasibilityAlert.tsx +64 -0
- package/src/web/client/components/InputScreen.tsx +100 -89
- package/src/web/client/components/ResultsScreen.tsx +10 -2
- package/src/web/client/components/StackGroupCard.tsx +171 -0
- package/src/web/client/components/StackWarnings.tsx +135 -0
- package/src/web/client/hooks/useStack.ts +301 -0
- package/src/web/client/panels/StackPanel.tsx +289 -0
- package/src/web/server/routes.ts +114 -0
- package/src/web/server/stack-manager.ts +580 -0
- package/src/web/server.ts +15 -0
- package/src/web/styles/built.css +1 -1
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
import { describe, test, expect, afterAll } from "bun:test";
|
|
2
|
+
import { mkdtempSync, rmSync, writeFileSync, mkdirSync } from "node:fs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { tmpdir } from "node:os";
|
|
5
|
+
import { extractDeltas } from "./delta.ts";
|
|
6
|
+
import { applyCouplingRules } from "./coupling.ts";
|
|
7
|
+
import { checkFeasibility } from "./feasibility.ts";
|
|
8
|
+
import { createStackPlan } from "./plan.ts";
|
|
9
|
+
import { executeStack } from "./execute.ts";
|
|
10
|
+
import { verifyStack } from "./verify.ts";
|
|
11
|
+
import type { FileGroup } from "../types/output.ts";
|
|
12
|
+
|
|
13
|
+
const tmpDirs: string[] = [];
|
|
14
|
+
|
|
15
|
+
function makeTmpRepo(): string {
|
|
16
|
+
const dir = mkdtempSync(join(tmpdir(), "stack-integ-"));
|
|
17
|
+
tmpDirs.push(dir);
|
|
18
|
+
return dir;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
afterAll(() => {
|
|
22
|
+
for (const dir of tmpDirs) {
|
|
23
|
+
rmSync(dir, { recursive: true, force: true });
|
|
24
|
+
}
|
|
25
|
+
});
|
|
26
|
+
|
|
27
|
+
async function initRepo(path: string): Promise<void> {
|
|
28
|
+
await Bun.$`git init ${path}`.quiet();
|
|
29
|
+
await Bun.$`git -C ${path} config user.name "Test"`.quiet();
|
|
30
|
+
await Bun.$`git -C ${path} config user.email "test@test.com"`.quiet();
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
async function getSha(path: string): Promise<string> {
|
|
34
|
+
return (await Bun.$`git -C ${path} rev-parse HEAD`.quiet()).stdout.toString().trim();
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
async function getTree(path: string, sha: string): Promise<string> {
|
|
38
|
+
return (await Bun.$`git -C ${path} rev-parse ${sha}^{tree}`.quiet()).stdout.toString().trim();
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
async function runFullPipeline(
|
|
42
|
+
repoPath: string,
|
|
43
|
+
baseSha: string,
|
|
44
|
+
headSha: string,
|
|
45
|
+
groups: FileGroup[],
|
|
46
|
+
ownership: Map<string, string>,
|
|
47
|
+
groupOrder: string[],
|
|
48
|
+
) {
|
|
49
|
+
const changedFiles = [...ownership.keys()];
|
|
50
|
+
const coupled = applyCouplingRules(ownership, changedFiles, groupOrder);
|
|
51
|
+
const deltas = await extractDeltas(repoPath, baseSha, headSha);
|
|
52
|
+
const feasibility = checkFeasibility({ deltas, ownership: coupled.ownership });
|
|
53
|
+
|
|
54
|
+
if (!feasibility.feasible || !feasibility.ordered_group_ids) {
|
|
55
|
+
return { feasibility, plan: null, execResult: null, verifyResult: null };
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
const plan = await createStackPlan({
|
|
59
|
+
repo_path: repoPath,
|
|
60
|
+
base_sha: baseSha,
|
|
61
|
+
head_sha: headSha,
|
|
62
|
+
deltas,
|
|
63
|
+
ownership: coupled.ownership,
|
|
64
|
+
group_order: feasibility.ordered_group_ids,
|
|
65
|
+
groups,
|
|
66
|
+
});
|
|
67
|
+
|
|
68
|
+
const execResult = await executeStack({
|
|
69
|
+
repo_path: repoPath,
|
|
70
|
+
plan,
|
|
71
|
+
deltas,
|
|
72
|
+
ownership: coupled.ownership,
|
|
73
|
+
pr_author: { name: "Test", email: "test@test.com" },
|
|
74
|
+
pr_number: 1,
|
|
75
|
+
head_branch: "test-branch",
|
|
76
|
+
});
|
|
77
|
+
|
|
78
|
+
const verifyResult = await verifyStack({
|
|
79
|
+
repo_path: repoPath,
|
|
80
|
+
base_sha: baseSha,
|
|
81
|
+
head_sha: headSha,
|
|
82
|
+
exec_result: execResult,
|
|
83
|
+
ownership: coupled.ownership,
|
|
84
|
+
});
|
|
85
|
+
|
|
86
|
+
return { feasibility, plan, execResult, verifyResult };
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
describe("integration: full stacking pipeline", () => {
  test("happy path with 3 groups", async () => {
    const repo = makeTmpRepo();
    await initRepo(repo);

    // Base commit: just a README, so every later file is a pure addition.
    mkdirSync(join(repo, "src"), { recursive: true });
    writeFileSync(join(repo, "README.md"), "init\n");
    await Bun.$`git -C ${repo} add -A`.quiet();
    await Bun.$`git -C ${repo} commit -m "Init"`.quiet();
    const base = await getSha(repo);

    // Three commits, one file each — each file maps cleanly onto one group.
    writeFileSync(join(repo, "src", "auth.ts"), "export const auth = true;\n");
    await Bun.$`git -C ${repo} add src/auth.ts`.quiet();
    await Bun.$`git -C ${repo} commit -m "Add auth"`.quiet();

    writeFileSync(join(repo, "src", "api.ts"), "export const api = '/v1';\n");
    await Bun.$`git -C ${repo} add src/api.ts`.quiet();
    await Bun.$`git -C ${repo} commit -m "Add api"`.quiet();

    writeFileSync(join(repo, "src", "ui.tsx"), "export const UI = () => <div/>;\n");
    await Bun.$`git -C ${repo} add src/ui.tsx`.quiet();
    await Bun.$`git -C ${repo} commit -m "Add ui"`.quiet();
    const head = await getSha(repo);

    const groups: FileGroup[] = [
      { name: "Auth", type: "feature", description: "Auth", files: ["src/auth.ts"] },
      { name: "API", type: "feature", description: "API", files: ["src/api.ts"] },
      { name: "UI", type: "feature", description: "UI", files: ["src/ui.tsx"] },
    ];
    const ownership = new Map([
      ["src/auth.ts", "Auth"],
      ["src/api.ts", "API"],
      ["src/ui.tsx", "UI"],
    ]);

    const result = await runFullPipeline(repo, base, head, groups, ownership, ["Auth", "API", "UI"]);

    expect(result.feasibility.feasible).toBe(true);
    expect(result.plan!.groups.length).toBe(3);
    expect(result.execResult!.group_commits.length).toBe(3);
    expect(result.verifyResult!.verified).toBe(true);
    expect(result.verifyResult!.errors).toEqual([]);
    expect(result.verifyResult!.warnings).toBeDefined();

    // Core invariant: the stacked commits must reproduce HEAD's tree exactly.
    const headTree = await getTree(repo, head);
    expect(result.execResult!.final_tree_sha).toBe(headTree);
  });

  test("file rename preserves tree equivalence", async () => {
    const repo = makeTmpRepo();
    await initRepo(repo);

    writeFileSync(join(repo, "old.ts"), "content\n");
    await Bun.$`git -C ${repo} add -A`.quiet();
    await Bun.$`git -C ${repo} commit -m "Init"`.quiet();
    const base = await getSha(repo);

    // A rename shows up as a delete (old.ts) plus an add (new.ts); both
    // paths are owned by the same group here.
    await Bun.$`git -C ${repo} mv old.ts new.ts`.quiet();
    await Bun.$`git -C ${repo} commit -m "Rename"`.quiet();
    const head = await getSha(repo);

    const groups: FileGroup[] = [
      { name: "Rename", type: "refactor", description: "Rename file", files: ["old.ts", "new.ts"] },
    ];
    const ownership = new Map([
      ["old.ts", "Rename"],
      ["new.ts", "Rename"],
    ]);

    const result = await runFullPipeline(repo, base, head, groups, ownership, ["Rename"]);

    expect(result.feasibility.feasible).toBe(true);
    expect(result.verifyResult!.verified).toBe(true);

    const headTree = await getTree(repo, head);
    expect(result.execResult!.final_tree_sha).toBe(headTree);
  });

  test("file deletion preserves tree equivalence", async () => {
    const repo = makeTmpRepo();
    await initRepo(repo);

    writeFileSync(join(repo, "keep.ts"), "keep\n");
    writeFileSync(join(repo, "remove.ts"), "remove\n");
    await Bun.$`git -C ${repo} add -A`.quiet();
    await Bun.$`git -C ${repo} commit -m "Init"`.quiet();
    const base = await getSha(repo);

    // Only remove.ts changes between base and head (a pure deletion).
    await Bun.$`git -C ${repo} rm remove.ts`.quiet();
    await Bun.$`git -C ${repo} commit -m "Delete"`.quiet();
    const head = await getSha(repo);

    const groups: FileGroup[] = [
      { name: "Cleanup", type: "chore", description: "Remove file", files: ["remove.ts"] },
    ];
    const ownership = new Map([["remove.ts", "Cleanup"]]);

    const result = await runFullPipeline(repo, base, head, groups, ownership, ["Cleanup"]);

    expect(result.feasibility.feasible).toBe(true);
    expect(result.verifyResult!.verified).toBe(true);

    const headTree = await getTree(repo, head);
    expect(result.execResult!.final_tree_sha).toBe(headTree);
  });

  test("single group stacks to one commit", async () => {
    const repo = makeTmpRepo();
    await initRepo(repo);

    writeFileSync(join(repo, "a.ts"), "a\n");
    await Bun.$`git -C ${repo} add -A`.quiet();
    await Bun.$`git -C ${repo} commit -m "Init"`.quiet();
    const base = await getSha(repo);

    // One modification plus one addition, all owned by a single group.
    writeFileSync(join(repo, "a.ts"), "a-updated\n");
    writeFileSync(join(repo, "b.ts"), "b-new\n");
    await Bun.$`git -C ${repo} add -A`.quiet();
    await Bun.$`git -C ${repo} commit -m "Changes"`.quiet();
    const head = await getSha(repo);

    const groups: FileGroup[] = [
      { name: "Single", type: "feature", description: "All changes", files: ["a.ts", "b.ts"] },
    ];
    const ownership = new Map([
      ["a.ts", "Single"],
      ["b.ts", "Single"],
    ]);

    const result = await runFullPipeline(repo, base, head, groups, ownership, ["Single"]);

    expect(result.feasibility.feasible).toBe(true);
    expect(result.execResult!.group_commits.length).toBe(1);
    expect(result.verifyResult!.verified).toBe(true);

    const headTree = await getTree(repo, head);
    expect(result.execResult!.final_tree_sha).toBe(headTree);
  });

  test("declared dependency cycle is detected as infeasible", async () => {
    const repo = makeTmpRepo();
    await initRepo(repo);

    mkdirSync(join(repo, "src"), { recursive: true });
    writeFileSync(join(repo, "src", "a.ts"), "a\n");
    writeFileSync(join(repo, "src", "b.ts"), "b\n");
    await Bun.$`git -C ${repo} add -A`.quiet();
    await Bun.$`git -C ${repo} commit -m "Init"`.quiet();
    const base = await getSha(repo);

    writeFileSync(join(repo, "src", "a.ts"), "a-v2\n");
    await Bun.$`git -C ${repo} add -A`.quiet();
    await Bun.$`git -C ${repo} commit -m "Update a"`.quiet();

    writeFileSync(join(repo, "src", "b.ts"), "b-v2\n");
    await Bun.$`git -C ${repo} add -A`.quiet();
    await Bun.$`git -C ${repo} commit -m "Update b"`.quiet();
    const head = await getSha(repo);

    const ownership = new Map([
      ["src/a.ts", "GroupA"],
      ["src/b.ts", "GroupB"],
    ]);

    // This test drives feasibility directly (not runFullPipeline) so it can
    // inject a mutually-dependent declared_deps map: A→B and B→A.
    const deltas = await extractDeltas(repo, base, head);
    const feasibility = checkFeasibility({
      deltas,
      ownership,
      declared_deps: new Map([
        ["GroupA", ["GroupB"]],
        ["GroupB", ["GroupA"]],
      ]),
    });

    expect(feasibility.feasible).toBe(false);
    expect(feasibility.cycle).toBeDefined();
  });
});
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
import { describe, test, expect } from "bun:test";
|
|
2
|
+
import { mergeGroups } from "./merge-groups.ts";
|
|
3
|
+
import type { FileGroup } from "../types/output.ts";
|
|
4
|
+
|
|
5
|
+
describe("mergeGroups", () => {
  test("no-op when already at or below target", () => {
    const groups: FileGroup[] = [
      { name: "A", type: "feature", description: "A", files: ["a.ts"] },
      { name: "B", type: "feature", description: "B", files: ["b.ts"] },
    ];
    const ownership = new Map([["a.ts", "A"], ["b.ts", "B"]]);

    // Target (3) exceeds the group count (2), so nothing merges.
    const result = mergeGroups(groups, ownership, 3);
    expect(result.groups.length).toBe(2);
    expect(result.merges).toEqual([]);
  });

  test("merges 4 groups down to 2", () => {
    const groups: FileGroup[] = [
      { name: "Big", type: "feature", description: "Big", files: ["a.ts", "b.ts", "c.ts"] },
      { name: "Small1", type: "feature", description: "Small1", files: ["d.ts"] },
      { name: "Small2", type: "feature", description: "Small2", files: ["e.ts"] },
      { name: "Medium", type: "feature", description: "Medium", files: ["f.ts", "g.ts"] },
    ];
    const ownership = new Map([
      ["a.ts", "Big"], ["b.ts", "Big"], ["c.ts", "Big"],
      ["d.ts", "Small1"], ["e.ts", "Small2"],
      ["f.ts", "Medium"], ["g.ts", "Medium"],
    ]);

    const result = mergeGroups(groups, ownership, 2);
    expect(result.groups.length).toBe(2);
    expect(result.merges.length).toBe(2);

    // Invariant: merging must neither lose nor duplicate files.
    const allFiles = result.groups.flatMap((g) => g.files);
    expect(allFiles.sort()).toEqual(["a.ts", "b.ts", "c.ts", "d.ts", "e.ts", "f.ts", "g.ts"]);

    // Invariant: every file's owner must be a surviving group.
    for (const [path] of ownership) {
      expect(result.ownership.has(path)).toBe(true);
      const owner = result.ownership.get(path)!;
      expect(result.groups.some((g) => g.name === owner)).toBe(true);
    }
  });

  test("ownership updated for absorbed group files", () => {
    const groups: FileGroup[] = [
      { name: "A", type: "feature", description: "A", files: ["a1.ts", "a2.ts"] },
      { name: "B", type: "feature", description: "B", files: ["b.ts"] },
    ];
    const ownership = new Map([
      ["a1.ts", "A"], ["a2.ts", "A"], ["b.ts", "B"],
    ]);

    const result = mergeGroups(groups, ownership, 1);
    expect(result.groups.length).toBe(1);

    // The test does not pin which group survives — only that all files
    // end up owned by whichever one does.
    const survivorName = result.groups[0]!.name;
    expect(result.ownership.get("a1.ts")).toBe(survivorName);
    expect(result.ownership.get("a2.ts")).toBe(survivorName);
    expect(result.ownership.get("b.ts")).toBe(survivorName);
  });

  test("targetCount 0 or negative returns single group", () => {
    // NOTE(review): despite the title, the assertions show targetCount <= 0
    // is treated as a no-op (both groups preserved) — consider renaming.
    const groups: FileGroup[] = [
      { name: "A", type: "feature", description: "A", files: ["a.ts"] },
      { name: "B", type: "feature", description: "B", files: ["b.ts"] },
    ];
    const ownership = new Map([["a.ts", "A"], ["b.ts", "B"]]);

    const result = mergeGroups(groups, ownership, 0);
    expect(result.groups.length).toBe(2);
    expect(result.merges).toEqual([]);
  });

  test("single group unchanged", () => {
    const groups: FileGroup[] = [
      { name: "Only", type: "feature", description: "Only", files: ["a.ts", "b.ts"] },
    ];
    const ownership = new Map([["a.ts", "Only"], ["b.ts", "Only"]]);

    const result = mergeGroups(groups, ownership, 1);
    expect(result.groups.length).toBe(1);
    expect(result.merges).toEqual([]);
  });

  test("preserves key_changes from absorbed group", () => {
    const groups: FileGroup[] = [
      { name: "A", type: "feature", description: "A", files: ["a.ts"], key_changes: ["Added auth"] },
      { name: "B", type: "feature", description: "B", files: ["b.ts"], key_changes: ["Added UI"] },
    ];
    const ownership = new Map([["a.ts", "A"], ["b.ts", "B"]]);

    // After merging to one group, both sides' key_changes must survive.
    const result = mergeGroups(groups, ownership, 1);
    expect(result.groups[0]!.key_changes).toContain("Added auth");
    expect(result.groups[0]!.key_changes).toContain("Added UI");
  });
});
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import type { FileGroup } from "../types/output.ts";
|
|
2
|
+
|
|
3
|
+
export interface MergeResult {
|
|
4
|
+
groups: FileGroup[];
|
|
5
|
+
ownership: Map<string, string>;
|
|
6
|
+
merges: Array<{ absorbed: string; into: string }>;
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
export function mergeGroups(
|
|
10
|
+
groups: FileGroup[],
|
|
11
|
+
ownership: Map<string, string>,
|
|
12
|
+
targetCount: number,
|
|
13
|
+
): MergeResult {
|
|
14
|
+
if (targetCount <= 0 || groups.length <= targetCount) {
|
|
15
|
+
return { groups: [...groups], ownership: new Map(ownership), merges: [] };
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
const working = groups.map((g) => ({ ...g, files: [...g.files] }));
|
|
19
|
+
const newOwnership = new Map(ownership);
|
|
20
|
+
const merges: Array<{ absorbed: string; into: string }> = [];
|
|
21
|
+
|
|
22
|
+
while (working.length > targetCount) {
|
|
23
|
+
let minSize = Infinity;
|
|
24
|
+
let minIdx = -1;
|
|
25
|
+
|
|
26
|
+
for (let i = 0; i < working.length; i++) {
|
|
27
|
+
const size = working[i]!.files.length;
|
|
28
|
+
if (size < minSize) {
|
|
29
|
+
minSize = size;
|
|
30
|
+
minIdx = i;
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
if (minIdx === -1) break;
|
|
35
|
+
|
|
36
|
+
const smallest = working[minIdx]!;
|
|
37
|
+
|
|
38
|
+
let bestNeighborIdx = minIdx === 0 ? 1 : minIdx - 1;
|
|
39
|
+
if (working.length > 2) {
|
|
40
|
+
const left = minIdx > 0 ? minIdx - 1 : -1;
|
|
41
|
+
const right = minIdx < working.length - 1 ? minIdx + 1 : -1;
|
|
42
|
+
|
|
43
|
+
if (left >= 0 && right >= 0) {
|
|
44
|
+
bestNeighborIdx = working[left]!.files.length <= working[right]!.files.length
|
|
45
|
+
? left
|
|
46
|
+
: right;
|
|
47
|
+
} else if (left >= 0) {
|
|
48
|
+
bestNeighborIdx = left;
|
|
49
|
+
} else {
|
|
50
|
+
bestNeighborIdx = right;
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
const neighbor = working[bestNeighborIdx]!;
|
|
55
|
+
|
|
56
|
+
const absorbed = smallest.files.length <= neighbor.files.length ? smallest : neighbor;
|
|
57
|
+
const survivor = absorbed === smallest ? neighbor : smallest;
|
|
58
|
+
|
|
59
|
+
for (const file of absorbed.files) {
|
|
60
|
+
if (!survivor.files.includes(file)) {
|
|
61
|
+
survivor.files.push(file);
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
if (absorbed.key_changes) {
|
|
66
|
+
survivor.key_changes = [
|
|
67
|
+
...(survivor.key_changes ?? []),
|
|
68
|
+
...absorbed.key_changes,
|
|
69
|
+
];
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
for (const [path, groupId] of newOwnership) {
|
|
73
|
+
if (groupId === absorbed.name) {
|
|
74
|
+
newOwnership.set(path, survivor.name);
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
merges.push({ absorbed: absorbed.name, into: survivor.name });
|
|
79
|
+
|
|
80
|
+
const removeIdx = working.indexOf(absorbed);
|
|
81
|
+
if (removeIdx >= 0) {
|
|
82
|
+
working.splice(removeIdx, 1);
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
return { groups: working, ownership: newOwnership, merges };
|
|
87
|
+
}
|
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
import { describe, test, expect } from "bun:test";
|
|
2
|
+
import { detectAmbiguousPaths, partitionGroups, buildStackPartitionPrompt } from "./partition.ts";
|
|
3
|
+
import type { FileGroup } from "../types/output.ts";
|
|
4
|
+
import type { LlmClient, LlmResponse } from "../llm/client.ts";
|
|
5
|
+
|
|
6
|
+
// Shared fixture: "src/shared.ts" is claimed by both Auth and UI (ambiguous),
// "src/auth.ts" / "src/ui.tsx" are exclusive, and tsconfig.json belongs only
// to Config.
const mockGroups: FileGroup[] = [
  { name: "Auth", type: "feature", description: "Authentication changes", files: ["src/auth.ts", "src/shared.ts"] },
  { name: "UI", type: "feature", description: "UI updates", files: ["src/ui.tsx", "src/shared.ts"] },
  { name: "Config", type: "config", description: "Config changes", files: ["tsconfig.json"] },
];
|
|
11
|
+
|
|
12
|
+
describe("detectAmbiguousPaths", () => {
  test("identifies exclusive, ambiguous, and unassigned files", () => {
    const report = detectAmbiguousPaths({
      groups: mockGroups,
      changed_files: ["src/auth.ts", "src/ui.tsx", "src/shared.ts", "src/unknown.ts"],
    });

    // Files listed by exactly one group are exclusive to it.
    expect(report.exclusive.get("src/auth.ts")).toBe("Auth");
    expect(report.exclusive.get("src/ui.tsx")).toBe("UI");

    // shared.ts appears in both Auth's and UI's file lists → ambiguous.
    expect(report.ambiguous).toEqual([
      { path: "src/shared.ts", groups: ["Auth", "UI"] },
    ]);

    // unknown.ts is a changed file no group claims → unassigned.
    expect(report.unassigned).toEqual(["src/unknown.ts"]);
  });

  test("all files exclusive → no ambiguous or unassigned", () => {
    const groups: FileGroup[] = [
      { name: "A", type: "feature", description: "A", files: ["a.ts"] },
      { name: "B", type: "feature", description: "B", files: ["b.ts"] },
    ];

    const report = detectAmbiguousPaths({
      groups,
      changed_files: ["a.ts", "b.ts"],
    });

    expect(report.exclusive.size).toBe(2);
    expect(report.ambiguous).toEqual([]);
    expect(report.unassigned).toEqual([]);
  });
});
|
|
45
|
+
|
|
46
|
+
describe("buildStackPartitionPrompt", () => {
  test("generates prompt with ambiguous and unassigned files", () => {
    const result = buildStackPartitionPrompt(
      [{ path: "src/shared.ts", groups: ["Auth", "UI"] }],
      ["src/unknown.ts"],
      mockGroups,
      [{ path: "src/shared.ts", status: "modified", summary: "Shared utilities" }],
      [],
    );

    // The system prompt must instruct single-group assignment, and the user
    // prompt must surface every contested/unassigned path and candidate group.
    expect(result.system).toContain("exactly one group");
    expect(result.user).toContain("src/shared.ts");
    expect(result.user).toContain("src/unknown.ts");
    expect(result.user).toContain("Auth");
    expect(result.user).toContain("UI");
  });
});
|
|
63
|
+
|
|
64
|
+
describe("partitionGroups", () => {
  test("returns immediately when all files are exclusive (no LLM call)", async () => {
    // Mock client records whether complete() was ever invoked.
    let llmCalled = false;
    const mockClient: LlmClient = {
      complete: async () => {
        llmCalled = true;
        return { content: "{}", model: "test", usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 } };
      },
      completeStream: async () => {
        return { content: "{}", model: "test", usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 } };
      },
    };

    const groups: FileGroup[] = [
      { name: "A", type: "feature", description: "A", files: ["a.ts"] },
      { name: "B", type: "feature", description: "B", files: ["b.ts"] },
    ];

    const result = await partitionGroups(
      mockClient,
      groups,
      ["a.ts", "b.ts"],
      [],
      [],
    );

    // No ambiguity, no unassigned files → the LLM must be skipped entirely.
    expect(llmCalled).toBe(false);
    expect(result.ownership.get("a.ts")).toBe("A");
    expect(result.ownership.get("b.ts")).toBe("B");
    expect(result.reattributed).toEqual([]);
  });

  test("calls LLM when ambiguous files exist and parses response", async () => {
    const mockResponse: LlmResponse = {
      content: JSON.stringify({
        assignments: [
          { path: "src/shared.ts", group: "Auth", reason: "Primarily used for auth" },
          { path: "src/unknown.ts", group: "Config", reason: "Configuration file" },
        ],
        shared_foundation: null,
      }),
      model: "test",
      usage: { prompt_tokens: 100, completion_tokens: 50, total_tokens: 150 },
    };

    const mockClient: LlmClient = {
      complete: async () => mockResponse,
      completeStream: async () => mockResponse,
    };

    const result = await partitionGroups(
      mockClient,
      mockGroups,
      ["src/auth.ts", "src/ui.tsx", "src/shared.ts", "src/unknown.ts"],
      [{ path: "src/shared.ts", status: "modified", summary: "Shared utils" }],
      [],
    );

    // Exclusive files keep their original owner; contested/unassigned files
    // follow the LLM's assignments.
    expect(result.ownership.get("src/auth.ts")).toBe("Auth");
    expect(result.ownership.get("src/ui.tsx")).toBe("UI");
    expect(result.ownership.get("src/shared.ts")).toBe("Auth");
    expect(result.ownership.get("src/unknown.ts")).toBe("Config");

    // Both LLM-decided files are surfaced in the reattribution log.
    expect(result.reattributed.length).toBe(2);
    expect(result.reattributed[0]?.path).toBe("src/shared.ts");
    expect(result.reattributed[0]?.to_group).toBe("Auth");
    expect(result.reattributed[0]?.from_groups).toEqual(["Auth", "UI"]);
  });

  test("handles LLM response with code block wrapper", async () => {
    // Same valid payload, but wrapped in a markdown ```json fence, which
    // the parser must strip.
    const mockResponse: LlmResponse = {
      content: "```json\n" + JSON.stringify({
        assignments: [
          { path: "src/shared.ts", group: "Auth", reason: "Auth" },
        ],
        shared_foundation: null,
      }) + "\n```",
      model: "test",
      usage: { prompt_tokens: 100, completion_tokens: 50, total_tokens: 150 },
    };

    const mockClient: LlmClient = {
      complete: async () => mockResponse,
      completeStream: async () => mockResponse,
    };

    const groups: FileGroup[] = [
      { name: "Auth", type: "feature", description: "Auth", files: ["src/auth.ts", "src/shared.ts"] },
      { name: "UI", type: "feature", description: "UI", files: ["src/shared.ts"] },
    ];

    const result = await partitionGroups(
      mockClient,
      groups,
      ["src/auth.ts", "src/shared.ts"],
      [],
      [],
    );

    expect(result.ownership.get("src/shared.ts")).toBe("Auth");
  });

  test("warns on invalid group name in LLM response", async () => {
    const mockResponse: LlmResponse = {
      content: JSON.stringify({
        assignments: [
          { path: "src/shared.ts", group: "NonExistentGroup", reason: "Bad" },
        ],
        shared_foundation: null,
      }),
      model: "test",
      usage: { prompt_tokens: 100, completion_tokens: 50, total_tokens: 150 },
    };

    const mockClient: LlmClient = {
      complete: async () => mockResponse,
      completeStream: async () => mockResponse,
    };

    const groups: FileGroup[] = [
      { name: "Auth", type: "feature", description: "Auth", files: ["src/auth.ts", "src/shared.ts"] },
      { name: "UI", type: "feature", description: "UI", files: ["src/shared.ts"] },
    ];

    const result = await partitionGroups(
      mockClient,
      groups,
      ["src/auth.ts", "src/shared.ts"],
      [],
      [],
    );

    // Unknown group names are rejected with a warning, and the file is then
    // force-assigned (these assertions show it lands on "UI").
    expect(result.warnings.some((w) => w.includes("Unknown group"))).toBe(true);
    expect(result.ownership.get("src/shared.ts")).toBe("UI");
    expect(result.warnings.some((w) => w.includes("force-assigned"))).toBe(true);
  });

  test("force-assigns unassigned files to last group when LLM omits them", async () => {
    // The LLM returns no assignments at all, leaving src/orphan.ts unowned.
    const mockResponse: LlmResponse = {
      content: JSON.stringify({
        assignments: [],
        shared_foundation: null,
      }),
      model: "test",
      usage: { prompt_tokens: 100, completion_tokens: 50, total_tokens: 150 },
    };

    const mockClient: LlmClient = {
      complete: async () => mockResponse,
      completeStream: async () => mockResponse,
    };

    const groups: FileGroup[] = [
      { name: "Auth", type: "feature", description: "Auth", files: ["src/auth.ts"] },
      { name: "UI", type: "feature", description: "UI", files: ["src/ui.tsx"] },
    ];

    const result = await partitionGroups(
      mockClient,
      groups,
      ["src/auth.ts", "src/ui.tsx", "src/orphan.ts"],
      [],
      [],
    );

    // Omitted files fall back to the last group ("UI"), with a warning and
    // a reattribution record so the fallback is visible to callers.
    expect(result.ownership.get("src/orphan.ts")).toBe("UI");
    expect(result.reattributed.some((r) => r.path === "src/orphan.ts")).toBe(true);
    expect(result.warnings.some((w) => w.includes("force-assigned"))).toBe(true);
  });
});
|