newpr 0.6.5 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/history/store.ts +25 -0
- package/src/stack/balance.ts +128 -0
- package/src/stack/coupling.test.ts +158 -0
- package/src/stack/coupling.ts +135 -0
- package/src/stack/delta.test.ts +223 -0
- package/src/stack/delta.ts +264 -0
- package/src/stack/execute.test.ts +176 -0
- package/src/stack/execute.ts +194 -0
- package/src/stack/feasibility.test.ts +185 -0
- package/src/stack/feasibility.ts +286 -0
- package/src/stack/integration.test.ts +266 -0
- package/src/stack/merge-groups.test.ts +97 -0
- package/src/stack/merge-groups.ts +87 -0
- package/src/stack/partition.test.ts +233 -0
- package/src/stack/partition.ts +273 -0
- package/src/stack/plan.test.ts +154 -0
- package/src/stack/plan.ts +139 -0
- package/src/stack/pr-title.ts +64 -0
- package/src/stack/publish.ts +96 -0
- package/src/stack/split.ts +173 -0
- package/src/stack/types.ts +202 -0
- package/src/stack/verify.test.ts +137 -0
- package/src/stack/verify.ts +201 -0
- package/src/web/client/components/FeasibilityAlert.tsx +64 -0
- package/src/web/client/components/InputScreen.tsx +100 -89
- package/src/web/client/components/ResultsScreen.tsx +10 -2
- package/src/web/client/components/StackGroupCard.tsx +171 -0
- package/src/web/client/components/StackWarnings.tsx +135 -0
- package/src/web/client/hooks/useStack.ts +301 -0
- package/src/web/client/panels/StackPanel.tsx +289 -0
- package/src/web/server/routes.ts +114 -0
- package/src/web/server/stack-manager.ts +580 -0
- package/src/web/server.ts +15 -0
- package/src/web/styles/built.css +1 -1
package/package.json
CHANGED
package/src/history/store.ts
CHANGED
|
@@ -221,6 +221,31 @@ export async function loadSlidesSidecar(
|
|
|
221
221
|
}
|
|
222
222
|
}
|
|
223
223
|
|
|
224
|
+
export async function saveStackSidecar(
|
|
225
|
+
id: string,
|
|
226
|
+
data: unknown,
|
|
227
|
+
): Promise<void> {
|
|
228
|
+
ensureDirs();
|
|
229
|
+
const tmpPath = join(SESSIONS_DIR, `${id}.stack.json.tmp`);
|
|
230
|
+
const finalPath = join(SESSIONS_DIR, `${id}.stack.json`);
|
|
231
|
+
await Bun.write(tmpPath, JSON.stringify(data, null, 2));
|
|
232
|
+
const { renameSync } = await import("node:fs");
|
|
233
|
+
renameSync(tmpPath, finalPath);
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
export async function loadStackSidecar(
|
|
237
|
+
id: string,
|
|
238
|
+
): Promise<Record<string, unknown> | null> {
|
|
239
|
+
try {
|
|
240
|
+
const filePath = join(SESSIONS_DIR, `${id}.stack.json`);
|
|
241
|
+
const file = Bun.file(filePath);
|
|
242
|
+
if (!(await file.exists())) return null;
|
|
243
|
+
return JSON.parse(await file.text()) as Record<string, unknown>;
|
|
244
|
+
} catch {
|
|
245
|
+
return null;
|
|
246
|
+
}
|
|
247
|
+
}
|
|
248
|
+
|
|
224
249
|
/** Returns the root directory where history data (and sidecars) are stored. */
export function getHistoryPath(): string {
  return HISTORY_DIR;
}
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
import type { LlmClient } from "../llm/client.ts";
|
|
2
|
+
import type { FileGroup } from "../types/output.ts";
|
|
3
|
+
import type { StackWarning } from "./types.ts";
|
|
4
|
+
|
|
5
|
+
/** Outcome of a group-rebalancing pass (see rebalanceGroups). */
export interface BalanceResult {
  // Final path -> group-id mapping after any moves were applied.
  ownership: Map<string, string>;
  // Warnings describing what was rebalanced, or why rebalancing was skipped.
  warnings: StackWarning[];
  // Every file move that was performed, with source and destination group.
  moves: Array<{ path: string; from: string; to: string }>;
}
|
|
10
|
+
|
|
11
|
+
interface GroupSize {
|
|
12
|
+
id: string;
|
|
13
|
+
fileCount: number;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
function detectImbalance(groups: GroupSize[]): { oversized: GroupSize[]; threshold: number } | null {
|
|
17
|
+
if (groups.length < 2) return null;
|
|
18
|
+
|
|
19
|
+
const counts = groups.map((g) => g.fileCount).sort((a, b) => a - b);
|
|
20
|
+
const median = counts[Math.floor(counts.length / 2)]!;
|
|
21
|
+
const threshold = Math.max(median * 3, 15);
|
|
22
|
+
|
|
23
|
+
const oversized = groups.filter((g) => g.fileCount > threshold);
|
|
24
|
+
if (oversized.length === 0) return null;
|
|
25
|
+
|
|
26
|
+
return { oversized, threshold };
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
/**
 * Ask the LLM to move files out of oversized groups into better-fitting ones.
 *
 * Detects imbalance via detectImbalance (3x-median / 15-file floor), then
 * prompts the model for a JSON list of file moves. Moves are validated
 * (known target group, file currently owned, actually changes group) before
 * being applied. Never fails the pipeline: if the LLM call or JSON parsing
 * throws, the original ownership is returned with a "skipped" warning.
 *
 * @param llmClient - Client used for the single rebalancing completion.
 * @param ownership - Current path -> group-id mapping (not mutated).
 * @param groups - Group metadata used to build the prompt and validate targets.
 * @returns New ownership map plus warnings and the list of applied moves.
 */
export async function rebalanceGroups(
  llmClient: LlmClient,
  ownership: Map<string, string>,
  groups: FileGroup[],
): Promise<BalanceResult> {
  // Invert ownership into group-id -> [files] to measure group sizes.
  const groupSizes = new Map<string, string[]>();
  for (const [path, groupId] of ownership) {
    const files = groupSizes.get(groupId) ?? [];
    files.push(path);
    groupSizes.set(groupId, files);
  }

  const sizeList: GroupSize[] = [...groupSizes.entries()].map(([id, files]) => ({
    id,
    fileCount: files.length,
  }));

  // Nothing oversized -> return the input ownership unchanged.
  const imbalance = detectImbalance(sizeList);
  if (!imbalance) {
    return { ownership, warnings: [], moves: [] };
  }

  // NOTE(review): group sizes are looked up by g.name, so ownership values
  // are assumed to be group names — confirm against the partitioning code.
  const groupDescriptions = groups
    .map((g) => {
      const files = groupSizes.get(g.name) ?? [];
      return `- "${g.name}" (${g.type}, ${files.length} files): ${g.description}`;
    })
    .join("\n");

  const oversizedDetails = imbalance.oversized
    .map((g) => {
      const files = groupSizes.get(g.id) ?? [];
      return `"${g.id}" (${files.length} files):\n${files.map((f) => ` - ${f}`).join("\n")}`;
    })
    .join("\n\n");

  const system = `You are a code organization expert. Your task is to rebalance PR groups so they are more evenly sized.

Rules:
1. Move files from oversized groups to other groups where they logically fit
2. Only move files that genuinely belong better in another group based on file path and purpose
3. If a file doesn't fit anywhere else, leave it in the current group
4. Do NOT create new groups
5. Aim for each group having roughly similar file counts
6. Prioritize logical cohesion over perfect balance — don't force moves that don't make sense
7. Move at most 50% of files from any oversized group

Response format (JSON only, no markdown):
{
  "moves": [
    { "path": "src/foo.ts", "to": "Target Group Name", "reason": "brief reason" }
  ]
}

If no moves make sense, return: { "moves": [] }`;

  const user = `Groups:\n${groupDescriptions}\n\nOversized groups (>${imbalance.threshold} files):\n${oversizedDetails}\n\nSuggest file moves to rebalance.`;

  const newOwnership = new Map(ownership);
  const moves: Array<{ path: string; from: string; to: string }> = [];
  const warnings: StackWarning[] = [];

  try {
    const response = await llmClient.complete(system, user);
    // Strip markdown code fences the model may wrap around the JSON.
    const cleaned = response.content.replace(/```(?:json)?\s*/g, "").replace(/```\s*/g, "").trim();
    const parsed = JSON.parse(cleaned) as { moves: Array<{ path: string; to: string; reason: string }> };

    const validGroupNames = new Set(groups.map((g) => g.name));

    for (const move of parsed.moves ?? []) {
      // Ignore malformed, unknown-target, unowned, or no-op moves.
      if (!move.path || !move.to) continue;
      if (!validGroupNames.has(move.to)) continue;

      const currentGroup = newOwnership.get(move.path);
      if (!currentGroup || currentGroup === move.to) continue;

      newOwnership.set(move.path, move.to);
      moves.push({ path: move.path, from: currentGroup, to: move.to });
    }

    if (moves.length > 0) {
      warnings.push({
        category: "grouping",
        severity: "info",
        title: `${moves.length} file(s) rebalanced across groups`,
        message: "Files were moved from oversized groups to better-fitting groups for more balanced PRs",
        details: moves.map((m) => `${m.path}: "${m.from}" → "${m.to}"`),
      });
    }
  } catch {
    // Best-effort: LLM/parse failure degrades to the original distribution.
    warnings.push({
      category: "system",
      severity: "warn",
      title: "Group rebalancing skipped",
      message: "AI rebalancing failed — proceeding with original distribution",
    });
  }

  return { ownership: newOwnership, warnings, moves };
}
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
import { describe, test, expect } from "bun:test";
|
|
2
|
+
import { applyCouplingRules } from "./coupling.ts";
|
|
3
|
+
|
|
4
|
+
describe("applyCouplingRules", () => {
|
|
5
|
+
test("package.json + bun.lockb in different groups → forced to same group", () => {
|
|
6
|
+
const ownership = new Map([
|
|
7
|
+
["package.json", "group-a"],
|
|
8
|
+
["bun.lockb", "group-b"],
|
|
9
|
+
["src/index.ts", "group-a"],
|
|
10
|
+
]);
|
|
11
|
+
const changedFiles = ["package.json", "bun.lockb", "src/index.ts"];
|
|
12
|
+
const groupOrder = ["group-a", "group-b"];
|
|
13
|
+
|
|
14
|
+
const result = applyCouplingRules(ownership, changedFiles, groupOrder);
|
|
15
|
+
|
|
16
|
+
// Both should be in group-a (earliest)
|
|
17
|
+
expect(result.ownership.get("package.json")).toBe("group-a");
|
|
18
|
+
expect(result.ownership.get("bun.lockb")).toBe("group-a");
|
|
19
|
+
expect(result.ownership.get("src/index.ts")).toBe("group-a");
|
|
20
|
+
|
|
21
|
+
// Should have warning
|
|
22
|
+
expect(result.warnings.length).toBeGreaterThan(0);
|
|
23
|
+
expect(result.warnings[0]).toContain("package.json");
|
|
24
|
+
expect(result.warnings[0]).toContain("bun.lockb");
|
|
25
|
+
|
|
26
|
+
// Should have forced merge
|
|
27
|
+
expect(result.forced_merges).toEqual([
|
|
28
|
+
{
|
|
29
|
+
path: "bun.lockb",
|
|
30
|
+
from_group: "group-b",
|
|
31
|
+
to_group: "group-a",
|
|
32
|
+
},
|
|
33
|
+
]);
|
|
34
|
+
});
|
|
35
|
+
|
|
36
|
+
test("coupling set in same group → no change", () => {
|
|
37
|
+
const ownership = new Map([
|
|
38
|
+
["package.json", "group-a"],
|
|
39
|
+
["bun.lockb", "group-a"],
|
|
40
|
+
["src/index.ts", "group-b"],
|
|
41
|
+
]);
|
|
42
|
+
const changedFiles = ["package.json", "bun.lockb", "src/index.ts"];
|
|
43
|
+
const groupOrder = ["group-a", "group-b"];
|
|
44
|
+
|
|
45
|
+
const result = applyCouplingRules(ownership, changedFiles, groupOrder);
|
|
46
|
+
|
|
47
|
+
// No changes
|
|
48
|
+
expect(result.ownership.get("package.json")).toBe("group-a");
|
|
49
|
+
expect(result.ownership.get("bun.lockb")).toBe("group-a");
|
|
50
|
+
expect(result.ownership.get("src/index.ts")).toBe("group-b");
|
|
51
|
+
|
|
52
|
+
// No warnings
|
|
53
|
+
expect(result.warnings).toEqual([]);
|
|
54
|
+
expect(result.forced_merges).toEqual([]);
|
|
55
|
+
});
|
|
56
|
+
|
|
57
|
+
test("no coupling files → no change", () => {
|
|
58
|
+
const ownership = new Map([
|
|
59
|
+
["src/index.ts", "group-a"],
|
|
60
|
+
["src/utils.ts", "group-b"],
|
|
61
|
+
]);
|
|
62
|
+
const changedFiles = ["src/index.ts", "src/utils.ts"];
|
|
63
|
+
const groupOrder = ["group-a", "group-b"];
|
|
64
|
+
|
|
65
|
+
const result = applyCouplingRules(ownership, changedFiles, groupOrder);
|
|
66
|
+
|
|
67
|
+
// No changes
|
|
68
|
+
expect(result.ownership.get("src/index.ts")).toBe("group-a");
|
|
69
|
+
expect(result.ownership.get("src/utils.ts")).toBe("group-b");
|
|
70
|
+
|
|
71
|
+
// No warnings
|
|
72
|
+
expect(result.warnings).toEqual([]);
|
|
73
|
+
expect(result.forced_merges).toEqual([]);
|
|
74
|
+
});
|
|
75
|
+
|
|
76
|
+
test("tsconfig.json + tsconfig.base.json → coupled", () => {
|
|
77
|
+
const ownership = new Map([
|
|
78
|
+
["tsconfig.json", "group-a"],
|
|
79
|
+
["tsconfig.base.json", "group-b"],
|
|
80
|
+
["src/index.ts", "group-a"],
|
|
81
|
+
]);
|
|
82
|
+
const changedFiles = ["tsconfig.json", "tsconfig.base.json", "src/index.ts"];
|
|
83
|
+
const groupOrder = ["group-a", "group-b"];
|
|
84
|
+
|
|
85
|
+
const result = applyCouplingRules(ownership, changedFiles, groupOrder);
|
|
86
|
+
|
|
87
|
+
// Both tsconfig files should be in group-a
|
|
88
|
+
expect(result.ownership.get("tsconfig.json")).toBe("group-a");
|
|
89
|
+
expect(result.ownership.get("tsconfig.base.json")).toBe("group-a");
|
|
90
|
+
|
|
91
|
+
// Should have warning
|
|
92
|
+
expect(result.warnings.length).toBeGreaterThan(0);
|
|
93
|
+
expect(result.warnings[0]).toContain("tsconfig");
|
|
94
|
+
|
|
95
|
+
// Should have forced merge
|
|
96
|
+
expect(result.forced_merges).toEqual([
|
|
97
|
+
{
|
|
98
|
+
path: "tsconfig.base.json",
|
|
99
|
+
from_group: "group-b",
|
|
100
|
+
to_group: "group-a",
|
|
101
|
+
},
|
|
102
|
+
]);
|
|
103
|
+
});
|
|
104
|
+
|
|
105
|
+
test(".gitattributes always in earliest group", () => {
|
|
106
|
+
const ownership = new Map([
|
|
107
|
+
[".gitattributes", "group-b"],
|
|
108
|
+
["src/index.ts", "group-a"],
|
|
109
|
+
]);
|
|
110
|
+
const changedFiles = [".gitattributes", "src/index.ts"];
|
|
111
|
+
const groupOrder = ["group-a", "group-b"];
|
|
112
|
+
|
|
113
|
+
const result = applyCouplingRules(ownership, changedFiles, groupOrder);
|
|
114
|
+
|
|
115
|
+
// .gitattributes should be in group-a (earliest)
|
|
116
|
+
expect(result.ownership.get(".gitattributes")).toBe("group-b");
|
|
117
|
+
// Note: .gitattributes is in its own coupling set, so it won't move
|
|
118
|
+
// unless there are other files in the same set
|
|
119
|
+
});
|
|
120
|
+
|
|
121
|
+
test("multiple lockfiles in different groups → all forced to earliest", () => {
|
|
122
|
+
const ownership = new Map([
|
|
123
|
+
["package.json", "group-a"],
|
|
124
|
+
["bun.lockb", "group-b"],
|
|
125
|
+
["yarn.lock", "group-c"],
|
|
126
|
+
]);
|
|
127
|
+
const changedFiles = ["package.json", "bun.lockb", "yarn.lock"];
|
|
128
|
+
const groupOrder = ["group-a", "group-b", "group-c"];
|
|
129
|
+
|
|
130
|
+
const result = applyCouplingRules(ownership, changedFiles, groupOrder);
|
|
131
|
+
|
|
132
|
+
// All should be in group-a
|
|
133
|
+
expect(result.ownership.get("package.json")).toBe("group-a");
|
|
134
|
+
expect(result.ownership.get("bun.lockb")).toBe("group-a");
|
|
135
|
+
expect(result.ownership.get("yarn.lock")).toBe("group-a");
|
|
136
|
+
|
|
137
|
+
// Should have 2 forced merges
|
|
138
|
+
expect(result.forced_merges.length).toBe(2);
|
|
139
|
+
});
|
|
140
|
+
|
|
141
|
+
test("does not mutate input ownership map", () => {
|
|
142
|
+
const ownership = new Map([
|
|
143
|
+
["package.json", "group-a"],
|
|
144
|
+
["bun.lockb", "group-b"],
|
|
145
|
+
]);
|
|
146
|
+
const originalSize = ownership.size;
|
|
147
|
+
const originalPackageGroup = ownership.get("package.json");
|
|
148
|
+
|
|
149
|
+
applyCouplingRules(ownership, ["package.json", "bun.lockb"], [
|
|
150
|
+
"group-a",
|
|
151
|
+
"group-b",
|
|
152
|
+
]);
|
|
153
|
+
|
|
154
|
+
// Original map unchanged
|
|
155
|
+
expect(ownership.size).toBe(originalSize);
|
|
156
|
+
expect(ownership.get("package.json")).toBe(originalPackageGroup);
|
|
157
|
+
});
|
|
158
|
+
});
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
import type { StackWarning } from "./types.ts";
|
|
2
|
+
|
|
3
|
+
export interface ForcedMerge {
|
|
4
|
+
path: string;
|
|
5
|
+
from_group: string;
|
|
6
|
+
to_group: string;
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
export interface CouplingResult {
|
|
10
|
+
ownership: Map<string, string>;
|
|
11
|
+
warnings: string[];
|
|
12
|
+
structured_warnings: StackWarning[];
|
|
13
|
+
forced_merges: ForcedMerge[];
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Hardcoded atomic coupling sets for v1.
|
|
18
|
+
* Files in the same set MUST be in the same group.
|
|
19
|
+
*/
|
|
20
|
+
const COUPLING_SETS: Array<Set<string> | ((path: string) => boolean)> = [
|
|
21
|
+
// Lockfiles + package.json
|
|
22
|
+
new Set([
|
|
23
|
+
"package.json",
|
|
24
|
+
"bun.lockb",
|
|
25
|
+
"package-lock.json",
|
|
26
|
+
"yarn.lock",
|
|
27
|
+
"pnpm-lock.yaml",
|
|
28
|
+
]),
|
|
29
|
+
|
|
30
|
+
// .gitattributes (always earliest group)
|
|
31
|
+
new Set([".gitattributes"]),
|
|
32
|
+
|
|
33
|
+
// tsconfig family (glob pattern)
|
|
34
|
+
(path: string) => path === "tsconfig.json" || /^tsconfig\..*\.json$/.test(path),
|
|
35
|
+
];
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Apply coupling rules to ownership map.
|
|
39
|
+
*
|
|
40
|
+
* When files in a coupling set span multiple groups:
|
|
41
|
+
* - Move all to the earliest group (by groupOrder)
|
|
42
|
+
* - Return warnings + forcedMerges
|
|
43
|
+
*
|
|
44
|
+
* @param ownership - Current path -> groupId mapping
|
|
45
|
+
* @param changedFiles - All changed file paths
|
|
46
|
+
* @param groupOrder - Ordered list of group IDs (earliest first)
|
|
47
|
+
* @returns Modified ownership + warnings + forced merges
|
|
48
|
+
*/
|
|
49
|
+
export function applyCouplingRules(
|
|
50
|
+
ownership: Map<string, string>,
|
|
51
|
+
changedFiles: string[],
|
|
52
|
+
groupOrder: string[],
|
|
53
|
+
): CouplingResult {
|
|
54
|
+
const newOwnership = new Map(ownership);
|
|
55
|
+
const warnings: string[] = [];
|
|
56
|
+
const structuredWarnings: StackWarning[] = [];
|
|
57
|
+
const forcedMerges: ForcedMerge[] = [];
|
|
58
|
+
|
|
59
|
+
// Build group rank map for ordering
|
|
60
|
+
const groupRank = new Map<string, number>();
|
|
61
|
+
groupOrder.forEach((groupId, idx) => groupRank.set(groupId, idx));
|
|
62
|
+
|
|
63
|
+
// Process each coupling set
|
|
64
|
+
for (const couplingSet of COUPLING_SETS) {
|
|
65
|
+
const matchedFiles: string[] = [];
|
|
66
|
+
|
|
67
|
+
// Find all changed files in this coupling set
|
|
68
|
+
for (const file of changedFiles) {
|
|
69
|
+
const matches =
|
|
70
|
+
couplingSet instanceof Set
|
|
71
|
+
? couplingSet.has(file)
|
|
72
|
+
: couplingSet(file);
|
|
73
|
+
|
|
74
|
+
if (matches) {
|
|
75
|
+
matchedFiles.push(file);
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
if (matchedFiles.length === 0) continue;
|
|
80
|
+
|
|
81
|
+
// Collect groups these files belong to
|
|
82
|
+
const groups = new Set<string>();
|
|
83
|
+
for (const file of matchedFiles) {
|
|
84
|
+
const group = newOwnership.get(file);
|
|
85
|
+
if (group) groups.add(group);
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
// If all in same group, no action needed
|
|
89
|
+
if (groups.size <= 1) continue;
|
|
90
|
+
|
|
91
|
+
// Find earliest group
|
|
92
|
+
const sortedGroups = Array.from(groups).sort(
|
|
93
|
+
(a, b) => (groupRank.get(a) ?? Infinity) - (groupRank.get(b) ?? Infinity),
|
|
94
|
+
);
|
|
95
|
+
const targetGroup = sortedGroups[0];
|
|
96
|
+
if (!targetGroup) continue;
|
|
97
|
+
|
|
98
|
+
// Move all files to earliest group
|
|
99
|
+
for (const file of matchedFiles) {
|
|
100
|
+
const currentGroup = newOwnership.get(file);
|
|
101
|
+
if (currentGroup && currentGroup !== targetGroup) {
|
|
102
|
+
newOwnership.set(file, targetGroup);
|
|
103
|
+
forcedMerges.push({
|
|
104
|
+
path: file,
|
|
105
|
+
from_group: currentGroup,
|
|
106
|
+
to_group: targetGroup,
|
|
107
|
+
});
|
|
108
|
+
} else if (!currentGroup) {
|
|
109
|
+
// File not in ownership map, assign to target group
|
|
110
|
+
newOwnership.set(file, targetGroup);
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
// Add warning
|
|
115
|
+
const fileList = matchedFiles.join(", ");
|
|
116
|
+
const groupList = Array.from(groups).join(", ");
|
|
117
|
+
warnings.push(
|
|
118
|
+
`Coupling constraint: [${fileList}] were in groups [${groupList}], forced to earliest group "${targetGroup}"`,
|
|
119
|
+
);
|
|
120
|
+
structuredWarnings.push({
|
|
121
|
+
category: "coupling",
|
|
122
|
+
severity: "info",
|
|
123
|
+
title: `${forcedMerges.length} file(s) moved to "${targetGroup}" by coupling rule`,
|
|
124
|
+
message: `Files [${fileList}] must stay together — moved from [${groupList}] to earliest group`,
|
|
125
|
+
details: matchedFiles,
|
|
126
|
+
});
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
return {
|
|
130
|
+
ownership: newOwnership,
|
|
131
|
+
warnings,
|
|
132
|
+
structured_warnings: structuredWarnings,
|
|
133
|
+
forced_merges: forcedMerges,
|
|
134
|
+
};
|
|
135
|
+
}
|
|
@@ -0,0 +1,223 @@
|
|
|
1
|
+
import { describe, test, expect, beforeAll, afterAll } from "bun:test";
|
|
2
|
+
import { mkdtempSync, rmSync } from "node:fs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { tmpdir } from "node:os";
|
|
5
|
+
import { extractDeltas, buildRenameMap } from "./delta.ts";
|
|
6
|
+
|
|
7
|
+
let testRepoPath: string;
|
|
8
|
+
|
|
9
|
+
beforeAll(async () => {
|
|
10
|
+
testRepoPath = mkdtempSync(join(tmpdir(), "delta-test-"));
|
|
11
|
+
|
|
12
|
+
await Bun.$`git init ${testRepoPath}`.quiet();
|
|
13
|
+
await Bun.$`git -C ${testRepoPath} config user.name "Test User"`.quiet();
|
|
14
|
+
await Bun.$`git -C ${testRepoPath} config user.email "test@example.com"`.quiet();
|
|
15
|
+
|
|
16
|
+
await Bun.$`echo "initial" > ${join(testRepoPath, "README.md")}`.quiet();
|
|
17
|
+
await Bun.$`git -C ${testRepoPath} add README.md`.quiet();
|
|
18
|
+
await Bun.$`git -C ${testRepoPath} commit -m "Initial commit"`.quiet();
|
|
19
|
+
});
|
|
20
|
+
|
|
21
|
+
// Remove the temporary repo created in beforeAll.
afterAll(() => {
  if (testRepoPath) {
    rmSync(testRepoPath, { recursive: true, force: true });
  }
});
|
|
26
|
+
|
|
27
|
+
// These tests share the single repo created in beforeAll and are
// order-dependent: each test captures the current HEAD as its base SHA, so
// commits made by earlier tests become part of the base state of later ones.
describe("extractDeltas", () => {
  test("extracts A/M/D status correctly", async () => {
    const baseSha = await getCurrentSha(testRepoPath);

    // One commit per status: add new.txt, modify README.md, delete new.txt.
    await Bun.$`echo "new file" > ${join(testRepoPath, "new.txt")}`.quiet();
    await Bun.$`git -C ${testRepoPath} add new.txt`.quiet();
    await Bun.$`git -C ${testRepoPath} commit -m "Add new.txt"`.quiet();

    await Bun.$`echo "modified" > ${join(testRepoPath, "README.md")}`.quiet();
    await Bun.$`git -C ${testRepoPath} add README.md`.quiet();
    await Bun.$`git -C ${testRepoPath} commit -m "Modify README"`.quiet();

    await Bun.$`git -C ${testRepoPath} rm new.txt`.quiet();
    await Bun.$`git -C ${testRepoPath} commit -m "Delete new.txt"`.quiet();

    const headSha = await getCurrentSha(testRepoPath);
    const deltas = await extractDeltas(testRepoPath, baseSha, headSha);

    // One delta per commit in base..head.
    expect(deltas.length).toBe(3);

    const hasAdd = deltas.some((d) => d.changes.some((c) => c.status === "A" && c.path === "new.txt"));
    expect(hasAdd).toBe(true);

    const hasModify = deltas.some((d) => d.changes.some((c) => c.status === "M" && c.path === "README.md"));
    expect(hasModify).toBe(true);

    const hasDelete = deltas.some((d) => d.changes.some((c) => c.status === "D" && c.path === "new.txt"));
    expect(hasDelete).toBe(true);
  });

  test("extracts rename (R status) correctly", async () => {
    const baseSha = await getCurrentSha(testRepoPath);

    await Bun.$`echo "content" > ${join(testRepoPath, "old-name.txt")}`.quiet();
    await Bun.$`git -C ${testRepoPath} add old-name.txt`.quiet();
    await Bun.$`git -C ${testRepoPath} commit -m "Add old-name.txt"`.quiet();

    await Bun.$`git -C ${testRepoPath} mv old-name.txt new-name.txt`.quiet();
    await Bun.$`git -C ${testRepoPath} commit -m "Rename file"`.quiet();

    const headSha = await getCurrentSha(testRepoPath);
    const deltas = await extractDeltas(testRepoPath, baseSha, headSha);

    // A rename carries both the old and the new path on one change entry.
    const allRenames = deltas.flatMap((d) => d.changes.filter((c) => c.status === "R"));
    const renameChange = allRenames.find((c) => c.old_path === "old-name.txt");

    expect(renameChange).toBeDefined();
    expect(renameChange?.old_path).toBe("old-name.txt");
    expect(renameChange?.path).toBe("new-name.txt");
  });

  test("handles merge commits via first-parent linearization", async () => {
    const baseSha = await getCurrentSha(testRepoPath);

    // Diverge: one commit on a feature branch, one on main, then merge.
    await Bun.$`git -C ${testRepoPath} checkout -b feature`.quiet();
    await Bun.$`echo "feature" > ${join(testRepoPath, "feature.txt")}`.quiet();
    await Bun.$`git -C ${testRepoPath} add feature.txt`.quiet();
    await Bun.$`git -C ${testRepoPath} commit -m "Feature commit"`.quiet();

    // NOTE(review): assumes the repo's branch is named "main" — this depends
    // on git's init.defaultBranch unless beforeAll pins it explicitly.
    await Bun.$`git -C ${testRepoPath} checkout main`.quiet();
    await Bun.$`echo "main" > ${join(testRepoPath, "main.txt")}`.quiet();
    await Bun.$`git -C ${testRepoPath} add main.txt`.quiet();
    await Bun.$`git -C ${testRepoPath} commit -m "Main commit"`.quiet();

    await Bun.$`git -C ${testRepoPath} merge feature --no-edit`.quiet();

    const headSha = await getCurrentSha(testRepoPath);

    // Changes from both sides of the merge must appear in the deltas.
    const deltas = await extractDeltas(testRepoPath, baseSha, headSha);
    expect(deltas.length).toBeGreaterThanOrEqual(2);
    const allPaths = deltas.flatMap((d) => d.changes.map((c) => c.path));
    expect(allPaths).toContain("main.txt");
    expect(allPaths).toContain("feature.txt");
  });

  test("includes commit metadata (author, date, message)", async () => {
    const baseSha = await getCurrentSha(testRepoPath);

    await Bun.$`echo "test" > ${join(testRepoPath, "test.txt")}`.quiet();
    await Bun.$`git -C ${testRepoPath} add test.txt`.quiet();
    await Bun.$`git -C ${testRepoPath} commit -m "Test commit message"`.quiet();

    const headSha = await getCurrentSha(testRepoPath);
    const deltas = await extractDeltas(testRepoPath, baseSha, headSha);

    expect(deltas.length).toBe(1);
    // Author/email come from the repo config set in beforeAll.
    expect(deltas[0]?.author).toBe("Test User");
    expect(deltas[0]?.message).toBe("Test commit message");
    // Date is expected in ISO-8601 form (YYYY-MM-DDT...).
    expect(deltas[0]?.date).toMatch(/^\d{4}-\d{2}-\d{2}T/);
  });
});
|
|
118
|
+
|
|
119
|
+
// Pure-data tests: the fixtures below are shaped like extractDeltas output
// (commit metadata plus per-file change entries) — no git repo needed.
describe("buildRenameMap", () => {
  test("builds rename map from deltas", () => {
    const deltas = [
      {
        sha: "abc123",
        parent_sha: "def456",
        author: "Test",
        date: "2024-01-01",
        message: "Rename",
        changes: [
          {
            status: "R" as const,
            path: "new.txt",
            old_path: "old.txt",
            old_blob: "blob1",
            new_blob: "blob2",
            old_mode: "100644",
            new_mode: "100644",
          },
        ],
      },
    ];

    const renameMap = buildRenameMap(deltas);

    // Keyed by the pre-rename path, mapping to the post-rename path.
    expect(renameMap.get("old.txt")).toBe("new.txt");
  });

  test("handles multiple renames", () => {
    // Two consecutive renames: a.txt -> b.txt, then b.txt -> c.txt.
    const deltas = [
      {
        sha: "abc123",
        parent_sha: "def456",
        author: "Test",
        date: "2024-01-01",
        message: "Rename 1",
        changes: [
          {
            status: "R" as const,
            path: "b.txt",
            old_path: "a.txt",
            old_blob: "blob1",
            new_blob: "blob2",
            old_mode: "100644",
            new_mode: "100644",
          },
        ],
      },
      {
        sha: "ghi789",
        parent_sha: "abc123",
        author: "Test",
        date: "2024-01-02",
        message: "Rename 2",
        changes: [
          {
            status: "R" as const,
            path: "c.txt",
            old_path: "b.txt",
            old_blob: "blob2",
            new_blob: "blob3",
            old_mode: "100644",
            new_mode: "100644",
          },
        ],
      },
    ];

    const renameMap = buildRenameMap(deltas);

    // Each hop is recorded individually (no transitive a.txt -> c.txt entry).
    expect(renameMap.get("a.txt")).toBe("b.txt");
    expect(renameMap.get("b.txt")).toBe("c.txt");
  });

  test("returns empty map for no renames", () => {
    const deltas = [
      {
        sha: "abc123",
        parent_sha: "def456",
        author: "Test",
        date: "2024-01-01",
        message: "Add file",
        changes: [
          {
            status: "A" as const,
            path: "new.txt",
            old_blob: "0000000000000000000000000000000000000000",
            new_blob: "blob1",
            old_mode: "000000",
            new_mode: "100644",
          },
        ],
      },
    ];

    const renameMap = buildRenameMap(deltas);

    expect(renameMap.size).toBe(0);
  });
});
|
|
219
|
+
|
|
220
|
+
async function getCurrentSha(repoPath: string): Promise<string> {
|
|
221
|
+
const result = await Bun.$`git -C ${repoPath} rev-parse HEAD`.quiet();
|
|
222
|
+
return result.stdout.toString().trim();
|
|
223
|
+
}
|