newpr 0.6.5 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/history/store.ts +25 -0
- package/src/stack/balance.ts +128 -0
- package/src/stack/coupling.test.ts +158 -0
- package/src/stack/coupling.ts +135 -0
- package/src/stack/delta.test.ts +223 -0
- package/src/stack/delta.ts +264 -0
- package/src/stack/execute.test.ts +176 -0
- package/src/stack/execute.ts +194 -0
- package/src/stack/feasibility.test.ts +185 -0
- package/src/stack/feasibility.ts +286 -0
- package/src/stack/integration.test.ts +266 -0
- package/src/stack/merge-groups.test.ts +97 -0
- package/src/stack/merge-groups.ts +87 -0
- package/src/stack/partition.test.ts +233 -0
- package/src/stack/partition.ts +273 -0
- package/src/stack/plan.test.ts +154 -0
- package/src/stack/plan.ts +139 -0
- package/src/stack/pr-title.ts +64 -0
- package/src/stack/publish.ts +96 -0
- package/src/stack/split.ts +173 -0
- package/src/stack/types.ts +202 -0
- package/src/stack/verify.test.ts +137 -0
- package/src/stack/verify.ts +201 -0
- package/src/web/client/components/FeasibilityAlert.tsx +64 -0
- package/src/web/client/components/InputScreen.tsx +100 -89
- package/src/web/client/components/ResultsScreen.tsx +10 -2
- package/src/web/client/components/StackGroupCard.tsx +171 -0
- package/src/web/client/components/StackWarnings.tsx +135 -0
- package/src/web/client/hooks/useStack.ts +301 -0
- package/src/web/client/panels/StackPanel.tsx +289 -0
- package/src/web/server/routes.ts +114 -0
- package/src/web/server/stack-manager.ts +580 -0
- package/src/web/server.ts +15 -0
- package/src/web/styles/built.css +1 -1
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import type { StackExecResult, StackPublishResult, BranchInfo, PrInfo } from "./types.ts";
|
|
2
|
+
import type { PrMeta } from "../types/output.ts";
|
|
3
|
+
|
|
4
|
+
export interface PublishInput {
|
|
5
|
+
repo_path: string;
|
|
6
|
+
exec_result: StackExecResult;
|
|
7
|
+
pr_meta: PrMeta;
|
|
8
|
+
base_branch: string;
|
|
9
|
+
owner: string;
|
|
10
|
+
repo: string;
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
export async function publishStack(input: PublishInput): Promise<StackPublishResult> {
|
|
14
|
+
const { repo_path, exec_result, pr_meta, base_branch, owner, repo } = input;
|
|
15
|
+
const ghRepo = `${owner}/${repo}`;
|
|
16
|
+
|
|
17
|
+
const branches: BranchInfo[] = [];
|
|
18
|
+
const prs: PrInfo[] = [];
|
|
19
|
+
const total = exec_result.group_commits.length;
|
|
20
|
+
|
|
21
|
+
for (const gc of exec_result.group_commits) {
|
|
22
|
+
const pushResult = await Bun.$`git -C ${repo_path} push origin refs/heads/${gc.branch_name}:refs/heads/${gc.branch_name} --force-with-lease`.quiet().nothrow();
|
|
23
|
+
|
|
24
|
+
branches.push({
|
|
25
|
+
name: gc.branch_name,
|
|
26
|
+
pushed: pushResult.exitCode === 0,
|
|
27
|
+
});
|
|
28
|
+
|
|
29
|
+
if (pushResult.exitCode !== 0) {
|
|
30
|
+
continue;
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
for (let i = 0; i < exec_result.group_commits.length; i++) {
|
|
35
|
+
const gc = exec_result.group_commits[i];
|
|
36
|
+
if (!gc) continue;
|
|
37
|
+
|
|
38
|
+
const branchInfo = branches.find((b) => b.name === gc.branch_name);
|
|
39
|
+
if (!branchInfo?.pushed) continue;
|
|
40
|
+
|
|
41
|
+
const prBase = i === 0 ? base_branch : exec_result.group_commits[i - 1]?.branch_name;
|
|
42
|
+
if (!prBase) continue;
|
|
43
|
+
|
|
44
|
+
const order = i + 1;
|
|
45
|
+
const title = gc.pr_title
|
|
46
|
+
? `[${order}/${total}] ${gc.pr_title}`
|
|
47
|
+
: `[Stack ${order}/${total}] ${gc.group_id}`;
|
|
48
|
+
|
|
49
|
+
const body = buildPrBody(gc.group_id, order, total, exec_result, pr_meta);
|
|
50
|
+
|
|
51
|
+
const prResult = await Bun.$`gh pr create --repo ${ghRepo} --base ${prBase} --head ${gc.branch_name} --title ${title} --body ${body} --draft`.quiet().nothrow();
|
|
52
|
+
|
|
53
|
+
if (prResult.exitCode === 0) {
|
|
54
|
+
const prUrl = prResult.stdout.toString().trim();
|
|
55
|
+
const prNumberMatch = prUrl.match(/\/pull\/(\d+)/);
|
|
56
|
+
|
|
57
|
+
prs.push({
|
|
58
|
+
group_id: gc.group_id,
|
|
59
|
+
number: prNumberMatch ? parseInt(prNumberMatch[1]!, 10) : 0,
|
|
60
|
+
url: prUrl,
|
|
61
|
+
title,
|
|
62
|
+
base_branch: prBase,
|
|
63
|
+
head_branch: gc.branch_name,
|
|
64
|
+
});
|
|
65
|
+
} else {
|
|
66
|
+
const stderr = prResult.stderr.toString().trim();
|
|
67
|
+
console.error(`[publish] gh pr create failed for ${gc.group_id}: ${stderr}`);
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
return { branches, prs };
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
function buildPrBody(
|
|
75
|
+
groupId: string,
|
|
76
|
+
order: number,
|
|
77
|
+
total: number,
|
|
78
|
+
_execResult: StackExecResult,
|
|
79
|
+
prMeta: PrMeta,
|
|
80
|
+
): string {
|
|
81
|
+
const prevPr = order > 1 ? `Previous: Stack ${order - 1}/${total}` : "Previous: (base branch)";
|
|
82
|
+
const nextPr = order < total ? `Next: Stack ${order + 1}/${total}` : "Next: (top of stack)";
|
|
83
|
+
|
|
84
|
+
const lines = [
|
|
85
|
+
`> This is part of a stacked PR chain created by [newpr](${prMeta.pr_url})`,
|
|
86
|
+
`>`,
|
|
87
|
+
`> **Stack order**: ${order}/${total}`,
|
|
88
|
+
`> **${prevPr}** | **${nextPr}**`,
|
|
89
|
+
``,
|
|
90
|
+
`## ${groupId}`,
|
|
91
|
+
``,
|
|
92
|
+
`*From PR #${prMeta.pr_number}: ${prMeta.pr_title}*`,
|
|
93
|
+
];
|
|
94
|
+
|
|
95
|
+
return lines.join("\n");
|
|
96
|
+
}
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
import type { LlmClient } from "../llm/client.ts";
|
|
2
|
+
import type { FileGroup, GroupType } from "../types/output.ts";
|
|
3
|
+
import type { StackWarning } from "./types.ts";
|
|
4
|
+
|
|
5
|
+
export interface SplitResult {
|
|
6
|
+
groups: FileGroup[];
|
|
7
|
+
ownership: Map<string, string>;
|
|
8
|
+
warnings: StackWarning[];
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
interface SplitCandidate {
|
|
12
|
+
group: FileGroup;
|
|
13
|
+
files: string[];
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
const SPLIT_THRESHOLD = 8;
|
|
17
|
+
|
|
18
|
+
function findSplitCandidates(
|
|
19
|
+
groups: FileGroup[],
|
|
20
|
+
ownership: Map<string, string>,
|
|
21
|
+
): SplitCandidate[] {
|
|
22
|
+
const groupFiles = new Map<string, string[]>();
|
|
23
|
+
for (const [path, groupId] of ownership) {
|
|
24
|
+
const files = groupFiles.get(groupId) ?? [];
|
|
25
|
+
files.push(path);
|
|
26
|
+
groupFiles.set(groupId, files);
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
const candidates: SplitCandidate[] = [];
|
|
30
|
+
for (const group of groups) {
|
|
31
|
+
const files = groupFiles.get(group.name) ?? [];
|
|
32
|
+
if (files.length > SPLIT_THRESHOLD) {
|
|
33
|
+
candidates.push({ group, files });
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
return candidates;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
export async function splitOversizedGroups(
|
|
41
|
+
llmClient: LlmClient,
|
|
42
|
+
groups: FileGroup[],
|
|
43
|
+
ownership: Map<string, string>,
|
|
44
|
+
): Promise<SplitResult> {
|
|
45
|
+
const candidates = findSplitCandidates(groups, ownership);
|
|
46
|
+
|
|
47
|
+
if (candidates.length === 0) {
|
|
48
|
+
return { groups, ownership, warnings: [] };
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
const newGroups: FileGroup[] = [];
|
|
52
|
+
const newOwnership = new Map(ownership);
|
|
53
|
+
const warnings: StackWarning[] = [];
|
|
54
|
+
|
|
55
|
+
const unsplitGroupNames = new Set(groups.map((g) => g.name));
|
|
56
|
+
for (const c of candidates) {
|
|
57
|
+
unsplitGroupNames.delete(c.group.name);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
for (const group of groups) {
|
|
61
|
+
if (unsplitGroupNames.has(group.name)) {
|
|
62
|
+
newGroups.push(group);
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
for (const candidate of candidates) {
|
|
67
|
+
const suggestedCount = Math.min(
|
|
68
|
+
Math.ceil(candidate.files.length / SPLIT_THRESHOLD),
|
|
69
|
+
4,
|
|
70
|
+
);
|
|
71
|
+
|
|
72
|
+
const system = `You split a large code change group into smaller, cohesive sub-groups for stacked PRs.
|
|
73
|
+
|
|
74
|
+
Rules:
|
|
75
|
+
1. Split into ${suggestedCount}-${suggestedCount + 1} sub-groups (aim for ${SPLIT_THRESHOLD} files or fewer each)
|
|
76
|
+
2. Each sub-group must be a cohesive unit of change that can stand alone as a PR
|
|
77
|
+
3. Group by logical purpose: shared utilities, specific feature area, tests, config, etc.
|
|
78
|
+
4. Sub-group names must be concise (2-4 words), distinct from each other
|
|
79
|
+
5. Every file from the input must appear in exactly one sub-group
|
|
80
|
+
6. type must be one of: feature, refactor, bugfix, chore, docs, test, config
|
|
81
|
+
|
|
82
|
+
Response format (JSON only, no markdown):
|
|
83
|
+
[
|
|
84
|
+
{
|
|
85
|
+
"name": "sub-group name",
|
|
86
|
+
"type": "feature",
|
|
87
|
+
"description": "what this sub-group does",
|
|
88
|
+
"files": ["path/to/file1.ts", "path/to/file2.ts"]
|
|
89
|
+
}
|
|
90
|
+
]`;
|
|
91
|
+
|
|
92
|
+
const user = `Original group: "${candidate.group.name}" (${candidate.group.type})
|
|
93
|
+
Description: ${candidate.group.description}
|
|
94
|
+
|
|
95
|
+
Files (${candidate.files.length}):
|
|
96
|
+
${candidate.files.map((f) => `- ${f}`).join("\n")}
|
|
97
|
+
|
|
98
|
+
Split this into ${suggestedCount}-${suggestedCount + 1} smaller, cohesive sub-groups.`;
|
|
99
|
+
|
|
100
|
+
try {
|
|
101
|
+
const response = await llmClient.complete(system, user);
|
|
102
|
+
const cleaned = response.content.replace(/```(?:json)?\s*/g, "").replace(/```\s*/g, "").trim();
|
|
103
|
+
const parsed = JSON.parse(cleaned) as Array<{
|
|
104
|
+
name: string;
|
|
105
|
+
type: string;
|
|
106
|
+
description: string;
|
|
107
|
+
files: string[];
|
|
108
|
+
}>;
|
|
109
|
+
|
|
110
|
+
if (!Array.isArray(parsed) || parsed.length < 2) {
|
|
111
|
+
newGroups.push(candidate.group);
|
|
112
|
+
continue;
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
const validTypes = new Set(["feature", "refactor", "bugfix", "chore", "docs", "test", "config"]);
|
|
116
|
+
const candidateFileSet = new Set(candidate.files);
|
|
117
|
+
const assignedFiles = new Set<string>();
|
|
118
|
+
const subGroups: FileGroup[] = [];
|
|
119
|
+
|
|
120
|
+
for (const sub of parsed) {
|
|
121
|
+
if (!sub.name || !sub.files || sub.files.length === 0) continue;
|
|
122
|
+
|
|
123
|
+
const validFiles = sub.files.filter((f) => candidateFileSet.has(f) && !assignedFiles.has(f));
|
|
124
|
+
if (validFiles.length === 0) continue;
|
|
125
|
+
|
|
126
|
+
for (const f of validFiles) assignedFiles.add(f);
|
|
127
|
+
|
|
128
|
+
const subGroup: FileGroup = {
|
|
129
|
+
name: sub.name,
|
|
130
|
+
type: (validTypes.has(sub.type) ? sub.type : candidate.group.type) as GroupType,
|
|
131
|
+
description: sub.description || candidate.group.description,
|
|
132
|
+
files: validFiles,
|
|
133
|
+
};
|
|
134
|
+
subGroups.push(subGroup);
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
const unassigned = candidate.files.filter((f) => !assignedFiles.has(f));
|
|
138
|
+
if (unassigned.length > 0 && subGroups.length > 0) {
|
|
139
|
+
subGroups[subGroups.length - 1]!.files.push(...unassigned);
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
if (subGroups.length < 2) {
|
|
143
|
+
newGroups.push(candidate.group);
|
|
144
|
+
continue;
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
for (const sg of subGroups) {
|
|
148
|
+
newGroups.push(sg);
|
|
149
|
+
for (const f of sg.files) {
|
|
150
|
+
newOwnership.set(f, sg.name);
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
warnings.push({
|
|
155
|
+
category: "grouping",
|
|
156
|
+
severity: "info",
|
|
157
|
+
title: `"${candidate.group.name}" split into ${subGroups.length} sub-groups`,
|
|
158
|
+
message: `Group had ${candidate.files.length} files — split for smaller, more focused PRs`,
|
|
159
|
+
details: subGroups.map((sg) => `"${sg.name}" (${sg.files.length} files)`),
|
|
160
|
+
});
|
|
161
|
+
} catch {
|
|
162
|
+
newGroups.push(candidate.group);
|
|
163
|
+
warnings.push({
|
|
164
|
+
category: "system",
|
|
165
|
+
severity: "warn",
|
|
166
|
+
title: `Failed to split "${candidate.group.name}"`,
|
|
167
|
+
message: "AI splitting failed — keeping original group intact",
|
|
168
|
+
});
|
|
169
|
+
}
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
return { groups: newGroups, ownership: newOwnership, warnings };
|
|
173
|
+
}
|
|
@@ -0,0 +1,202 @@
|
|
|
1
|
+
import type { FileGroup, GroupType } from "../types/output.ts";
|
|
2
|
+
import type { PrCommit } from "../types/github.ts";
|
|
3
|
+
|
|
4
|
+
// ============================================================================
// Input Types
// ============================================================================

/** Input to the stack pipeline: one analyzed PR plus its grouped files. */
export interface StackInput {
  session_id: string;
  pr_number: number;
  base_sha: string;
  head_sha: string;
  repo_path: string; // local clone the git operations run against
  groups: FileGroup[];
  commits: PrCommit[];
  changed_files: string[];
}

// ============================================================================
// Partition Types
// ============================================================================

/** A file moved out of one or more groups into a single owning group. */
export interface ReattributedFile {
  path: string;
  from_groups: string[];
  to_group: string;
  reason: string;
}

/** Result of assigning each changed file to exactly one owning group. */
export interface PartitionResult {
  ownership: Map<string, string>; // path -> groupId
  reattributed: ReattributedFile[];
  shared_foundation_group?: FileGroup;
  warnings: string[];
  structured_warnings: StackWarning[];
}

// ============================================================================
// Delta Types
// ============================================================================

// Added / Modified / Deleted / Renamed (git-style status letters).
export type DeltaStatus = "A" | "M" | "D" | "R";

/** One file's change within a single commit (blob + mode before/after). */
export interface DeltaFileChange {
  status: DeltaStatus;
  path: string;
  old_path?: string; // set for renames
  old_blob: string;
  new_blob: string;
  old_mode: string;
  new_mode: string;
}

/** One commit's worth of file changes, with its metadata. */
export interface DeltaEntry {
  sha: string;
  parent_sha: string;
  author: string;
  date: string;
  message: string;
  changes: DeltaFileChange[];
}

// ============================================================================
// Feasibility Types
// ============================================================================

export type ConstraintKind = "dependency" | "path-order" | "unassigned" | "ambiguous";

/** The file/commit pair that induced a constraint edge. */
export interface ConstraintEvidence {
  path: string;
  from_commit?: string;
  to_commit?: string;
  from_commit_index?: number;
  to_commit_index?: number;
}

/** A directed ordering constraint between two groups. */
export interface ConstraintEdge {
  from: string; // groupId
  to: string; // groupId
  kind: ConstraintKind;
  evidence?: ConstraintEvidence;
}

/** A cycle in the constraint graph, reported by groups and by edges. */
export interface CycleReport {
  group_cycle: string[]; // [groupId, ...]
  edge_cycle: ConstraintEdge[];
}

/**
 * Whether the groups can be linearized into a stack; when not feasible,
 * the blocking cycle and/or problem paths explain why.
 */
export interface FeasibilityResult {
  feasible: boolean;
  ordered_group_ids?: string[]; // topological order, present when feasible
  cycle?: CycleReport;
  unassigned_paths?: Array<{ path: string; commits: string[] }>;
  ambiguous_paths?: Array<{ path: string; groups: string[]; commits: string[] }>;
}

// ============================================================================
// Plan Types
// ============================================================================

/** Per-group diff statistics. */
export interface StackGroupStats {
  additions: number;
  deletions: number;
  files_added: number;
  files_modified: number;
  files_deleted: number;
}

/** One group in the planned stack, with its position and dependencies. */
export interface StackGroup {
  id: string;
  name: string;
  type: GroupType;
  description: string;
  files: string[];
  deps: string[]; // groupIds
  order: number;
  stats?: StackGroupStats;
  pr_title?: string;
}

/** The full plan: ordered groups plus the tree each group should produce. */
export interface StackPlan {
  base_sha: string;
  head_sha: string;
  groups: StackGroup[];
  expected_trees: Map<string, string>; // groupId -> treeSha
}

// ============================================================================
// Execution Types
// ============================================================================

/** The commit/branch created for one group during execution. */
export interface GroupCommitInfo {
  group_id: string;
  commit_sha: string;
  tree_sha: string;
  branch_name: string;
  pr_title?: string;
}

/** Result of materializing the plan as real commits and branches. */
export interface StackExecResult {
  run_id: string;
  source_copy_branch: string;
  group_commits: GroupCommitInfo[];
  final_tree_sha: string;
  verified: boolean;
}

// ============================================================================
// Publication Types
// ============================================================================

/** Push outcome for one stack branch. */
export interface BranchInfo {
  name: string;
  pushed: boolean;
}

/** A pull request opened for one group. */
export interface PrInfo {
  group_id: string;
  number: number;
  url: string;
  title: string;
  base_branch: string;
  head_branch: string;
}

/** Everything published: pushed branches and created PRs. */
export interface StackPublishResult {
  branches: BranchInfo[];
  prs: PrInfo[];
}

// ============================================================================
// Structured Warnings
// ============================================================================

export type StackWarningCategory =
  | "assignment"
  | "grouping"
  | "coupling"
  | "verification.scope"
  | "verification.completeness"
  | "system";

export type StackWarningSeverity = "info" | "warn";

/** A user-facing warning surfaced by any stage of the stack pipeline. */
export interface StackWarning {
  category: StackWarningCategory;
  severity: StackWarningSeverity;
  title: string;
  message: string;
  details?: string[];
}

// ============================================================================
// Progress Types
// ============================================================================

/** Incremental progress event emitted while a stack is being built. */
export interface StackProgress {
  phase: string;
  message: string;
  current?: number;
  total?: number;
}
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
import { describe, test, expect, beforeAll, afterAll } from "bun:test";
|
|
2
|
+
import { mkdtempSync, rmSync, writeFileSync, mkdirSync } from "node:fs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { tmpdir } from "node:os";
|
|
5
|
+
import { createStackPlan } from "./plan.ts";
|
|
6
|
+
import { executeStack } from "./execute.ts";
|
|
7
|
+
import { extractDeltas } from "./delta.ts";
|
|
8
|
+
import { verifyStack } from "./verify.ts";
|
|
9
|
+
import type { FileGroup } from "../types/output.ts";
|
|
10
|
+
|
|
11
|
+
// Shared fixture: a throwaway git repo with three commits —
// base ("Initial", README.md) -> "Add auth" (src/auth.ts) -> "Add UI" (src/ui.tsx).
let testRepoPath: string;
let baseSha: string; // first commit: the range's base
let headSha: string; // third commit: the range's head

beforeAll(async () => {
  testRepoPath = mkdtempSync(join(tmpdir(), "verify-test-"));

  // Fresh repo with a local identity so commits succeed in CI.
  await Bun.$`git init ${testRepoPath}`.quiet();
  await Bun.$`git -C ${testRepoPath} config user.name "Test User"`.quiet();
  await Bun.$`git -C ${testRepoPath} config user.email "test@example.com"`.quiet();

  // Base commit: only a README.
  writeFileSync(join(testRepoPath, "README.md"), "initial\n");
  await Bun.$`git -C ${testRepoPath} add README.md`.quiet();
  await Bun.$`git -C ${testRepoPath} commit -m "Initial"`.quiet();

  baseSha = (await Bun.$`git -C ${testRepoPath} rev-parse HEAD`.quiet()).stdout.toString().trim();

  // Two feature commits on top — one file per group used by the tests below.
  mkdirSync(join(testRepoPath, "src"), { recursive: true });
  writeFileSync(join(testRepoPath, "src", "auth.ts"), "export const auth = true;\n");
  await Bun.$`git -C ${testRepoPath} add src/auth.ts`.quiet();
  await Bun.$`git -C ${testRepoPath} commit -m "Add auth"`.quiet();

  writeFileSync(join(testRepoPath, "src", "ui.tsx"), "export const UI = () => <div/>;\n");
  await Bun.$`git -C ${testRepoPath} add src/ui.tsx`.quiet();
  await Bun.$`git -C ${testRepoPath} commit -m "Add UI"`.quiet();

  headSha = (await Bun.$`git -C ${testRepoPath} rev-parse HEAD`.quiet()).stdout.toString().trim();
});

afterAll(() => {
  // Best-effort removal of the temp repo (force: ignore missing files).
  if (testRepoPath) rmSync(testRepoPath, { recursive: true, force: true });
});
|
|
43
|
+
|
|
44
|
+
describe("verifyStack", () => {
|
|
45
|
+
test("valid stack passes all verification checks", async () => {
|
|
46
|
+
const deltas = await extractDeltas(testRepoPath, baseSha, headSha);
|
|
47
|
+
const ownership = new Map([
|
|
48
|
+
["src/auth.ts", "Auth"],
|
|
49
|
+
["src/ui.tsx", "UI"],
|
|
50
|
+
]);
|
|
51
|
+
const groups: FileGroup[] = [
|
|
52
|
+
{ name: "Auth", type: "feature", description: "Auth", files: ["src/auth.ts"] },
|
|
53
|
+
{ name: "UI", type: "feature", description: "UI", files: ["src/ui.tsx"] },
|
|
54
|
+
];
|
|
55
|
+
|
|
56
|
+
const plan = await createStackPlan({
|
|
57
|
+
repo_path: testRepoPath,
|
|
58
|
+
base_sha: baseSha,
|
|
59
|
+
head_sha: headSha,
|
|
60
|
+
deltas,
|
|
61
|
+
ownership,
|
|
62
|
+
group_order: ["Auth", "UI"],
|
|
63
|
+
groups,
|
|
64
|
+
});
|
|
65
|
+
|
|
66
|
+
const execResult = await executeStack({
|
|
67
|
+
repo_path: testRepoPath,
|
|
68
|
+
plan,
|
|
69
|
+
deltas,
|
|
70
|
+
ownership,
|
|
71
|
+
pr_author: { name: "Test", email: "t@t.com" },
|
|
72
|
+
pr_number: 1,
|
|
73
|
+
head_branch: "test-branch",
|
|
74
|
+
});
|
|
75
|
+
|
|
76
|
+
const verifyResult = await verifyStack({
|
|
77
|
+
repo_path: testRepoPath,
|
|
78
|
+
base_sha: baseSha,
|
|
79
|
+
head_sha: headSha,
|
|
80
|
+
exec_result: execResult,
|
|
81
|
+
ownership,
|
|
82
|
+
});
|
|
83
|
+
|
|
84
|
+
expect(verifyResult.verified).toBe(true);
|
|
85
|
+
expect(verifyResult.errors).toEqual([]);
|
|
86
|
+
expect(verifyResult.warnings).toBeDefined();
|
|
87
|
+
});
|
|
88
|
+
|
|
89
|
+
test("detects tree mismatch when final_tree_sha is wrong", async () => {
|
|
90
|
+
const deltas = await extractDeltas(testRepoPath, baseSha, headSha);
|
|
91
|
+
const ownership = new Map([
|
|
92
|
+
["src/auth.ts", "Auth"],
|
|
93
|
+
["src/ui.tsx", "UI"],
|
|
94
|
+
]);
|
|
95
|
+
const groups: FileGroup[] = [
|
|
96
|
+
{ name: "Auth", type: "feature", description: "Auth", files: ["src/auth.ts"] },
|
|
97
|
+
{ name: "UI", type: "feature", description: "UI", files: ["src/ui.tsx"] },
|
|
98
|
+
];
|
|
99
|
+
|
|
100
|
+
const plan = await createStackPlan({
|
|
101
|
+
repo_path: testRepoPath,
|
|
102
|
+
base_sha: baseSha,
|
|
103
|
+
head_sha: headSha,
|
|
104
|
+
deltas,
|
|
105
|
+
ownership,
|
|
106
|
+
group_order: ["Auth", "UI"],
|
|
107
|
+
groups,
|
|
108
|
+
});
|
|
109
|
+
|
|
110
|
+
const execResult = await executeStack({
|
|
111
|
+
repo_path: testRepoPath,
|
|
112
|
+
plan,
|
|
113
|
+
deltas,
|
|
114
|
+
ownership,
|
|
115
|
+
pr_author: { name: "Test", email: "t@t.com" },
|
|
116
|
+
pr_number: 2,
|
|
117
|
+
head_branch: "test-branch",
|
|
118
|
+
});
|
|
119
|
+
|
|
120
|
+
const tamperedResult = {
|
|
121
|
+
...execResult,
|
|
122
|
+
final_tree_sha: "0".repeat(40),
|
|
123
|
+
};
|
|
124
|
+
|
|
125
|
+
const verifyResult = await verifyStack({
|
|
126
|
+
repo_path: testRepoPath,
|
|
127
|
+
base_sha: baseSha,
|
|
128
|
+
head_sha: headSha,
|
|
129
|
+
exec_result: tamperedResult,
|
|
130
|
+
ownership,
|
|
131
|
+
});
|
|
132
|
+
|
|
133
|
+
expect(verifyResult.verified).toBe(false);
|
|
134
|
+
expect(verifyResult.errors.some((e) => e.includes("Final tree mismatch"))).toBe(true);
|
|
135
|
+
expect(verifyResult.warnings).toBeDefined();
|
|
136
|
+
});
|
|
137
|
+
});
|