bmalph 2.3.0 → 2.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +56 -23
- package/dist/cli.js +13 -0
- package/dist/commands/doctor.js +22 -6
- package/dist/commands/implement.d.ts +6 -0
- package/dist/commands/implement.js +82 -0
- package/dist/commands/reset.d.ts +7 -0
- package/dist/commands/reset.js +81 -0
- package/dist/commands/status.js +86 -10
- package/dist/platform/claude-code.js +0 -1
- package/dist/reset.d.ts +18 -0
- package/dist/reset.js +181 -0
- package/dist/transition/artifact-scan.d.ts +27 -0
- package/dist/transition/artifact-scan.js +91 -0
- package/dist/transition/artifacts.d.ts +1 -0
- package/dist/transition/artifacts.js +1 -0
- package/dist/transition/context.js +34 -0
- package/dist/transition/fix-plan.d.ts +8 -2
- package/dist/transition/fix-plan.js +33 -7
- package/dist/transition/orchestration.d.ts +2 -2
- package/dist/transition/orchestration.js +120 -41
- package/dist/transition/preflight.d.ts +6 -0
- package/dist/transition/preflight.js +154 -0
- package/dist/transition/specs-index.d.ts +1 -1
- package/dist/transition/specs-index.js +24 -1
- package/dist/transition/types.d.ts +23 -1
- package/dist/utils/dryrun.d.ts +1 -1
- package/dist/utils/dryrun.js +22 -0
- package/dist/utils/validate.js +2 -2
- package/package.json +1 -1
- package/ralph/ralph_loop.sh +15 -0
- package/slash-commands/bmalph-doctor.md +16 -0
- package/slash-commands/bmalph-implement.md +18 -141
- package/slash-commands/bmalph-status.md +15 -0
- package/slash-commands/bmalph-upgrade.md +15 -0
package/dist/reset.js
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
import { readdir, readFile, rm } from "fs/promises";
|
|
2
|
+
import { join, posix } from "path";
|
|
3
|
+
import { getSlashCommandsDir } from "./installer.js";
|
|
4
|
+
import { exists, atomicWriteFile } from "./utils/file-system.js";
|
|
5
|
+
import { isEnoent } from "./utils/errors.js";
|
|
6
|
+
import { BMAD_DIR, RALPH_DIR, BMALPH_DIR, BMAD_OUTPUT_DIR } from "./utils/constants.js";
|
|
/**
 * Builds a plan describing everything a reset would remove or modify,
 * without touching the filesystem. The plan can be previewed as dry-run
 * actions (see planToDryRunActions) or applied via executeResetPlan.
 *
 * @param {string} projectDir - Project root directory.
 * @param {object} platform - Platform descriptor providing `commandDelivery`
 *   ({kind, dir?}), `instructionsFile`, and `instructionsSectionMarker`.
 * @returns {Promise<object>} Plan with `directories`, `commandFiles`,
 *   `instructionsCleanup`, `gitignoreLines`, and `warnings`.
 */
export async function buildResetPlan(projectDir, platform) {
    const plan = {
        directories: [],
        commandFiles: [],
        instructionsCleanup: null,
        gitignoreLines: [],
        warnings: [],
    };
    // Check which managed directories exist
    for (const dir of [BMAD_DIR, RALPH_DIR, BMALPH_DIR]) {
        if (await exists(join(projectDir, dir))) {
            plan.directories.push(dir);
        }
    }
    // Check for slash commands to remove (directory delivery only)
    if (platform.commandDelivery.kind === "directory") {
        const commandsDir = join(projectDir, platform.commandDelivery.dir);
        if (await exists(commandsDir)) {
            const bundledNames = await getBundledCommandNames();
            try {
                const existingFiles = await readdir(commandsDir);
                for (const file of existingFiles) {
                    // Only plan removal of files this package shipped;
                    // user-created commands in the same directory are kept.
                    if (file.endsWith(".md") && bundledNames.has(file)) {
                        plan.commandFiles.push(posix.join(platform.commandDelivery.dir, file));
                    }
                }
            }
            catch (err) {
                if (!isEnoent(err))
                    throw err;
            }
        }
    }
    // Check instructions file for BMAD sections
    try {
        const content = await readFile(join(projectDir, platform.instructionsFile), "utf-8");
        const sectionsToRemove = [];
        if (content.includes(platform.instructionsSectionMarker)) {
            sectionsToRemove.push(platform.instructionsSectionMarker);
        }
        // Codex (inline) also has a BMAD Commands section
        if (platform.commandDelivery.kind === "inline" && content.includes("## BMAD Commands")) {
            sectionsToRemove.push("## BMAD Commands");
        }
        if (sectionsToRemove.length > 0) {
            plan.instructionsCleanup = {
                path: platform.instructionsFile,
                sectionsToRemove,
            };
        }
    }
    catch (err) {
        if (!isEnoent(err))
            throw err;
    }
    // Check .gitignore for bmalph entries
    try {
        const content = await readFile(join(projectDir, ".gitignore"), "utf-8");
        const existingLines = new Set(content
            .split(/\r?\n/)
            .map((line) => line.trim())
            .filter(Boolean));
        // Renamed from misspelled `bmalpEntries`.
        const bmalphEntries = [".ralph/logs/", "_bmad-output/"];
        for (const entry of bmalphEntries) {
            if (existingLines.has(entry)) {
                plan.gitignoreLines.push(entry);
            }
        }
    }
    catch (err) {
        if (!isEnoent(err))
            throw err;
    }
    // Warn about _bmad-output/
    if (await exists(join(projectDir, BMAD_OUTPUT_DIR))) {
        plan.warnings.push({
            path: `${BMAD_OUTPUT_DIR}/`,
            message: "Contains user planning artifacts — not removed by reset",
        });
    }
    return plan;
}
/**
 * Lists the markdown slash-command files bundled with this package.
 * Returns an empty set when the bundle directory does not exist.
 */
async function getBundledCommandNames() {
    const slashCommandsDir = getSlashCommandsDir();
    let entries;
    try {
        entries = await readdir(slashCommandsDir);
    }
    catch (err) {
        if (isEnoent(err))
            return new Set();
        throw err;
    }
    const markdownOnly = entries.filter((name) => name.endsWith(".md"));
    return new Set(markdownOnly);
}
/**
 * Applies a reset plan produced by buildResetPlan: removes managed
 * directories and bundled slash-command files, strips BMAD sections from the
 * instructions file (deleting it if nothing remains), and drops bmalph
 * entries from .gitignore.
 */
export async function executeResetPlan(projectDir, plan) {
    // Remove managed directories first.
    for (const directory of plan.directories) {
        await rm(join(projectDir, directory), { recursive: true, force: true });
    }
    // Remove bundled slash-command files.
    for (const commandFile of plan.commandFiles) {
        await rm(join(projectDir, commandFile), { force: true });
    }
    // Strip BMAD sections from the instructions file.
    if (plan.instructionsCleanup) {
        const instructionsPath = join(projectDir, plan.instructionsCleanup.path);
        try {
            let text = await readFile(instructionsPath, "utf-8");
            for (const marker of plan.instructionsCleanup.sectionsToRemove) {
                text = removeSection(text, marker);
            }
            text = text.trim();
            if (text.length === 0) {
                // Nothing but BMAD content was in the file — remove it entirely.
                await rm(instructionsPath, { force: true });
            }
            else {
                await atomicWriteFile(instructionsPath, text + "\n");
            }
        }
        catch (err) {
            if (!isEnoent(err))
                throw err;
        }
    }
    // Drop bmalph-managed lines from .gitignore.
    if (plan.gitignoreLines.length > 0) {
        const gitignorePath = join(projectDir, ".gitignore");
        try {
            const original = await readFile(gitignorePath, "utf-8");
            const cleaned = removeGitignoreLines(original, plan.gitignoreLines);
            await atomicWriteFile(gitignorePath, cleaned);
        }
        catch (err) {
            if (!isEnoent(err))
                throw err;
        }
    }
}
/**
 * Removes a markdown section (its level-2 heading plus body) from `content`.
 * The section runs from the first occurrence of `marker` to the next "## "
 * heading with a different title, or to end of input.
 *
 * @param {string} content - Full markdown text.
 * @param {string} marker - Section heading, e.g. "## BMAD Commands".
 * @returns {string} Content with the section removed; unchanged when the
 *   marker is absent.
 */
function removeSection(content, marker) {
    if (!content.includes(marker))
        return content;
    const sectionStart = content.indexOf(marker);
    const before = content.slice(0, sectionStart);
    const afterSection = content.slice(sectionStart);
    // Derive the heading text for the lookahead. The previous code escaped
    // the whole marker and then sliced 3 characters off the ESCAPED string,
    // which corrupted the pattern whenever the marker did not begin with
    // the literal "## " prefix. Strip the prefix first, then escape.
    const headingText = marker.startsWith("## ") ? marker.slice(3) : marker;
    const headingEscaped = headingText.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    // Find the next level-2 heading that doesn't match this section's heading
    const nextHeadingMatch = afterSection.match(new RegExp(`\\n## (?!${headingEscaped})`));
    const after = nextHeadingMatch ? afterSection.slice(nextHeadingMatch.index) : "";
    return before.trimEnd() + after;
}
/**
 * Returns `content` with every line whose trimmed form appears in
 * `linesToRemove` filtered out. Surviving lines are re-joined with "\n".
 */
function removeGitignoreLines(content, linesToRemove) {
    const blocked = new Set(linesToRemove);
    const kept = [];
    for (const line of content.split(/\r?\n/)) {
        if (!blocked.has(line.trim())) {
            kept.push(line);
        }
    }
    return kept.join("\n");
}
/**
 * Converts a reset plan into an ordered list of dry-run actions
 * (delete / modify / warn) for display, without performing any of them.
 */
export function planToDryRunActions(plan) {
    const deletions = [
        ...plan.directories.map((dir) => ({ type: "delete", path: `${dir}/` })),
        ...plan.commandFiles.map((file) => ({ type: "delete", path: file })),
    ];
    const modifications = [];
    if (plan.instructionsCleanup) {
        modifications.push({ type: "modify", path: plan.instructionsCleanup.path });
    }
    if (plan.gitignoreLines.length > 0) {
        modifications.push({ type: "modify", path: ".gitignore" });
    }
    const notices = plan.warnings.map((warning) => ({
        type: "warn",
        path: warning.path,
        reason: warning.message,
    }));
    return [...deletions, ...modifications, ...notices];
}
@@ -0,0 +1,27 @@
|
|
|
/** How a planning artifact is classified: producing phase, display name, and whether it is required. */
export interface ArtifactClassification {
    phase: number;
    name: string;
    required: boolean;
}
/** A classified artifact together with the filename it was matched from. */
export interface ScannedArtifact extends ArtifactClassification {
    filename: string;
}
/** Scanned artifacts grouped by phase number (1, 2, or 3). */
export interface PhaseArtifacts {
    1: ScannedArtifact[];
    2: ScannedArtifact[];
    3: ScannedArtifact[];
}
/** Result of scanning a project's artifacts directory. */
export interface ProjectArtifactScan {
    /** Artifacts directory path relative to the project root, with forward slashes. */
    directory: string;
    /** Filenames that matched a known artifact rule. */
    found: string[];
    /** Highest phase (1-3) with at least one artifact; 1 when none found. */
    detectedPhase: number;
    /** Names of required artifacts not present in any phase. */
    missing: string[];
    /** Artifacts grouped by phase. */
    phases: PhaseArtifacts;
    /** Human-readable suggestion for the next command to run. */
    nextAction: string;
}
/** Classifies a filename against the known artifact rules; null when nothing matches. */
export declare function classifyArtifact(filename: string): ArtifactClassification | null;
/** Groups filenames into phases, dropping files that match no rule. */
export declare function scanArtifacts(files: string[]): PhaseArtifacts;
/** Returns the highest phase (checked 3, 2, then 1) with artifacts; defaults to 1. */
export declare function detectPhase(phases: PhaseArtifacts): number;
/** Lists required artifact names missing from all phases. */
export declare function getMissing(phases: PhaseArtifacts): string[];
/** Suggests the next command based on which artifacts exist. */
export declare function suggestNext(phases: PhaseArtifacts, detectedPhase: number): string;
/** Scans the project's artifacts directory; resolves to null when no artifacts directory exists. */
export declare function scanProjectArtifacts(projectDir: string): Promise<ProjectArtifactScan | null>;
@@ -0,0 +1,91 @@
|
|
|
1
|
+
import { readdir } from "fs/promises";
|
|
2
|
+
import { relative } from "path";
|
|
3
|
+
import { findArtifactsDir } from "./artifacts.js";
|
|
/**
 * Ordered matching rules for planning artifacts. Order matters: the first
 * pattern that tests true against a filename wins (e.g. /prd/ is tried
 * before /ux/).
 */
const ARTIFACT_RULES = [
    { pattern: /brief/i, phase: 1, name: "Product Brief", required: false },
    { pattern: /market/i, phase: 1, name: "Market Research", required: false },
    { pattern: /domain/i, phase: 1, name: "Domain Research", required: false },
    { pattern: /tech.*research/i, phase: 1, name: "Technical Research", required: false },
    { pattern: /prd/i, phase: 2, name: "PRD", required: true },
    { pattern: /ux/i, phase: 2, name: "UX Design", required: false },
    { pattern: /architect/i, phase: 3, name: "Architecture", required: true },
    { pattern: /epic|stor/i, phase: 3, name: "Epics & Stories", required: true },
    { pattern: /readiness/i, phase: 3, name: "Readiness Report", required: true },
];
/**
 * Classifies a filename against ARTIFACT_RULES.
 * Returns the first matching rule's phase/name/required, or null.
 */
export function classifyArtifact(filename) {
    const matched = ARTIFACT_RULES.find((rule) => rule.pattern.test(filename));
    if (!matched) {
        return null;
    }
    return { phase: matched.phase, name: matched.name, required: matched.required };
}
/**
 * Groups filenames by phase according to classifyArtifact.
 * Files that match no artifact rule are silently dropped.
 */
export function scanArtifacts(files) {
    const phases = { 1: [], 2: [], 3: [] };
    files.forEach((filename) => {
        const info = classifyArtifact(filename);
        if (info !== null) {
            phases[info.phase].push({ ...info, filename });
        }
    });
    return phases;
}
/**
 * Returns the highest phase that has at least one artifact
 * (3 beats 2 beats 1). An empty scan defaults to phase 1.
 */
export function detectPhase(phases) {
    if (phases[3].length > 0)
        return 3;
    if (phases[2].length > 0)
        return 2;
    return 1;
}
/**
 * Lists required artifact names (per ARTIFACT_RULES) that appear in no
 * phase, in rule-declaration order.
 */
export function getMissing(phases) {
    const present = new Set([...phases[1], ...phases[2], ...phases[3]].map((artifact) => artifact.name));
    return ARTIFACT_RULES
        .filter((rule) => rule.required && !present.has(rule.name))
        .map((rule) => rule.name);
}
/**
 * Suggests the next workflow step based on which artifacts exist, walking
 * the pipeline in order: analysis → PRD → architecture → epics/stories →
 * readiness report → implementation.
 */
export function suggestNext(phases, detectedPhase) {
    const have = new Set([...phases[1], ...phases[2], ...phases[3]].map((artifact) => artifact.name));
    // No phase-1 artifacts at all: start from analysis.
    if (detectedPhase <= 1 && phases[1].length === 0) {
        return "Run /analyst to start analysis";
    }
    const pipeline = [
        ["PRD", "Run /create-prd to create the PRD"],
        ["Architecture", "Run /architect to create architecture"],
        ["Epics & Stories", "Run /create-epics-stories to define epics and stories"],
        ["Readiness Report", "Run /architect to generate readiness report"],
    ];
    for (const [artifactName, suggestion] of pipeline) {
        if (!have.has(artifactName)) {
            return suggestion;
        }
    }
    return "Run: bmalph implement";
}
/**
 * Scans the project's artifacts directory and summarizes what was found:
 * per-phase artifacts, detected phase, missing required artifacts, and a
 * suggested next action. Resolves to null when no artifacts directory
 * exists.
 */
export async function scanProjectArtifacts(projectDir) {
    const artifactsDir = await findArtifactsDir(projectDir);
    if (!artifactsDir) {
        return null;
    }
    const files = await readdir(artifactsDir);
    const phases = scanArtifacts(files);
    const detectedPhase = detectPhase(phases);
    // Normalize to forward slashes so the reported path is stable on Windows.
    const directory = relative(projectDir, artifactsDir).replace(/\\/g, "/");
    return {
        directory,
        found: files.filter((name) => classifyArtifact(name) !== null),
        detectedPhase,
        missing: getMissing(phases),
        phases,
        nextAction: suggestNext(phases, detectedPhase),
    };
}
@@ -1,2 +1,3 @@
|
|
|
1
1
|
export declare function findArtifactsDir(projectDir: string): Promise<string | null>;
|
|
2
|
+
/** @deprecated Use `runPreflight` from `./preflight.js` instead. Kept for backward compatibility. */
|
|
2
3
|
export declare function validateArtifacts(files: string[], artifactsDir: string): Promise<string[]>;
|
|
@@ -19,6 +19,7 @@ export async function findArtifactsDir(projectDir) {
|
|
|
19
19
|
debug(`No artifacts found. Checked: ${candidates.join(", ")}`);
|
|
20
20
|
return null;
|
|
21
21
|
}
|
|
22
|
+
/** @deprecated Use `runPreflight` from `./preflight.js` instead. Kept for backward compatibility. */
|
|
22
23
|
export async function validateArtifacts(files, artifactsDir) {
|
|
23
24
|
const warnings = [];
|
|
24
25
|
const hasPrd = files.some((f) => /prd/i.test(f));
|
|
@@ -37,6 +37,8 @@ export function extractProjectContext(artifacts) {
|
|
|
37
37
|
// Combine all content, keyed by likely role
|
|
38
38
|
let prdContent = "";
|
|
39
39
|
let archContent = "";
|
|
40
|
+
let uxContent = "";
|
|
41
|
+
let researchContent = "";
|
|
40
42
|
for (const [filename, content] of artifacts) {
|
|
41
43
|
if (/prd/i.test(filename))
|
|
42
44
|
prdContent += "\n" + content;
|
|
@@ -44,6 +46,10 @@ export function extractProjectContext(artifacts) {
|
|
|
44
46
|
archContent += "\n" + content;
|
|
45
47
|
if (/readiness/i.test(filename))
|
|
46
48
|
archContent += "\n" + content;
|
|
49
|
+
if (/ux/i.test(filename))
|
|
50
|
+
uxContent += "\n" + content;
|
|
51
|
+
if (/research|market|domain|brief/i.test(filename))
|
|
52
|
+
researchContent += "\n" + content;
|
|
47
53
|
}
|
|
48
54
|
const allContent = prdContent + "\n" + archContent;
|
|
49
55
|
const truncated = [];
|
|
@@ -98,6 +104,28 @@ export function extractProjectContext(artifacts) {
|
|
|
98
104
|
/^##\s+Quality Attributes/m,
|
|
99
105
|
],
|
|
100
106
|
},
|
|
107
|
+
{
|
|
108
|
+
field: "designGuidelines",
|
|
109
|
+
source: uxContent,
|
|
110
|
+
patterns: [
|
|
111
|
+
/^##\s+Design Principles/m,
|
|
112
|
+
/^##\s+Design System/m,
|
|
113
|
+
/^##\s+Core Experience/m,
|
|
114
|
+
/^##\s+User Flows/m,
|
|
115
|
+
/^##\s+Visual Foundation/m,
|
|
116
|
+
],
|
|
117
|
+
},
|
|
118
|
+
{
|
|
119
|
+
field: "researchInsights",
|
|
120
|
+
source: researchContent,
|
|
121
|
+
patterns: [
|
|
122
|
+
/^##\s+Key Findings/m,
|
|
123
|
+
/^##\s+Recommendations/m,
|
|
124
|
+
/^##\s+Market Analysis/m,
|
|
125
|
+
/^##\s+Domain Insights/m,
|
|
126
|
+
/^##\s+Summary/m,
|
|
127
|
+
],
|
|
128
|
+
},
|
|
101
129
|
];
|
|
102
130
|
const context = {
|
|
103
131
|
projectGoals: "",
|
|
@@ -107,6 +135,8 @@ export function extractProjectContext(artifacts) {
|
|
|
107
135
|
scopeBoundaries: "",
|
|
108
136
|
targetUsers: "",
|
|
109
137
|
nonFunctionalRequirements: "",
|
|
138
|
+
designGuidelines: "",
|
|
139
|
+
researchInsights: "",
|
|
110
140
|
};
|
|
111
141
|
for (const { field, source, patterns } of fields) {
|
|
112
142
|
const result = extractFromPatternsWithInfo(source, patterns);
|
|
@@ -140,6 +170,8 @@ export function generateProjectContextMd(context, projectName) {
|
|
|
140
170
|
{ heading: "Scope Boundaries", content: context.scopeBoundaries },
|
|
141
171
|
{ heading: "Target Users", content: context.targetUsers },
|
|
142
172
|
{ heading: "Non-Functional Requirements", content: context.nonFunctionalRequirements },
|
|
173
|
+
{ heading: "Design Guidelines", content: context.designGuidelines },
|
|
174
|
+
{ heading: "Research Insights", content: context.researchInsights },
|
|
143
175
|
];
|
|
144
176
|
for (const { heading, content } of sections) {
|
|
145
177
|
if (content) {
|
|
@@ -161,6 +193,8 @@ export function generatePrompt(projectName, context) {
|
|
|
161
193
|
context.targetUsers && `### Target Users\n${context.targetUsers}`,
|
|
162
194
|
context.nonFunctionalRequirements &&
|
|
163
195
|
`### Non-Functional Requirements\n${context.nonFunctionalRequirements}`,
|
|
196
|
+
context.designGuidelines && `### Design Guidelines\n${context.designGuidelines}`,
|
|
197
|
+
context.researchInsights && `### Research Insights\n${context.researchInsights}`,
|
|
164
198
|
]
|
|
165
199
|
.filter(Boolean)
|
|
166
200
|
.join("\n\n")
|
|
@@ -10,6 +10,12 @@ export declare function detectOrphanedCompletedStories(existingItems: FixPlanIte
|
|
|
10
10
|
/**
|
|
11
11
|
* Detects stories that may have been renumbered by comparing titles.
|
|
12
12
|
* Returns warnings when a completed story's title appears under a different ID.
|
|
13
|
+
* Skips stories that were already auto-preserved via title-based merge.
|
|
13
14
|
*/
|
|
14
|
-
export declare function detectRenumberedStories(existingItems: FixPlanItemWithTitle[], newStories: Story[]): string[];
|
|
15
|
-
export declare function
|
|
15
|
+
export declare function detectRenumberedStories(existingItems: FixPlanItemWithTitle[], newStories: Story[], preservedIds?: Set<string>): string[];
|
|
16
|
+
export declare function normalizeTitle(title: string): string;
|
|
17
|
+
/**
|
|
18
|
+
* Builds a map from normalized (lowercased) title to story ID for completed items.
|
|
19
|
+
*/
|
|
20
|
+
export declare function buildCompletedTitleMap(items: FixPlanItemWithTitle[]): Map<string, string>;
|
|
21
|
+
export declare function mergeFixPlanProgress(newFixPlan: string, completedIds: Set<string>, titleMap?: Map<string, string>, completedTitles?: Map<string, string>): string;
|
|
@@ -65,30 +65,56 @@ export function detectOrphanedCompletedStories(existingItems, newStoryIds) {
|
|
|
65
65
|
/**
|
|
66
66
|
* Detects stories that may have been renumbered by comparing titles.
|
|
67
67
|
* Returns warnings when a completed story's title appears under a different ID.
|
|
68
|
+
* Skips stories that were already auto-preserved via title-based merge.
|
|
68
69
|
*/
|
|
69
|
-
export function detectRenumberedStories(existingItems, newStories) {
|
|
70
|
+
export function detectRenumberedStories(existingItems, newStories, preservedIds) {
|
|
70
71
|
const warnings = [];
|
|
71
72
|
// Build a map of new story titles (lowercased) to IDs
|
|
72
73
|
const newTitleToId = new Map();
|
|
73
74
|
for (const story of newStories) {
|
|
74
|
-
newTitleToId.set(story.title
|
|
75
|
+
newTitleToId.set(normalizeTitle(story.title), story.id);
|
|
75
76
|
}
|
|
76
77
|
// Check each completed story
|
|
77
78
|
for (const item of existingItems) {
|
|
78
79
|
if (!item.completed || !item.title)
|
|
79
80
|
continue;
|
|
80
|
-
const normalizedTitle = item.title
|
|
81
|
+
const normalizedTitle = normalizeTitle(item.title);
|
|
81
82
|
const newId = newTitleToId.get(normalizedTitle);
|
|
82
83
|
// If title exists under a different ID, warn about renumbering
|
|
83
|
-
|
|
84
|
+
// (unless it was already auto-preserved)
|
|
85
|
+
if (newId && newId !== item.id && !preservedIds?.has(newId)) {
|
|
84
86
|
warnings.push(`Story "${item.title}" appears to have been renumbered from ${item.id} to ${newId}. Completion status was not preserved.`);
|
|
85
87
|
}
|
|
86
88
|
}
|
|
87
89
|
return warnings;
|
|
88
90
|
}
|
|
89
|
-
export function
|
|
90
|
-
|
|
91
|
+
export function normalizeTitle(title) {
|
|
92
|
+
return title.toLowerCase().trim();
|
|
93
|
+
}
|
|
94
|
+
/**
|
|
95
|
+
* Builds a map from normalized (lowercased) title to story ID for completed items.
|
|
96
|
+
*/
|
|
97
|
+
export function buildCompletedTitleMap(items) {
|
|
98
|
+
const map = new Map();
|
|
99
|
+
for (const item of items) {
|
|
100
|
+
if (item.completed && item.title) {
|
|
101
|
+
map.set(normalizeTitle(item.title), item.id);
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
return map;
|
|
105
|
+
}
|
|
106
|
+
export function mergeFixPlanProgress(newFixPlan, completedIds, titleMap, completedTitles) {
|
|
107
|
+
// Replace [ ] with [x] for completed story IDs or title matches
|
|
91
108
|
return newFixPlan.replace(/^(\s*-\s*)\[ \](\s*Story\s+(\d+\.\d+):)/gm, (match, prefix, suffix, id) => {
|
|
92
|
-
|
|
109
|
+
if (completedIds.has(id))
|
|
110
|
+
return `${prefix}[x]${suffix}`;
|
|
111
|
+
// Title-based fallback: check if title matches a completed story
|
|
112
|
+
if (titleMap && completedTitles) {
|
|
113
|
+
const title = titleMap.get(id);
|
|
114
|
+
if (title && completedTitles.has(normalizeTitle(title))) {
|
|
115
|
+
return `${prefix}[x]${suffix}`;
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
return match;
|
|
93
119
|
});
|
|
94
120
|
}
|
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import type { TransitionResult } from "./types.js";
|
|
2
|
-
export declare function runTransition(projectDir: string): Promise<TransitionResult>;
|
|
1
|
+
import type { TransitionResult, TransitionOptions } from "./types.js";
|
|
2
|
+
export declare function runTransition(projectDir: string, options?: TransitionOptions): Promise<TransitionResult>;
|