bmalph 2.2.1 → 2.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +162 -48
- package/dist/cli.js +14 -0
- package/dist/commands/doctor.d.ts +14 -2
- package/dist/commands/doctor.js +105 -41
- package/dist/commands/implement.d.ts +6 -0
- package/dist/commands/implement.js +82 -0
- package/dist/commands/init.d.ts +1 -0
- package/dist/commands/init.js +74 -7
- package/dist/commands/reset.d.ts +7 -0
- package/dist/commands/reset.js +81 -0
- package/dist/commands/status.js +86 -10
- package/dist/commands/upgrade.js +8 -5
- package/dist/installer.d.ts +15 -4
- package/dist/installer.js +190 -101
- package/dist/platform/aider.d.ts +2 -0
- package/dist/platform/aider.js +71 -0
- package/dist/platform/claude-code.d.ts +2 -0
- package/dist/platform/claude-code.js +87 -0
- package/dist/platform/codex.d.ts +2 -0
- package/dist/platform/codex.js +67 -0
- package/dist/platform/copilot.d.ts +2 -0
- package/dist/platform/copilot.js +71 -0
- package/dist/platform/cursor.d.ts +2 -0
- package/dist/platform/cursor.js +71 -0
- package/dist/platform/detect.d.ts +7 -0
- package/dist/platform/detect.js +23 -0
- package/dist/platform/index.d.ts +4 -0
- package/dist/platform/index.js +3 -0
- package/dist/platform/registry.d.ts +4 -0
- package/dist/platform/registry.js +27 -0
- package/dist/platform/resolve.d.ts +8 -0
- package/dist/platform/resolve.js +24 -0
- package/dist/platform/types.d.ts +41 -0
- package/dist/platform/types.js +7 -0
- package/dist/platform/windsurf.d.ts +2 -0
- package/dist/platform/windsurf.js +71 -0
- package/dist/reset.d.ts +18 -0
- package/dist/reset.js +181 -0
- package/dist/transition/artifact-scan.d.ts +27 -0
- package/dist/transition/artifact-scan.js +91 -0
- package/dist/transition/artifacts.d.ts +1 -0
- package/dist/transition/artifacts.js +2 -1
- package/dist/transition/context.js +34 -0
- package/dist/transition/fix-plan.d.ts +8 -2
- package/dist/transition/fix-plan.js +33 -7
- package/dist/transition/orchestration.d.ts +2 -2
- package/dist/transition/orchestration.js +120 -41
- package/dist/transition/preflight.d.ts +6 -0
- package/dist/transition/preflight.js +154 -0
- package/dist/transition/specs-changelog.js +4 -1
- package/dist/transition/specs-index.d.ts +1 -1
- package/dist/transition/specs-index.js +24 -1
- package/dist/transition/types.d.ts +23 -1
- package/dist/utils/config.d.ts +2 -0
- package/dist/utils/dryrun.d.ts +1 -1
- package/dist/utils/dryrun.js +22 -0
- package/dist/utils/validate.js +18 -2
- package/package.json +1 -1
- package/ralph/drivers/claude-code.sh +118 -0
- package/ralph/drivers/codex.sh +81 -0
- package/ralph/ralph_import.sh +11 -0
- package/ralph/ralph_loop.sh +52 -64
- package/ralph/templates/ralphrc.template +7 -0
- package/slash-commands/bmalph-doctor.md +16 -0
- package/slash-commands/bmalph-implement.md +18 -141
- package/slash-commands/bmalph-status.md +15 -0
- package/slash-commands/bmalph-upgrade.md +15 -0
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
import { readdir } from "fs/promises";
|
|
2
|
+
import { relative } from "path";
|
|
3
|
+
import { findArtifactsDir } from "./artifacts.js";
|
|
4
|
+
// Filename-classification rules, evaluated in order: the first pattern that
// matches wins, so earlier (more specific) rules shadow later ones.
const ARTIFACT_RULES = [
    { pattern: /brief/i, phase: 1, name: "Product Brief", required: false },
    { pattern: /market/i, phase: 1, name: "Market Research", required: false },
    { pattern: /domain/i, phase: 1, name: "Domain Research", required: false },
    { pattern: /tech.*research/i, phase: 1, name: "Technical Research", required: false },
    { pattern: /prd/i, phase: 2, name: "PRD", required: true },
    { pattern: /ux/i, phase: 2, name: "UX Design", required: false },
    { pattern: /architect/i, phase: 3, name: "Architecture", required: true },
    { pattern: /epic|stor/i, phase: 3, name: "Epics & Stories", required: true },
    { pattern: /readiness/i, phase: 3, name: "Readiness Report", required: true },
];
/**
 * Classifies an artifact filename against ARTIFACT_RULES.
 * Returns `{ phase, name, required }` from the first matching rule,
 * or null when the filename matches no known artifact pattern.
 */
export function classifyArtifact(filename) {
    const rule = ARTIFACT_RULES.find((candidate) => candidate.pattern.test(filename));
    if (!rule) {
        return null;
    }
    const { phase, name, required } = rule;
    return { phase, name, required };
}
|
|
23
|
+
/**
 * Buckets the given filenames by planning phase (1, 2, 3).
 * Files that classifyArtifact cannot recognize are skipped; each kept
 * entry carries its classification plus the originating filename.
 */
export function scanArtifacts(files) {
    const phases = { 1: [], 2: [], 3: [] };
    for (const filename of files) {
        const info = classifyArtifact(filename);
        if (info === null) {
            continue;
        }
        phases[info.phase].push({ ...info, filename });
    }
    return phases;
}
|
|
34
|
+
/**
 * Returns the highest phase (3 beats 2 beats 1) that contains at least
 * one artifact; defaults to phase 1 when every bucket is empty.
 */
export function detectPhase(phases) {
    const highest = [3, 2, 1].find((phase) => phases[phase].length > 0);
    return highest ?? 1;
}
|
|
42
|
+
/**
 * Lists the names of required artifacts (per ARTIFACT_RULES) that do not
 * appear in any phase bucket, preserving rule-declaration order.
 */
export function getMissing(phases) {
    const present = new Set([...phases[1], ...phases[2], ...phases[3]].map((artifact) => artifact.name));
    return ARTIFACT_RULES
        .filter((rule) => rule.required && !present.has(rule.name))
        .map((rule) => rule.name);
}
|
|
52
|
+
/**
 * Suggests the next workflow command based on which artifacts exist.
 * `detectedPhase` only gates the very first suggestion (nothing analyzed
 * yet); after that the recommendation follows the required-artifact chain
 * PRD -> Architecture -> Epics & Stories -> Readiness Report -> implement.
 */
export function suggestNext(phases, detectedPhase) {
    const known = new Set([...phases[1], ...phases[2], ...phases[3]].map((artifact) => artifact.name));
    if (detectedPhase <= 1 && phases[1].length === 0) {
        return "Run /analyst to start analysis";
    }
    // Ordered [artifact name, advice] pairs: first missing one wins.
    const steps = [
        ["PRD", "Run /create-prd to create the PRD"],
        ["Architecture", "Run /architect to create architecture"],
        ["Epics & Stories", "Run /create-epics-stories to define epics and stories"],
        ["Readiness Report", "Run /architect to generate readiness report"],
    ];
    for (const [name, advice] of steps) {
        if (!known.has(name)) {
            return advice;
        }
    }
    return "Run: bmalph implement";
}
|
|
71
|
+
/**
 * Scans the project's BMAD artifacts directory and summarizes it:
 * the directory (relative to projectDir, forward-slash normalized),
 * the classifiable filenames, the detected phase, the missing required
 * artifacts, the per-phase buckets, and a suggested next action.
 * Returns null when no artifacts directory can be located.
 */
export async function scanProjectArtifacts(projectDir) {
    const artifactsDir = await findArtifactsDir(projectDir);
    if (!artifactsDir) {
        return null;
    }
    const files = await readdir(artifactsDir);
    const phases = scanArtifacts(files);
    const detectedPhase = detectPhase(phases);
    // Normalize Windows path separators so the reported directory is
    // stable across platforms.
    const directory = relative(projectDir, artifactsDir).replace(/\\/g, "/");
    return {
        directory,
        found: files.filter((filename) => classifyArtifact(filename) !== null),
        detectedPhase,
        missing: getMissing(phases),
        phases,
        nextAction: suggestNext(phases, detectedPhase),
    };
}
|
|
@@ -1,2 +1,3 @@
|
|
|
1
1
|
export declare function findArtifactsDir(projectDir: string): Promise<string | null>;
|
|
2
|
+
/** @deprecated Use `runPreflight` from `./preflight.js` instead. Kept for backward compatibility. */
|
|
2
3
|
export declare function validateArtifacts(files: string[], artifactsDir: string): Promise<string[]>;
|
|
@@ -19,6 +19,7 @@ export async function findArtifactsDir(projectDir) {
|
|
|
19
19
|
debug(`No artifacts found. Checked: ${candidates.join(", ")}`);
|
|
20
20
|
return null;
|
|
21
21
|
}
|
|
22
|
+
/** @deprecated Use `runPreflight` from `./preflight.js` instead. Kept for backward compatibility. */
|
|
22
23
|
export async function validateArtifacts(files, artifactsDir) {
|
|
23
24
|
const warnings = [];
|
|
24
25
|
const hasPrd = files.some((f) => /prd/i.test(f));
|
|
@@ -39,7 +40,7 @@ export async function validateArtifacts(files, artifactsDir) {
|
|
|
39
40
|
}
|
|
40
41
|
}
|
|
41
42
|
catch {
|
|
42
|
-
|
|
43
|
+
warnings.push("Could not read readiness report — NO-GO status unverified");
|
|
43
44
|
}
|
|
44
45
|
}
|
|
45
46
|
return warnings;
|
|
@@ -37,6 +37,8 @@ export function extractProjectContext(artifacts) {
|
|
|
37
37
|
// Combine all content, keyed by likely role
|
|
38
38
|
let prdContent = "";
|
|
39
39
|
let archContent = "";
|
|
40
|
+
let uxContent = "";
|
|
41
|
+
let researchContent = "";
|
|
40
42
|
for (const [filename, content] of artifacts) {
|
|
41
43
|
if (/prd/i.test(filename))
|
|
42
44
|
prdContent += "\n" + content;
|
|
@@ -44,6 +46,10 @@ export function extractProjectContext(artifacts) {
|
|
|
44
46
|
archContent += "\n" + content;
|
|
45
47
|
if (/readiness/i.test(filename))
|
|
46
48
|
archContent += "\n" + content;
|
|
49
|
+
if (/ux/i.test(filename))
|
|
50
|
+
uxContent += "\n" + content;
|
|
51
|
+
if (/research|market|domain|brief/i.test(filename))
|
|
52
|
+
researchContent += "\n" + content;
|
|
47
53
|
}
|
|
48
54
|
const allContent = prdContent + "\n" + archContent;
|
|
49
55
|
const truncated = [];
|
|
@@ -98,6 +104,28 @@ export function extractProjectContext(artifacts) {
|
|
|
98
104
|
/^##\s+Quality Attributes/m,
|
|
99
105
|
],
|
|
100
106
|
},
|
|
107
|
+
{
|
|
108
|
+
field: "designGuidelines",
|
|
109
|
+
source: uxContent,
|
|
110
|
+
patterns: [
|
|
111
|
+
/^##\s+Design Principles/m,
|
|
112
|
+
/^##\s+Design System/m,
|
|
113
|
+
/^##\s+Core Experience/m,
|
|
114
|
+
/^##\s+User Flows/m,
|
|
115
|
+
/^##\s+Visual Foundation/m,
|
|
116
|
+
],
|
|
117
|
+
},
|
|
118
|
+
{
|
|
119
|
+
field: "researchInsights",
|
|
120
|
+
source: researchContent,
|
|
121
|
+
patterns: [
|
|
122
|
+
/^##\s+Key Findings/m,
|
|
123
|
+
/^##\s+Recommendations/m,
|
|
124
|
+
/^##\s+Market Analysis/m,
|
|
125
|
+
/^##\s+Domain Insights/m,
|
|
126
|
+
/^##\s+Summary/m,
|
|
127
|
+
],
|
|
128
|
+
},
|
|
101
129
|
];
|
|
102
130
|
const context = {
|
|
103
131
|
projectGoals: "",
|
|
@@ -107,6 +135,8 @@ export function extractProjectContext(artifacts) {
|
|
|
107
135
|
scopeBoundaries: "",
|
|
108
136
|
targetUsers: "",
|
|
109
137
|
nonFunctionalRequirements: "",
|
|
138
|
+
designGuidelines: "",
|
|
139
|
+
researchInsights: "",
|
|
110
140
|
};
|
|
111
141
|
for (const { field, source, patterns } of fields) {
|
|
112
142
|
const result = extractFromPatternsWithInfo(source, patterns);
|
|
@@ -140,6 +170,8 @@ export function generateProjectContextMd(context, projectName) {
|
|
|
140
170
|
{ heading: "Scope Boundaries", content: context.scopeBoundaries },
|
|
141
171
|
{ heading: "Target Users", content: context.targetUsers },
|
|
142
172
|
{ heading: "Non-Functional Requirements", content: context.nonFunctionalRequirements },
|
|
173
|
+
{ heading: "Design Guidelines", content: context.designGuidelines },
|
|
174
|
+
{ heading: "Research Insights", content: context.researchInsights },
|
|
143
175
|
];
|
|
144
176
|
for (const { heading, content } of sections) {
|
|
145
177
|
if (content) {
|
|
@@ -161,6 +193,8 @@ export function generatePrompt(projectName, context) {
|
|
|
161
193
|
context.targetUsers && `### Target Users\n${context.targetUsers}`,
|
|
162
194
|
context.nonFunctionalRequirements &&
|
|
163
195
|
`### Non-Functional Requirements\n${context.nonFunctionalRequirements}`,
|
|
196
|
+
context.designGuidelines && `### Design Guidelines\n${context.designGuidelines}`,
|
|
197
|
+
context.researchInsights && `### Research Insights\n${context.researchInsights}`,
|
|
164
198
|
]
|
|
165
199
|
.filter(Boolean)
|
|
166
200
|
.join("\n\n")
|
|
@@ -10,6 +10,12 @@ export declare function detectOrphanedCompletedStories(existingItems: FixPlanIte
|
|
|
10
10
|
/**
|
|
11
11
|
* Detects stories that may have been renumbered by comparing titles.
|
|
12
12
|
* Returns warnings when a completed story's title appears under a different ID.
|
|
13
|
+
* Skips stories that were already auto-preserved via title-based merge.
|
|
13
14
|
*/
|
|
14
|
-
export declare function detectRenumberedStories(existingItems: FixPlanItemWithTitle[], newStories: Story[]): string[];
|
|
15
|
-
export declare function
|
|
15
|
+
export declare function detectRenumberedStories(existingItems: FixPlanItemWithTitle[], newStories: Story[], preservedIds?: Set<string>): string[];
|
|
16
|
+
export declare function normalizeTitle(title: string): string;
|
|
17
|
+
/**
|
|
18
|
+
* Builds a map from normalized (lowercased) title to story ID for completed items.
|
|
19
|
+
*/
|
|
20
|
+
export declare function buildCompletedTitleMap(items: FixPlanItemWithTitle[]): Map<string, string>;
|
|
21
|
+
export declare function mergeFixPlanProgress(newFixPlan: string, completedIds: Set<string>, titleMap?: Map<string, string>, completedTitles?: Map<string, string>): string;
|
|
@@ -65,30 +65,56 @@ export function detectOrphanedCompletedStories(existingItems, newStoryIds) {
|
|
|
65
65
|
/**
|
|
66
66
|
* Detects stories that may have been renumbered by comparing titles.
|
|
67
67
|
* Returns warnings when a completed story's title appears under a different ID.
|
|
68
|
+
* Skips stories that were already auto-preserved via title-based merge.
|
|
68
69
|
*/
|
|
69
|
-
export function detectRenumberedStories(existingItems, newStories) {
|
|
70
|
+
export function detectRenumberedStories(existingItems, newStories, preservedIds) {
    // Index the incoming stories by normalized title so a completed item's
    // title can be looked up under its (possibly new) ID.
    const idByTitle = new Map(newStories.map((story) => [normalizeTitle(story.title), story.id]));
    const warnings = [];
    for (const item of existingItems) {
        // Only completed items with a usable title can be tracked.
        if (!item.completed || !item.title) {
            continue;
        }
        const newId = idByTitle.get(normalizeTitle(item.title));
        // Same title under a different ID suggests renumbering; stay quiet
        // when the title-based merge already carried the completion over.
        if (newId && newId !== item.id && !preservedIds?.has(newId)) {
            warnings.push(`Story "${item.title}" appears to have been renumbered from ${item.id} to ${newId}. Completion status was not preserved.`);
        }
    }
    return warnings;
}
|
|
89
|
-
export function
|
|
90
|
-
|
|
91
|
+
/**
 * Canonical form used for title comparisons: lowercased with surrounding
 * whitespace stripped.
 */
export function normalizeTitle(title) {
    const trimmed = title.trim();
    return trimmed.toLowerCase();
}
|
|
94
|
+
/**
 * Builds a map from normalized (lowercased) title to story ID for completed items.
 * Items that are not completed, or that lack a title, are skipped.
 */
export function buildCompletedTitleMap(items) {
    const titleToId = new Map();
    for (const item of items) {
        if (!item.completed || !item.title) {
            continue;
        }
        titleToId.set(normalizeTitle(item.title), item.id);
    }
    return titleToId;
}
|
|
106
|
+
/**
 * Re-applies completion marks to a freshly generated fix plan: each
 * "- [ ] Story N.M:" line becomes "- [x]" when the story ID is in
 * completedIds or, as a fallback, when its title (via titleMap) matches a
 * previously completed title in completedTitles.
 */
export function mergeFixPlanProgress(newFixPlan, completedIds, titleMap, completedTitles) {
    const isDone = (id) => {
        if (completedIds.has(id)) {
            return true;
        }
        // Title-based fallback only applies when both maps were supplied.
        if (!titleMap || !completedTitles) {
            return false;
        }
        const title = titleMap.get(id);
        return Boolean(title) && completedTitles.has(normalizeTitle(title));
    };
    return newFixPlan.replace(/^(\s*-\s*)\[ \](\s*Story\s+(\d+\.\d+):)/gm, (line, prefix, suffix, id) => (isDone(id) ? `${prefix}[x]${suffix}` : line));
}
|
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import type { TransitionResult } from "./types.js";
|
|
2
|
-
export declare function runTransition(projectDir: string): Promise<TransitionResult>;
|
|
1
|
+
import type { TransitionResult, TransitionOptions } from "./types.js";
|
|
2
|
+
export declare function runTransition(projectDir: string, options?: TransitionOptions): Promise<TransitionResult>;
|
|
@@ -2,17 +2,18 @@ import { readFile, readdir, cp, mkdir, access, rm, rename } from "fs/promises";
|
|
|
2
2
|
import { join } from "path";
|
|
3
3
|
import { debug, info, warn } from "../utils/logger.js";
|
|
4
4
|
import { isEnoent, formatError } from "../utils/errors.js";
|
|
5
|
-
import { atomicWriteFile } from "../utils/file-system.js";
|
|
5
|
+
import { atomicWriteFile, exists } from "../utils/file-system.js";
|
|
6
6
|
import { readConfig } from "../utils/config.js";
|
|
7
7
|
import { readState, writeState } from "../utils/state.js";
|
|
8
8
|
import { parseStoriesWithWarnings } from "./story-parsing.js";
|
|
9
|
-
import { generateFixPlan, parseFixPlan, mergeFixPlanProgress, detectOrphanedCompletedStories, detectRenumberedStories, } from "./fix-plan.js";
|
|
9
|
+
import { generateFixPlan, parseFixPlan, mergeFixPlanProgress, detectOrphanedCompletedStories, detectRenumberedStories, buildCompletedTitleMap, normalizeTitle, } from "./fix-plan.js";
|
|
10
10
|
import { detectTechStack, customizeAgentMd } from "./tech-stack.js";
|
|
11
|
-
import { findArtifactsDir
|
|
11
|
+
import { findArtifactsDir } from "./artifacts.js";
|
|
12
|
+
import { runPreflight } from "./preflight.js";
|
|
12
13
|
import { extractProjectContext, generateProjectContextMd, generatePrompt, detectTruncation, } from "./context.js";
|
|
13
14
|
import { generateSpecsChangelog, formatChangelog } from "./specs-changelog.js";
|
|
14
15
|
import { generateSpecsIndex, formatSpecsIndexMd } from "./specs-index.js";
|
|
15
|
-
export async function runTransition(projectDir) {
|
|
16
|
+
export async function runTransition(projectDir, options) {
|
|
16
17
|
info("Locating BMAD artifacts...");
|
|
17
18
|
const artifactsDir = await findArtifactsDir(projectDir);
|
|
18
19
|
if (!artifactsDir) {
|
|
@@ -20,6 +21,19 @@ export async function runTransition(projectDir) {
|
|
|
20
21
|
}
|
|
21
22
|
// Find and parse stories file
|
|
22
23
|
const files = await readdir(artifactsDir);
|
|
24
|
+
// Read artifact contents early for preflight validation and later use
|
|
25
|
+
const artifactContents = new Map();
|
|
26
|
+
for (const file of files) {
|
|
27
|
+
if (file.endsWith(".md")) {
|
|
28
|
+
try {
|
|
29
|
+
const content = await readFile(join(artifactsDir, file), "utf-8");
|
|
30
|
+
artifactContents.set(file, content);
|
|
31
|
+
}
|
|
32
|
+
catch (err) {
|
|
33
|
+
warn(`Could not read artifact ${file}: ${formatError(err)}`);
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
}
|
|
23
37
|
const storiesPattern = /^(epics[-_]?(and[-_]?)?)?stor(y|ies)([-_]\d+)?\.md$/i;
|
|
24
38
|
const storiesFile = files.find((f) => storiesPattern.test(f) || /epic/i.test(f));
|
|
25
39
|
if (!storiesFile) {
|
|
@@ -33,10 +47,40 @@ export async function runTransition(projectDir) {
|
|
|
33
47
|
if (stories.length === 0) {
|
|
34
48
|
throw new Error("No stories parsed from the epics file. Ensure stories follow the format: ### Story N.M: Title");
|
|
35
49
|
}
|
|
50
|
+
// Pre-flight validation
|
|
51
|
+
info("Pre-flight validation...");
|
|
52
|
+
const preflightResult = runPreflight(artifactContents, files, stories, parseWarnings);
|
|
53
|
+
for (const issue of preflightResult.issues) {
|
|
54
|
+
if (issue.severity === "error") {
|
|
55
|
+
warn(` ERROR ${issue.id}: ${issue.message}`);
|
|
56
|
+
if (issue.suggestion)
|
|
57
|
+
warn(` ${issue.suggestion}`);
|
|
58
|
+
}
|
|
59
|
+
else if (issue.severity === "warning") {
|
|
60
|
+
warn(` WARN ${issue.id}: ${issue.message}`);
|
|
61
|
+
if (issue.suggestion)
|
|
62
|
+
warn(` ${issue.suggestion}`);
|
|
63
|
+
}
|
|
64
|
+
else {
|
|
65
|
+
info(` INFO ${issue.id}: ${issue.message}`);
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
if (!preflightResult.pass) {
|
|
69
|
+
if (options?.force) {
|
|
70
|
+
warn("Pre-flight validation has errors but --force was used, continuing...");
|
|
71
|
+
}
|
|
72
|
+
else {
|
|
73
|
+
const errors = preflightResult.issues.filter((i) => i.severity === "error");
|
|
74
|
+
throw new Error(`Pre-flight validation failed: ${errors.map((e) => e.message).join("; ")}. Use --force to override.`);
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
// Track generated files for summary output
|
|
78
|
+
const generatedFiles = [];
|
|
36
79
|
// Check existing fix_plan for completed items (smart merge)
|
|
37
80
|
let completedIds = new Set();
|
|
38
81
|
let existingItems = [];
|
|
39
82
|
const fixPlanPath = join(projectDir, ".ralph/@fix_plan.md");
|
|
83
|
+
const fixPlanExisted = await exists(fixPlanPath);
|
|
40
84
|
try {
|
|
41
85
|
const existingFixPlan = await readFile(fixPlanPath, "utf-8");
|
|
42
86
|
existingItems = parseFixPlan(existingFixPlan);
|
|
@@ -57,16 +101,30 @@ export async function runTransition(projectDir) {
|
|
|
57
101
|
for (const w of orphanWarnings) {
|
|
58
102
|
warn(w);
|
|
59
103
|
}
|
|
60
|
-
//
|
|
61
|
-
const
|
|
62
|
-
|
|
63
|
-
warn(w);
|
|
64
|
-
}
|
|
104
|
+
// Build title maps for title-based merge (Gap 3: renumbered story preservation)
|
|
105
|
+
const completedTitles = buildCompletedTitleMap(existingItems);
|
|
106
|
+
const newTitleMap = new Map(stories.map((s) => [s.id, s.title]));
|
|
65
107
|
// Generate new fix_plan from current stories, preserving completion status
|
|
66
108
|
info(`Generating fix plan for ${stories.length} stories...`);
|
|
67
109
|
const newFixPlan = generateFixPlan(stories, storiesFile);
|
|
68
|
-
const mergedFixPlan = mergeFixPlanProgress(newFixPlan, completedIds);
|
|
110
|
+
const mergedFixPlan = mergeFixPlanProgress(newFixPlan, completedIds, newTitleMap, completedTitles);
|
|
111
|
+
// Detect which stories were preserved via title match (for renumber warning suppression)
|
|
112
|
+
const preservedIds = new Set();
|
|
113
|
+
for (const [id, title] of newTitleMap) {
|
|
114
|
+
if (!completedIds.has(id) && completedTitles.has(normalizeTitle(title))) {
|
|
115
|
+
preservedIds.add(id);
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
// Detect renumbered stories (Bug #3), skipping auto-preserved ones
|
|
119
|
+
const renumberWarnings = detectRenumberedStories(existingItems, stories, preservedIds);
|
|
120
|
+
for (const w of renumberWarnings) {
|
|
121
|
+
warn(w);
|
|
122
|
+
}
|
|
69
123
|
await atomicWriteFile(fixPlanPath, mergedFixPlan);
|
|
124
|
+
generatedFiles.push({
|
|
125
|
+
path: ".ralph/@fix_plan.md",
|
|
126
|
+
action: fixPlanExisted ? "updated" : "created",
|
|
127
|
+
});
|
|
70
128
|
// Track whether progress was preserved for return value
|
|
71
129
|
const fixPlanPreserved = completedIds.size > 0;
|
|
72
130
|
// Generate changelog before overwriting specs/
|
|
@@ -78,6 +136,7 @@ export async function runTransition(projectDir) {
|
|
|
78
136
|
if (changes.length > 0) {
|
|
79
137
|
const changelog = formatChangelog(changes, new Date().toISOString());
|
|
80
138
|
await atomicWriteFile(join(projectDir, ".ralph/SPECS_CHANGELOG.md"), changelog);
|
|
139
|
+
generatedFiles.push({ path: ".ralph/SPECS_CHANGELOG.md", action: "updated" });
|
|
81
140
|
debug(`Generated SPECS_CHANGELOG.md with ${changes.length} changes`);
|
|
82
141
|
}
|
|
83
142
|
}
|
|
@@ -114,6 +173,7 @@ export async function runTransition(projectDir) {
|
|
|
114
173
|
await access(specsTmpDir);
|
|
115
174
|
await rm(specsDir, { recursive: true, force: true });
|
|
116
175
|
await rename(specsTmpDir, specsDir);
|
|
176
|
+
generatedFiles.push({ path: ".ralph/specs/", action: "updated" });
|
|
117
177
|
debug("Copied _bmad-output/ to .ralph/specs/ (atomic)");
|
|
118
178
|
}
|
|
119
179
|
else {
|
|
@@ -129,13 +189,20 @@ export async function runTransition(projectDir) {
|
|
|
129
189
|
await access(specsTmpDir);
|
|
130
190
|
await rm(specsDir, { recursive: true, force: true });
|
|
131
191
|
await rename(specsTmpDir, specsDir);
|
|
192
|
+
generatedFiles.push({ path: ".ralph/specs/", action: "updated" });
|
|
132
193
|
}
|
|
133
194
|
// Generate SPECS_INDEX.md for intelligent spec reading
|
|
134
195
|
info("Generating SPECS_INDEX.md...");
|
|
196
|
+
const specsIndexPath = join(projectDir, ".ralph/SPECS_INDEX.md");
|
|
197
|
+
const specsIndexExisted = await exists(specsIndexPath);
|
|
135
198
|
try {
|
|
136
199
|
const specsIndex = await generateSpecsIndex(specsDir);
|
|
137
200
|
if (specsIndex.totalFiles > 0) {
|
|
138
|
-
await atomicWriteFile(
|
|
201
|
+
await atomicWriteFile(specsIndexPath, formatSpecsIndexMd(specsIndex));
|
|
202
|
+
generatedFiles.push({
|
|
203
|
+
path: ".ralph/SPECS_INDEX.md",
|
|
204
|
+
action: specsIndexExisted ? "updated" : "created",
|
|
205
|
+
});
|
|
139
206
|
debug(`Generated SPECS_INDEX.md with ${specsIndex.totalFiles} files`);
|
|
140
207
|
}
|
|
141
208
|
}
|
|
@@ -143,18 +210,6 @@ export async function runTransition(projectDir) {
|
|
|
143
210
|
warn(`Could not generate SPECS_INDEX.md: ${formatError(err)}`);
|
|
144
211
|
}
|
|
145
212
|
// Generate PROJECT_CONTEXT.md from planning artifacts
|
|
146
|
-
const artifactContents = new Map();
|
|
147
|
-
for (const file of files) {
|
|
148
|
-
if (file.endsWith(".md")) {
|
|
149
|
-
try {
|
|
150
|
-
const content = await readFile(join(artifactsDir, file), "utf-8");
|
|
151
|
-
artifactContents.set(file, content);
|
|
152
|
-
}
|
|
153
|
-
catch (err) {
|
|
154
|
-
warn(`Could not read artifact ${file}: ${formatError(err)}`);
|
|
155
|
-
}
|
|
156
|
-
}
|
|
157
|
-
}
|
|
158
213
|
let projectName = "project";
|
|
159
214
|
try {
|
|
160
215
|
const config = await readConfig(projectDir);
|
|
@@ -167,6 +222,8 @@ export async function runTransition(projectDir) {
|
|
|
167
222
|
}
|
|
168
223
|
// Extract project context for both PROJECT_CONTEXT.md and PROMPT.md
|
|
169
224
|
info("Generating PROJECT_CONTEXT.md...");
|
|
225
|
+
const projectContextPath = join(projectDir, ".ralph/PROJECT_CONTEXT.md");
|
|
226
|
+
const projectContextExisted = await exists(projectContextPath);
|
|
170
227
|
let projectContext = null;
|
|
171
228
|
let truncationWarnings = [];
|
|
172
229
|
if (artifactContents.size > 0) {
|
|
@@ -174,15 +231,21 @@ export async function runTransition(projectDir) {
|
|
|
174
231
|
projectContext = context;
|
|
175
232
|
truncationWarnings = detectTruncation(truncated);
|
|
176
233
|
const contextMd = generateProjectContextMd(projectContext, projectName);
|
|
177
|
-
await atomicWriteFile(
|
|
234
|
+
await atomicWriteFile(projectContextPath, contextMd);
|
|
235
|
+
generatedFiles.push({
|
|
236
|
+
path: ".ralph/PROJECT_CONTEXT.md",
|
|
237
|
+
action: projectContextExisted ? "updated" : "created",
|
|
238
|
+
});
|
|
178
239
|
debug("Generated PROJECT_CONTEXT.md");
|
|
179
240
|
}
|
|
180
241
|
// Generate PROMPT.md with embedded context
|
|
181
242
|
info("Generating PROMPT.md...");
|
|
182
243
|
// Try to preserve rich PROMPT.md template if it has the placeholder
|
|
183
244
|
let prompt;
|
|
245
|
+
let promptExisted = false;
|
|
184
246
|
try {
|
|
185
247
|
const existingPrompt = await readFile(join(projectDir, ".ralph/PROMPT.md"), "utf-8");
|
|
248
|
+
promptExisted = true;
|
|
186
249
|
if (existingPrompt.includes("[YOUR PROJECT NAME]")) {
|
|
187
250
|
prompt = existingPrompt.replace(/\[YOUR PROJECT NAME\]/g, projectName);
|
|
188
251
|
}
|
|
@@ -201,29 +264,39 @@ export async function runTransition(projectDir) {
|
|
|
201
264
|
prompt = generatePrompt(projectName, projectContext ?? undefined);
|
|
202
265
|
}
|
|
203
266
|
await atomicWriteFile(join(projectDir, ".ralph/PROMPT.md"), prompt);
|
|
267
|
+
generatedFiles.push({ path: ".ralph/PROMPT.md", action: promptExisted ? "updated" : "created" });
|
|
204
268
|
// Customize @AGENT.md based on detected tech stack from architecture
|
|
205
269
|
const architectureFile = files.find((f) => /architect/i.test(f));
|
|
206
270
|
if (architectureFile) {
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
271
|
+
const archContent = artifactContents.get(architectureFile);
|
|
272
|
+
if (archContent) {
|
|
273
|
+
try {
|
|
274
|
+
const stack = detectTechStack(archContent);
|
|
275
|
+
if (stack) {
|
|
276
|
+
const agentPath = join(projectDir, ".ralph/@AGENT.md");
|
|
277
|
+
const agentTemplate = await readFile(agentPath, "utf-8");
|
|
278
|
+
const customized = customizeAgentMd(agentTemplate, stack);
|
|
279
|
+
await atomicWriteFile(agentPath, customized);
|
|
280
|
+
generatedFiles.push({ path: ".ralph/@AGENT.md", action: "updated" });
|
|
281
|
+
debug("Customized @AGENT.md with detected tech stack");
|
|
282
|
+
}
|
|
283
|
+
}
|
|
284
|
+
catch (err) {
|
|
285
|
+
warn(`Could not customize @AGENT.md: ${formatError(err)}`);
|
|
216
286
|
}
|
|
217
|
-
}
|
|
218
|
-
catch (err) {
|
|
219
|
-
warn(`Could not customize @AGENT.md: ${formatError(err)}`);
|
|
220
287
|
}
|
|
221
288
|
}
|
|
222
|
-
//
|
|
223
|
-
const
|
|
289
|
+
// Collect warnings from all sources
|
|
290
|
+
const preflightWarnings = preflightResult.issues
|
|
291
|
+
.filter((i) => i.severity === "warning" || (i.severity === "error" && options?.force))
|
|
292
|
+
.map((i) => i.message);
|
|
293
|
+
// Keep parse warnings not already covered by preflight (e.g., malformed IDs)
|
|
294
|
+
const nonPreflightParseWarnings = parseWarnings.filter((w) => !/has no acceptance criteria/i.test(w) &&
|
|
295
|
+
!/has no description/i.test(w) &&
|
|
296
|
+
!/not under an epic/i.test(w));
|
|
224
297
|
const warnings = [
|
|
225
|
-
...
|
|
226
|
-
...
|
|
298
|
+
...preflightWarnings,
|
|
299
|
+
...nonPreflightParseWarnings,
|
|
227
300
|
...orphanWarnings,
|
|
228
301
|
...renumberWarnings,
|
|
229
302
|
...truncationWarnings,
|
|
@@ -239,5 +312,11 @@ export async function runTransition(projectDir) {
|
|
|
239
312
|
};
|
|
240
313
|
await writeState(projectDir, newState);
|
|
241
314
|
info("Transition complete: phase 4 (implementing)");
|
|
242
|
-
return {
|
|
315
|
+
return {
|
|
316
|
+
storiesCount: stories.length,
|
|
317
|
+
warnings,
|
|
318
|
+
fixPlanPreserved,
|
|
319
|
+
preflightIssues: preflightResult.issues,
|
|
320
|
+
generatedFiles,
|
|
321
|
+
};
|
|
243
322
|
}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import type { Story, PreflightIssue, PreflightResult } from "./types.js";
|
|
2
|
+
export declare function validatePrd(content: string | null): PreflightIssue[];
|
|
3
|
+
export declare function validateArchitecture(content: string | null): PreflightIssue[];
|
|
4
|
+
export declare function validateStories(stories: Story[], parseWarnings: string[]): PreflightIssue[];
|
|
5
|
+
export declare function validateReadiness(content: string | null): PreflightIssue[];
|
|
6
|
+
export declare function runPreflight(artifactContents: Map<string, string>, files: string[], stories: Story[], parseWarnings: string[]): PreflightResult;
|