openplanr 1.3.0 → 1.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +23 -0
- package/dist/ai/prompts/prompt-builder.d.ts +14 -0
- package/dist/ai/prompts/prompt-builder.d.ts.map +1 -1
- package/dist/ai/prompts/prompt-builder.js +32 -1
- package/dist/ai/prompts/prompt-builder.js.map +1 -1
- package/dist/ai/prompts/system-prompts.d.ts +1 -0
- package/dist/ai/prompts/system-prompts.d.ts.map +1 -1
- package/dist/ai/prompts/system-prompts.js +110 -0
- package/dist/ai/prompts/system-prompts.js.map +1 -1
- package/dist/ai/schemas/ai-response-schemas.d.ts +62 -0
- package/dist/ai/schemas/ai-response-schemas.d.ts.map +1 -1
- package/dist/ai/schemas/ai-response-schemas.js +50 -0
- package/dist/ai/schemas/ai-response-schemas.js.map +1 -1
- package/dist/cli/commands/spec.d.ts +28 -0
- package/dist/cli/commands/spec.d.ts.map +1 -0
- package/dist/cli/commands/spec.js +542 -0
- package/dist/cli/commands/spec.js.map +1 -0
- package/dist/cli/index.js +2 -0
- package/dist/cli/index.js.map +1 -1
- package/dist/models/schema.d.ts +1 -0
- package/dist/models/schema.d.ts.map +1 -1
- package/dist/models/schema.js +1 -0
- package/dist/models/schema.js.map +1 -1
- package/dist/models/types.d.ts +9 -0
- package/dist/models/types.d.ts.map +1 -1
- package/dist/services/config-service.d.ts.map +1 -1
- package/dist/services/config-service.js +1 -0
- package/dist/services/config-service.js.map +1 -1
- package/dist/services/spec-service.d.ts +292 -0
- package/dist/services/spec-service.d.ts.map +1 -0
- package/dist/services/spec-service.js +805 -0
- package/dist/services/spec-service.js.map +1 -0
- package/dist/templates/spec/spec-shaped.md.hbs +89 -0
- package/dist/templates/spec/spec.md.hbs +68 -0
- package/dist/templates/spec/story.md.hbs +51 -0
- package/dist/templates/spec/task.md.hbs +98 -0
- package/dist/utils/constants.d.ts +18 -0
- package/dist/utils/constants.d.ts.map +1 -1
- package/dist/utils/constants.js +25 -0
- package/dist/utils/constants.js.map +1 -1
- package/package.json +1 -1
|
@@ -0,0 +1,805 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* spec-service — directory-aware CRUD for spec-driven planning mode.
|
|
3
|
+
*
|
|
4
|
+
* Unlike agile/QT artifacts which are flat single files, each spec is a
|
|
5
|
+
* **self-contained directory** (per BL-011 addendum + design doc):
|
|
6
|
+
*
|
|
7
|
+
* .planr/specs/SPEC-NNN-{slug}/
|
|
8
|
+
* ├── SPEC-NNN-{slug}.md ← the spec document
|
|
9
|
+
* ├── design/ ← UI mockups + design-spec.md (if any)
|
|
10
|
+
* │ ├── *.png
|
|
11
|
+
* │ └── design-spec.md ← reserved path (written by openplanr-pipeline's designer-agent)
|
|
12
|
+
* ├── stories/
|
|
13
|
+
* │ └── US-NNN-{slug}.md ← US-NNN scoped to this spec
|
|
14
|
+
* └── tasks/
|
|
15
|
+
* └── T-NNN-{slug}.md ← T-NNN scoped to this spec
|
|
16
|
+
*
|
|
17
|
+
* Why directory-per-spec:
|
|
18
|
+
* - Self-contained / portable / `rm -rf` clean
|
|
19
|
+
* - `PREFIX-NNN-slug` naming consistent with every other planr artifact
|
|
20
|
+
* - US-NNN and T-NNN are SCOPED TO THE PARENT SPEC (not project-globally
|
|
21
|
+
* unique). Two specs can each have their own US-001. Disambiguation is
|
|
22
|
+
* via the path or via `specId` frontmatter.
|
|
23
|
+
* - Schema matches openplanr-pipeline plugin verbatim — both products
|
|
24
|
+
* share one contract. See https://github.com/openplanr/openplanr-pipeline
|
|
25
|
+
*
|
|
26
|
+
* This service owns spec authoring inside the planr CLI. The
|
|
27
|
+
* openplanr-pipeline plugin is the executor: it reads `.planr/specs/` (when
|
|
28
|
+
* spec mode is active) and runs the PO/DEV phases to ship code.
|
|
29
|
+
*/
|
|
30
|
+
import path from 'node:path';
|
|
31
|
+
import { ensureDir, fileExists, listFiles, readFile, writeFile } from '../utils/fs.js';
|
|
32
|
+
import { logger } from '../utils/logger.js';
|
|
33
|
+
import { parseMarkdown } from '../utils/markdown.js';
|
|
34
|
+
import { slugify } from '../utils/slugify.js';
|
|
35
|
+
import { atomicWriteFile } from './atomic-write-service.js';
|
|
36
|
+
import { getNextId } from './id-service.js';
|
|
37
|
+
import { renderTemplate } from './template-service.js';
|
|
38
|
+
// ---------------------------------------------------------------------------
|
|
39
|
+
// Path resolvers
|
|
40
|
+
// ---------------------------------------------------------------------------
|
|
41
|
+
/**
 * Root directory that holds every spec (e.g. `.planr/specs/`).
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config; `config.outputPaths.agile` is the artifact root.
 * @returns {string} Absolute path of the specs root directory.
 */
export function getSpecsRootDir(projectDir, config) {
    const artifactRoot = path.join(projectDir, config.outputPaths.agile);
    return path.join(artifactRoot, 'specs');
}
|
|
45
|
+
/**
 * Self-contained directory for one spec, e.g. `.planr/specs/SPEC-001-auth-flow/`.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (provides the specs root).
 * @param {string} specId - Spec ID, e.g. `SPEC-001`.
 * @param {string} slug - Spec slug, e.g. `auth-flow`.
 * @returns {string} Absolute path of the spec's directory.
 */
export function getSpecDir(projectDir, config, specId, slug) {
    const dirName = `${specId}-${slug}`;
    return path.join(getSpecsRootDir(projectDir, config), dirName);
}
|
|
49
|
+
/**
 * Stories subdirectory inside a spec's directory.
 *
 * @param {string} specDir - Absolute path of the spec's directory.
 * @returns {string} `{specDir}/stories`.
 */
export function getSpecStoriesDir(specDir) {
    const storiesDir = path.join(specDir, 'stories');
    return storiesDir;
}
|
|
53
|
+
/**
 * Tasks subdirectory inside a spec's directory.
 *
 * @param {string} specDir - Absolute path of the spec's directory.
 * @returns {string} `{specDir}/tasks`.
 */
export function getSpecTasksDir(specDir) {
    const tasksDir = path.join(specDir, 'tasks');
    return tasksDir;
}
|
|
57
|
+
/**
 * Design assets subdirectory inside a spec's directory (PNG mockups plus a
 * reserved `design-spec.md` written by the pipeline's designer agent).
 *
 * @param {string} specDir - Absolute path of the spec's directory.
 * @returns {string} `{specDir}/design`.
 */
export function getSpecDesignDir(specDir) {
    const designDir = path.join(specDir, 'design');
    return designDir;
}
|
|
61
|
+
// ---------------------------------------------------------------------------
|
|
62
|
+
// Spec resolution
|
|
63
|
+
// ---------------------------------------------------------------------------
|
|
64
|
+
/**
 * Resolve a spec ID (e.g. `SPEC-001`) to its on-disk directory by scanning
 * `.planr/specs/` for a directory named `{specId}-{slug}`. The directory name
 * encodes both ID and slug, so the spec file itself never has to be read.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (provides the specs root).
 * @param {string} specId - Spec ID to look up; regex-escaped before matching.
 * @returns {Promise<{dir: string, slug: string} | null>} Match, or null when
 *          the specs root or the spec doesn't exist.
 */
export async function resolveSpecDir(projectDir, config, specId) {
    const specsRoot = getSpecsRootDir(projectDir, config);
    if (!(await fileExists(specsRoot)))
        return null;
    const fs = await import('node:fs/promises');
    const dirents = await fs.readdir(specsRoot, { withFileTypes: true });
    // Escape the ID so regex metacharacters in a malformed ID can't match wildly.
    const safeId = specId.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const pattern = new RegExp(`^${safeId}-(.+)$`);
    for (const dirent of dirents) {
        if (!dirent.isDirectory())
            continue;
        const match = pattern.exec(dirent.name);
        if (match)
            return { dir: path.join(specsRoot, dirent.name), slug: match[1] };
    }
    return null;
}
|
|
91
|
+
/**
 * List every spec under `.planr/specs/`.
 * Reads each spec's frontmatter for title + status; counts stories + tasks.
 *
 * Directories that don't match `PREFIX-NNN-slug`, or that lack the expected
 * `PREFIX-NNN-slug.md` file inside, are silently skipped. A spec whose
 * frontmatter fails to parse is still listed, with title/status defaults
 * derived from its directory name.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (provides the specs root).
 * @returns {Promise<Array<{id: string, slug: string, title: string, status: string,
 *          dirName: string, storyCount: number, taskCount: number}>>} Sorted by
 *          ID (lexicographic — consistent while NNN stays zero-padded to 3 digits).
 */
export async function listSpecs(projectDir, config) {
    const specsRoot = getSpecsRootDir(projectDir, config);
    const exists = await fileExists(specsRoot);
    if (!exists)
        return [];
    const fs = await import('node:fs/promises');
    const entries = await fs.readdir(specsRoot, { withFileTypes: true });
    // Any uppercase prefix is accepted, not just "SPEC" — matches configurable idPrefix.
    const dirRegex = /^([A-Z]+-\d{3})-(.+)$/;
    const results = [];
    for (const entry of entries) {
        if (!entry.isDirectory())
            continue;
        const m = entry.name.match(dirRegex);
        if (!m)
            continue;
        const [, id, slug] = m;
        const specDir = path.join(specsRoot, entry.name);
        const specFile = path.join(specDir, `${id}-${slug}.md`);
        const specFileExists = await fileExists(specFile);
        if (!specFileExists)
            continue;
        // Defaults derived from the directory name; frontmatter overrides below.
        let title = slug.replace(/-/g, ' ');
        let status = 'pending';
        try {
            const raw = await readFile(specFile);
            const parsed = parseMarkdown(raw);
            if (typeof parsed.data.title === 'string')
                title = parsed.data.title;
            if (typeof parsed.data.status === 'string')
                status = parsed.data.status;
        }
        catch (err) {
            // Malformed frontmatter must not break the listing — keep defaults.
            logger.debug(`Failed to parse spec ${id} frontmatter: ${err.message}`);
        }
        const stories = await listSpecStories(specDir);
        const tasks = await listSpecTasks(specDir);
        results.push({
            id,
            slug,
            title,
            status,
            dirName: entry.name,
            storyCount: stories.length,
            taskCount: tasks.length,
        });
    }
    return results.sort((a, b) => a.id.localeCompare(b.id));
}
|
|
143
|
+
/**
 * Create a new spec directory + spec file from the template.
 * Returns the assigned ID and the absolute file path of the spec markdown.
 *
 * Refuses if a directory with the same slug already exists, to avoid
 * accidental overwrites.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (outputPaths, idPrefix, projectName, templateOverrides).
 * @param {string} title - Human-readable title; slugified unless `options.slug` is given.
 * @param {object} [options] - Optional `slug`, `priority`, `milestone`, `po` overrides.
 * @returns {Promise<{id: string, slug: string, specDir: string, specFile: string}>}
 * @throws {Error} When no slug can be derived, or a spec with the same slug
 *         or same directory name already exists.
 */
export async function createSpec(projectDir, config, title, options = {}) {
    const slug = options.slug ? slugify(options.slug) : slugify(title);
    if (!slug) {
        throw new Error('Could not derive a slug from the title. Provide --slug explicitly.');
    }
    const specsRoot = getSpecsRootDir(projectDir, config);
    await ensureDir(specsRoot);
    // Cross-spec slug-collision check. Two specs with the same slug would be
    // ambiguous in pipeline handoffs (`/openplanr-pipeline:plan {slug}` —
    // which spec?), and they'd be hard to distinguish in `planr spec list`.
    // Refuse early with a clear suggestion.
    {
        const fs = await import('node:fs/promises');
        let entries = [];
        try {
            entries = await fs.readdir(specsRoot, { withFileTypes: true });
        }
        catch {
            // specs/ may not exist yet — fine
        }
        // Slug must be unique across ALL spec dirs, regardless of ID prefix;
        // the slug itself is regex-escaped before interpolation.
        const slugRe = new RegExp(`^[A-Z]+-\\d{3}-${slug.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
        const collision = entries.find((e) => e.isDirectory() && slugRe.test(e.name));
        if (collision) {
            throw new Error(`A spec with slug "${slug}" already exists at ${collision.name}. Use a different --slug or delete the existing spec with \`planr spec destroy ${collision.name.split('-').slice(0, 2).join('-')}\`.`);
        }
    }
    const prefix = config.idPrefix.spec || 'SPEC';
    // Custom-scan: getNextId() looks for files; here we look at *directories*.
    // We compose a synthetic "files-only" view by listing entries with the prefix.
    const id = await nextSpecId(specsRoot, prefix);
    const dirName = `${id}-${slug}`;
    const specDir = path.join(specsRoot, dirName);
    // nextSpecId skips taken numbers, so this only fires on a race with a
    // concurrent create (or manual tampering between scan and mkdir).
    if (await fileExists(specDir)) {
        throw new Error(`Spec directory ${dirName} already exists. Use a different --slug or delete the existing spec with \`planr spec destroy ${id}\`.`);
    }
    await ensureDir(specDir);
    await ensureDir(getSpecStoriesDir(specDir));
    await ensureDir(getSpecTasksDir(specDir));
    await ensureDir(getSpecDesignDir(specDir));
    const specFile = path.join(specDir, `${id}-${slug}.md`);
    // YYYY-MM-DD in UTC.
    const today = new Date().toISOString().split('T')[0];
    const content = await renderTemplate('spec/spec.md.hbs', {
        id,
        slug,
        title,
        status: 'pending',
        schemaVersion: '1.0.0',
        priority: options.priority || 'P1',
        milestone: options.milestone || '',
        po: options.po || '',
        date: today,
        projectName: config.projectName,
    }, config.templateOverrides);
    await writeFile(specFile, content);
    // Drop a .gitkeep into design/ so the empty subdir survives commits
    // (stories/ and tasks/ will be populated by `planr spec decompose` later).
    await writeFile(path.join(getSpecDesignDir(specDir), '.gitkeep'), '');
    logger.debug(`Created spec ${id}: ${specDir}`);
    return { id, slug, specDir, specFile };
}
|
|
210
|
+
/**
 * Pick the next available SPEC ID by scanning sibling directories under
 * `.planr/specs/`. Mirrors `id-service.getNextId()` but for directories.
 * Fills gaps: with SPEC-001 and SPEC-003 on disk, returns SPEC-002.
 *
 * @param {string} specsRoot - Absolute path of the specs root directory.
 * @param {string} prefix - ID prefix (e.g. `SPEC`); regex-escaped before use.
 * @returns {Promise<string>} Next free `${prefix}-NNN` ID (NNN zero-padded to 3).
 */
async function nextSpecId(specsRoot, prefix) {
    const fs = await import('node:fs/promises');
    let entries = [];
    try {
        entries = await fs.readdir(specsRoot, { withFileTypes: true });
    }
    catch {
        // Root doesn't exist yet — the very first spec gets -001.
        return `${prefix}-001`;
    }
    // FIX: escape the config-supplied prefix before interpolating it into the
    // regex — consistent with the escaping done for specId/slug elsewhere in
    // this service. An unescaped metacharacter (e.g. "SPEC+") would otherwise
    // corrupt the match and could hand out colliding IDs.
    const escapedPrefix = prefix.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const re = new RegExp(`^${escapedPrefix}-(\\d{3})-`);
    const taken = new Set();
    for (const e of entries) {
        if (!e.isDirectory())
            continue;
        const m = e.name.match(re);
        if (m)
            taken.add(Number.parseInt(m[1], 10));
    }
    let n = 1;
    while (taken.has(n))
        n++;
    return `${prefix}-${String(n).padStart(3, '0')}`;
}
|
|
237
|
+
/**
 * Read one spec: resolve its directory, load the spec markdown, and parse
 * its frontmatter.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (provides the specs root).
 * @param {string} specId - Spec ID to read.
 * @returns {Promise<{id, slug, specDir, specFile, data, content} | null>}
 *          Null when the spec (or its markdown file) is missing, or when the
 *          frontmatter cannot be parsed (a warning is logged in that case).
 */
export async function readSpec(projectDir, config, specId) {
    const resolved = await resolveSpecDir(projectDir, config, specId);
    if (!resolved)
        return null;
    const specDir = resolved.dir;
    const slug = resolved.slug;
    const specFile = path.join(specDir, `${specId}-${slug}.md`);
    if (!(await fileExists(specFile)))
        return null;
    const raw = await readFile(specFile);
    try {
        const { data, content } = parseMarkdown(raw);
        return { id: specId, slug, specDir, specFile, data, content };
    }
    catch (err) {
        logger.warn(`Skipping spec ${specId}: frontmatter parse error.\n ${specFile}\n ${err.message}`);
        return null;
    }
}
|
|
256
|
+
/**
 * Overwrite the spec markdown file in place. Atomic.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (provides the specs root).
 * @param {string} specId - Spec ID whose file is replaced.
 * @param {string} content - Full new file content (frontmatter + body).
 * @throws {Error} When the spec cannot be resolved on disk.
 */
export async function updateSpec(projectDir, config, specId, content) {
    const resolved = await resolveSpecDir(projectDir, config, specId);
    if (!resolved)
        throw new Error(`Spec ${specId} not found.`);
    const { dir, slug } = resolved;
    await atomicWriteFile(path.join(dir, `${specId}-${slug}.md`), content);
}
|
|
264
|
+
/**
 * Surgical YAML frontmatter update for a spec.
 * Mirrors artifact-service.updateArtifactFields shape.
 *
 * Each key in `fields` replaces its existing `key: value` line in the
 * frontmatter, or is appended if absent; an `updated: YYYY-MM-DD` field is
 * always stamped. The markdown body is left untouched.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (provides the specs root).
 * @param {string} specId - Spec ID whose frontmatter is updated.
 * @param {object} fields - Key/value pairs to set (values run through formatYamlValue).
 * @throws {Error} When the spec is missing or has no `---` frontmatter fences.
 */
export async function updateSpecFields(projectDir, config, specId, fields) {
    const resolved = await resolveSpecDir(projectDir, config, specId);
    if (!resolved)
        throw new Error(`Spec ${specId} not found.`);
    const specFile = path.join(resolved.dir, `${specId}-${resolved.slug}.md`);
    const raw = await readFile(specFile);
    const today = new Date().toISOString().split('T')[0]; // YYYY-MM-DD (UTC)
    const allFields = { ...fields, updated: today };
    const openIdx = raw.indexOf('---');
    const closeIdx = raw.indexOf('\n---', openIdx + 3);
    if (openIdx === -1 || closeIdx === -1) {
        throw new Error(`Spec ${specId} has no valid frontmatter.`);
    }
    // FIX: preserve any bytes before the opening `---` (BOM, stray leading
    // newline). The previous version rebuilt the file from slice(openIdx, …)
    // onward and silently dropped that prefix on every write.
    const prefix = raw.slice(0, openIdx);
    let frontmatter = raw.slice(openIdx, closeIdx);
    const body = raw.slice(closeIdx);
    for (const [key, value] of Object.entries(allFields)) {
        const escapedKey = key.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
        const pattern = new RegExp(`^${escapedKey}:\\s*.*$`, 'm');
        const replacement = `${key}: ${formatYamlValue(value)}`;
        if (pattern.test(frontmatter)) {
            // Replacer function avoids `$`-pattern interpretation in the value.
            frontmatter = frontmatter.replace(pattern, () => replacement);
        }
        else {
            frontmatter += `\n${replacement}`;
        }
    }
    await atomicWriteFile(specFile, prefix + frontmatter + body);
}
|
|
296
|
+
/**
 * Format a JS value as valid YAML for frontmatter.
 * - Arrays → inline-flow: `["a", "b"]` (so they round-trip as arrays, not strings)
 * - Empty arrays → `[]`
 * - Other → double-quoted scalar with escapes
 *
 * FIX: newline/carriage-return characters in a value are now escaped as the
 * YAML double-quote sequences `\n` / `\r`. Previously they were emitted raw,
 * which broke the one-field-per-line layout that updateSpecFields' line-
 * anchored regex replacement relies on.
 *
 * @param {*} value - Value to serialize; non-array values are stringified.
 * @returns {string} A single-line YAML scalar or inline-flow sequence.
 */
function formatYamlValue(value) {
    // Shared scalar quoting: backslash first, then quote, then line breaks.
    const quote = (v) => `"${String(v)
        .replace(/\\/g, '\\\\')
        .replace(/"/g, '\\"')
        .replace(/\n/g, '\\n')
        .replace(/\r/g, '\\r')}"`;
    if (Array.isArray(value)) {
        if (value.length === 0)
            return '[]';
        return `[${value.map(quote).join(', ')}]`;
    }
    return quote(value);
}
|
|
311
|
+
/**
 * Validate one spec's internal integrity and repair safe inconsistencies.
 *
 * Checks performed:
 * 1. Orphaned task: `task.storyId` doesn't match any existing US in the same
 *    spec → WARN (don't auto-delete; user reviews and either fixes the
 *    storyId or destroys the task)
 * 2. Story without tasks: WARN (decomposition is incomplete)
 * 3. Missing `specId` in US/Task frontmatter → AUTO-FIX from path. The fix
 *    anchors on the artifact's `id:` frontmatter line; FIX: when no such
 *    line exists, the artifact is now reported as a WARNING instead of being
 *    falsely reported as fixed while nothing was written.
 * 4. Schema version mismatch (artifact's schemaVersion older than current)
 *    → WARN (no auto-migration in v1; flagged for follow-up)
 *
 * Note: this is a *read-mostly* operation. The only writes happen in case 3;
 * all other findings are reported as warnings so the user controls the fix.
 *
 * `dryRun: true` skips writes entirely; only report.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (provides the specs root).
 * @param {string} specId - Spec ID to validate.
 * @param {{dryRun?: boolean}} [opts] - `dryRun` suppresses all writes.
 * @returns {Promise<{specId: string, specSlug: string, fixed: string[], warnings: string[]}>}
 * @throws {Error} When the spec cannot be resolved.
 */
export async function syncSpec(projectDir, config, specId, opts = {}) {
    const spec = await readSpec(projectDir, config, specId);
    if (!spec)
        throw new Error(`Spec ${specId} not found.`);
    const fixed = [];
    const warnings = [];
    const stories = await listSpecStories(spec.specDir);
    const tasks = await listSpecTasks(spec.specDir);
    // ── Check 1: orphaned tasks ────────────────────────────────────────────
    const storyIds = new Set(stories.map((s) => s.id));
    for (const t of tasks) {
        if (!t.storyId) {
            warnings.push(`Task ${t.id} (${t.filename}) has no storyId — set it manually or via \`planr spec decompose --force\`.`);
        }
        else if (!storyIds.has(t.storyId)) {
            warnings.push(`Task ${t.id} (${t.filename}) references non-existent story ${t.storyId} in this spec. ` +
                `Fix the storyId or destroy the task.`);
        }
    }
    // ── Check 2: stories without tasks ────────────────────────────────────
    const tasksByStory = new Map();
    for (const t of tasks) {
        if (t.storyId)
            tasksByStory.set(t.storyId, (tasksByStory.get(t.storyId) ?? 0) + 1);
    }
    for (const s of stories) {
        if (!tasksByStory.has(s.id)) {
            warnings.push(`Story ${s.id} has no tasks — decomposition incomplete. Run \`planr spec decompose ${specId} --force\` or hand-author tasks.`);
        }
    }
    // ── Check 3: missing specId in US/Task frontmatter ────────────────────
    // Auto-fixable from the path. Deduplicated into one helper for stories
    // and tasks. FIX: the old code reported "added missing specId" even when
    // the `id:` anchor line was absent and replace() was a no-op.
    const fs = await import('node:fs/promises');
    const ensureSpecId = async (artifact, label) => {
        const raw = await readFile(artifact.filePath);
        if (/^specId:\s*"/m.test(raw))
            return; // already present — nothing to do
        const insertion = `\nspecId: "${specId}"`;
        const fixedContent = raw.replace(/^id:\s*"[^"]+"$/m, (m) => m + insertion);
        if (fixedContent === raw) {
            // No `id:` line to anchor on — we cannot auto-fix safely.
            warnings.push(`${label} ${artifact.id} (${artifact.filename}) is missing specId and has no \`id:\` frontmatter line to anchor an auto-fix — add specId manually.`);
            return;
        }
        if (!opts.dryRun) {
            await fs.writeFile(artifact.filePath, fixedContent);
        }
        fixed.push(`${label} ${artifact.id}: added missing specId frontmatter${opts.dryRun ? ' [dry-run]' : ''}.`);
    };
    for (const s of stories)
        await ensureSpecId(s, 'Story');
    for (const t of tasks)
        await ensureSpecId(t, 'Task');
    // ── Check 4: schema version drift ─────────────────────────────────────
    const CURRENT_SCHEMA_VERSION = '1.0.0';
    const specSchemaVersion = typeof spec.data.schemaVersion === 'string' ? spec.data.schemaVersion : null;
    if (specSchemaVersion && specSchemaVersion !== CURRENT_SCHEMA_VERSION) {
        warnings.push(`Spec uses schemaVersion ${specSchemaVersion} (current: ${CURRENT_SCHEMA_VERSION}). No auto-migration in v1 — review manually.`);
    }
    return {
        specId: spec.id,
        specSlug: spec.slug,
        fixed,
        warnings,
    };
}
|
|
397
|
+
/**
 * Run syncSpec across every spec in the project and aggregate the per-spec
 * reports. Specs are processed sequentially, in listSpecs order.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (provides the specs root).
 * @param {{dryRun?: boolean}} [opts] - Forwarded to syncSpec.
 * @returns {Promise<{specsScanned: number, reports: object[]}>}
 */
export async function syncAllSpecs(projectDir, config, opts = {}) {
    const specs = await listSpecs(projectDir, config);
    const reports = [];
    for (const { id } of specs) {
        const report = await syncSpec(projectDir, config, id, opts);
        reports.push(report);
    }
    return { specsScanned: specs.length, reports };
}
|
|
409
|
+
/**
 * Decompose a SPEC into User Stories + Tasks via AI.
 *
 * High-level flow:
 * 1. Read the spec; refuse if stories/ or tasks/ already populated
 *    (unless `opts.force === true`)
 * 2. Read `input/tech/stack.md` (best-effort; passed as a hint to the AI)
 * 3. Build codebase context via planr's existing scanner (skipped if
 *    `opts.noCodeContext === true`)
 * 4. Build prompt + call AI provider via `generateStreamingJSON`
 * 5. Validate the response with `aiSpecDecomposeResponseSchema`
 * 6. Write each US via `createSpecStory` and each Task via `createSpecTask`
 * 7. Update SPEC frontmatter status: pending|shaping → decomposing → decomposed
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (AI provider, paths, templates).
 * @param {string} specId - Spec ID to decompose.
 * @param {{force?: boolean, noCodeContext?: boolean, maxStories?: number}} [opts]
 * @returns {Promise<{storiesCreated: number, tasksCreated: number, decompositionNotes: *}>}
 * @throws {Error} When the spec is missing, or already decomposed and !opts.force.
 *
 * NOTE(review): if the AI call (or the persist loop) throws, the spec is left
 * with status "decomposing" — consider rolling back to the previous status on
 * failure. TODO confirm the desired recovery behavior.
 */
export async function decomposeSpec(projectDir, config, specId, opts = {}) {
    const spec = await readSpec(projectDir, config, specId);
    if (!spec)
        throw new Error(`Spec ${specId} not found.`);
    // ── Guard: refuse to overwrite existing decomposition ─────────────────
    const existingStories = await listSpecStories(spec.specDir);
    const existingTasks = await listSpecTasks(spec.specDir);
    if ((existingStories.length > 0 || existingTasks.length > 0) && !opts.force) {
        throw new Error(`Spec ${specId} already has ${existingStories.length} stor${existingStories.length === 1 ? 'y' : 'ies'} and ${existingTasks.length} task${existingTasks.length === 1 ? '' : 's'}. ` +
            `Pass --force to overwrite, or \`planr spec destroy ${specId}\` to start fresh.`);
    }
    // If forcing, wipe existing US + Task files BEFORE the AI call so a failed
    // decomposition doesn't leave a half-overwritten tree.
    if (opts.force && (existingStories.length > 0 || existingTasks.length > 0)) {
        const fs = await import('node:fs/promises');
        for (const s of existingStories)
            await fs.rm(s.filePath, { force: true });
        for (const t of existingTasks)
            await fs.rm(t.filePath, { force: true });
    }
    // ── Determine PNG presence (drives 1-vs-2 tasks per US per rules.md R2) ─
    const uiFilesData = spec.data.ui_files;
    const hasPNGs = Array.isArray(uiFilesData) && uiFilesData.length > 0;
    // ── Read stack.md (best-effort) ───────────────────────────────────────
    // stackInfo stays undefined when the file is absent or unreadable.
    let stackInfo;
    try {
        const stackPath = path.join(projectDir, 'input/tech/stack.md');
        if (await fileExists(stackPath)) {
            stackInfo = await readFile(stackPath);
        }
    }
    catch {
        // best-effort
    }
    // ── Build codebase context (lazy import keeps startup fast) ───────────
    // codebaseContext stays undefined when disabled or when scanning fails.
    let codebaseContext;
    if (!opts.noCodeContext) {
        try {
            const { buildCodebaseContext, extractKeywords, formatCodebaseContext } = await import('../ai/codebase/index.js');
            const keywordSource = `${typeof spec.data.title === 'string' ? spec.data.title : ''}\n${spec.content}`;
            const keywords = extractKeywords(keywordSource);
            const ctx = await buildCodebaseContext(projectDir, keywords);
            codebaseContext = formatCodebaseContext(ctx);
            const stackHint = ctx.techStack
                ? ` — ${ctx.techStack.language}${ctx.techStack.framework ? ` + ${ctx.techStack.framework}` : ''}`
                : '';
            logger.dim(` Scanned codebase${stackHint}`);
        }
        catch (err) {
            // Scanning is an enhancement, not a requirement — log and continue.
            logger.debug('Codebase scanning failed during spec decompose', err);
        }
    }
    // ── Update status to "decomposing" so observers see in-progress state ─
    await updateSpecFields(projectDir, config, specId, { status: 'decomposing' });
    // ── Call AI (lazy imports — keep heavy deps off startup path) ─────────
    const { buildSpecDecomposePrompt } = await import('../ai/prompts/prompt-builder.js');
    const { aiSpecDecomposeResponseSchema } = await import('../ai/schemas/ai-response-schemas.js');
    const { generateStreamingJSON, getAIProvider } = await import('./ai-service.js');
    const { TOKEN_BUDGETS } = await import('../ai/types.js');
    const provider = await getAIProvider(config);
    const messages = buildSpecDecomposePrompt(spec.content, hasPNGs, stackInfo, codebaseContext, opts.maxStories);
    const { result } = await generateStreamingJSON(provider, messages, aiSpecDecomposeResponseSchema, { maxTokens: TOKEN_BUDGETS.taskFeature });
    // ── Persist stories + tasks ────────────────────────────────────────────
    let storiesCreated = 0;
    let tasksCreated = 0;
    for (const aiStory of result.stories) {
        const created = await createSpecStory(projectDir, config, specId, aiStory.title, {
            roleAction: aiStory.roleAction,
            benefit: aiStory.benefit,
            scope: aiStory.scope,
            acceptanceCriteria: aiStory.acceptanceCriteria,
        });
        storiesCreated++;
        // Tasks reference the US ID assigned at creation time (scoped to this spec).
        for (const aiTask of aiStory.tasks) {
            await createSpecTask(projectDir, config, specId, {
                storyId: created.id,
                title: aiTask.title,
                type: aiTask.type,
                agent: aiTask.agent,
                filesCreate: aiTask.filesCreate,
                filesModify: aiTask.filesModify,
                filesPreserve: aiTask.filesPreserve,
                objective: aiTask.objective,
                technicalSpec: aiTask.technicalSpec,
                testRequirements: aiTask.testRequirements,
            });
            tasksCreated++;
        }
    }
    // ── Final status: decomposed ──────────────────────────────────────────
    await updateSpecFields(projectDir, config, specId, { status: 'decomposed' });
    logger.debug(`Decomposed ${specId}: ${storiesCreated} stories, ${tasksCreated} tasks written`);
    return {
        storiesCreated,
        tasksCreated,
        decompositionNotes: result.decompositionNotes,
    };
}
|
|
521
|
+
/**
 * Re-render the SPEC body from a structured set of answers and write it back
 * atomically. Preserves frontmatter values that the user (or `planr spec
 * create`) already set: priority, milestone, po, ui_files, created, etc.
 *
 * Updates `status` to `shaping` so subsequent commands (`decompose`, `promote`)
 * can see the spec has moved past the initial empty placeholder body.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (projectName, templateOverrides).
 * @param {string} specId - Spec ID to shape.
 * @param {object} answers - Shaping answers; `context`, `functionalRequirements`
 *        and `acceptanceCriteria` are required, the rest default to empty.
 * @returns {Promise<{specFile: string}>}
 * @throws {Error} When the spec cannot be resolved.
 *
 * NOTE(review): no status field is written here — presumably the
 * `spec-shaped.md.hbs` template hardcodes `status: shaping`. Confirm against
 * the template.
 */
export async function shapeSpec(projectDir, config, specId, answers) {
    const spec = await readSpec(projectDir, config, specId);
    if (!spec)
        throw new Error(`Spec ${specId} not found.`);
    // YYYY-MM-DD in UTC.
    const today = new Date().toISOString().split('T')[0];
    // Carry through every frontmatter field that was already set, so we don't
    // accidentally erase user customizations on re-shape.
    const data = spec.data;
    const uiFilesRaw = data.ui_files;
    let uiFiles = [];
    if (Array.isArray(uiFilesRaw)) {
        // Keep only string entries; anything else is malformed frontmatter.
        uiFiles = uiFilesRaw.filter((f) => typeof f === 'string');
    }
    const content = await renderTemplate('spec/spec-shaped.md.hbs', {
        id: spec.id,
        slug: spec.slug,
        title: typeof data.title === 'string' ? data.title : spec.slug,
        schemaVersion: typeof data.schemaVersion === 'string' ? data.schemaVersion : '1.0.0',
        priority: typeof data.priority === 'string' ? data.priority : 'P1',
        milestone: typeof data.milestone === 'string' ? data.milestone : '',
        po: typeof data.po === 'string' ? data.po : '',
        created: typeof data.created === 'string' ? data.created : today,
        date: today,
        uiFiles,
        context: answers.context.trim(),
        functionalRequirements: answers.functionalRequirements.map((s) => s.trim()).filter(Boolean),
        businessRules: (answers.businessRules || '').trim(),
        outOfScope: (answers.outOfScope || []).map((s) => s.trim()).filter(Boolean),
        acceptanceCriteria: answers.acceptanceCriteria.map((s) => s.trim()).filter(Boolean),
        decompositionNotes: (answers.decompositionNotes || '').trim(),
        projectName: config.projectName,
    }, config.templateOverrides);
    await atomicWriteFile(spec.specFile, content);
    logger.debug(`Shaped spec ${spec.id} (status: shaping)`);
    return { specFile: spec.specFile };
}
|
|
565
|
+
/**
 * Destroy a spec directory. Self-contained = single `rm -rf` of the spec's
 * own directory; stories and tasks go with it, and there are no cross-spec
 * references to clean up.
 *
 * @param {string} projectDir - Absolute project root.
 * @param {object} config - Planr config (provides the specs root).
 * @param {string} specId - Spec ID to remove.
 * @throws {Error} When the spec cannot be resolved.
 */
export async function destroySpec(projectDir, config, specId) {
    const resolved = await resolveSpecDir(projectDir, config, specId);
    if (!resolved)
        throw new Error(`Spec ${specId} not found.`);
    const { rm } = await import('node:fs/promises');
    await rm(resolved.dir, { recursive: true, force: true });
    logger.debug(`Destroyed spec ${specId}: ${resolved.dir}`);
}
|
|
578
|
+
/**
 * Enumerate the US-NNN story files under a spec's stories/ subdirectory.
 *
 * Story IDs are scoped per spec, so two different specs may each contain
 * a US-001. Frontmatter is read best-effort: a story whose frontmatter
 * cannot be parsed still appears in the listing with defaults derived
 * from its filename.
 *
 * @param {string} specDir - Absolute path of the spec directory.
 * @returns {Promise<Array<{id: string, slug: string, title: string,
 *   status: string, filename: string, filePath: string}>>} One entry per
 *   story file, sorted by filename.
 */
export async function listSpecStories(specDir) {
    const storiesDir = getSpecStoriesDir(specDir);
    if (!(await fileExists(storiesDir))) {
        return [];
    }
    const filenames = await listFiles(storiesDir, /^US-\d{3}-.+\.md$/);
    const stories = [];
    for (const filename of filenames.sort()) {
        const match = filename.match(/^(US-\d{3})-(.+)\.md$/);
        if (!match) {
            continue;
        }
        const [, id, slug] = match;
        const filePath = path.join(storiesDir, filename);
        // Filename-derived defaults; frontmatter overrides them when readable.
        let title = slug.replace(/-/g, ' ');
        let status = 'pending';
        try {
            const { data } = parseMarkdown(await readFile(filePath));
            if (typeof data.title === 'string') {
                title = data.title;
            }
            if (typeof data.status === 'string') {
                status = data.status;
            }
        }
        catch {
            // Best-effort; keep the entry even when frontmatter is malformed.
        }
        stories.push({ id, slug, title, status, filename, filePath });
    }
    return stories;
}
|
|
611
|
+
/**
 * Create a new user-story file (US-NNN-{slug}.md) inside a spec's
 * stories/ directory.
 *
 * The numeric part of the ID is allocated from the files already present
 * in that directory, so numbering is scoped to the spec.
 *
 * @param {string} projectDir - Project root directory.
 * @param {object} config - Loaded planr configuration.
 * @param {string} specId - Parent spec ID.
 * @param {string} title - Human-readable story title.
 * @param {object} body - Story fields: roleAction, benefit, and
 *   optionally scope and acceptanceCriteria.
 * @returns {Promise<{id: string, slug: string, filePath: string}>}
 * @throws {Error} If the spec cannot be found.
 */
export async function createSpecStory(projectDir, config, specId, title, body) {
    const located = await resolveSpecDir(projectDir, config, specId);
    if (!located) {
        throw new Error(`Spec ${specId} not found.`);
    }
    const storiesDir = getSpecStoriesDir(located.dir);
    await ensureDir(storiesDir);
    const slug = slugify(title);
    const id = await getNextId(storiesDir, config.idPrefix.story || 'US');
    const filename = `${id}-${slug}.md`;
    const filePath = path.join(storiesDir, filename);
    const rendered = await renderTemplate('spec/story.md.hbs', {
        id,
        slug,
        title,
        specId,
        schemaVersion: '1.0.0',
        status: 'pending',
        date: new Date().toISOString().split('T')[0],
        roleAction: body.roleAction,
        benefit: body.benefit,
        scope: body.scope || '',
        acceptanceCriteria: body.acceptanceCriteria || [],
        projectName: config.projectName,
    }, config.templateOverrides);
    await writeFile(filePath, rendered);
    return { id, slug, filePath };
}
|
|
641
|
+
/**
 * Enumerate the T-NNN task files under a spec's tasks/ subdirectory.
 *
 * Frontmatter is read best-effort: a task whose frontmatter cannot be
 * parsed still appears in the listing with defaults derived from its
 * filename.
 *
 * @param {string} specDir - Absolute path of the spec directory.
 * @returns {Promise<Array<{id: string, slug: string, title: string,
 *   status: string, type: string, agent: string, storyId: string,
 *   filename: string, filePath: string}>>} One entry per task file,
 *   sorted by filename.
 */
export async function listSpecTasks(specDir) {
    const tasksDir = getSpecTasksDir(specDir);
    if (!(await fileExists(tasksDir))) {
        return [];
    }
    const filenames = await listFiles(tasksDir, /^T-\d{3}-.+\.md$/);
    const tasks = [];
    for (const filename of filenames.sort()) {
        const match = filename.match(/^(T-\d{3})-(.+)\.md$/);
        if (!match) {
            continue;
        }
        const [, id, slug] = match;
        const filePath = path.join(tasksDir, filename);
        // Filename-derived defaults; string frontmatter values override them.
        const fields = {
            title: slug.replace(/-/g, ' '),
            status: 'pending',
            type: 'Tech',
            agent: 'backend-agent',
            storyId: '',
        };
        try {
            const { data } = parseMarkdown(await readFile(filePath));
            for (const key of Object.keys(fields)) {
                if (typeof data[key] === 'string') {
                    fields[key] = data[key];
                }
            }
        }
        catch {
            // best-effort
        }
        tasks.push({ id, slug, ...fields, filename, filePath });
    }
    return tasks;
}
|
|
692
|
+
/**
 * Create a new T-NNN-{slug}.md task file under the spec's tasks/
 * directory.
 *
 * @param {string} projectDir - Project root directory.
 * @param {object} config - Loaded planr configuration.
 * @param {string} specId - Parent spec ID.
 * @param {object} input - Task fields: title, storyId, type, agent, and
 *   optionally filesCreate/filesModify/filesPreserve, objective,
 *   technicalSpec, testRequirements.
 * @returns {Promise<{id: string, slug: string, filePath: string}>}
 * @throws {Error} If the spec cannot be found.
 */
export async function createSpecTask(projectDir, config, specId, input) {
    const located = await resolveSpecDir(projectDir, config, specId);
    if (!located) {
        throw new Error(`Spec ${specId} not found.`);
    }
    const tasksDir = getSpecTasksDir(located.dir);
    await ensureDir(tasksDir);
    const slug = slugify(input.title);
    // Spec-mode tasks use the single-letter 'T' prefix by convention
    // (agile mode uses 'TASK'); this matches the openplanr-pipeline schema.
    const id = await getNextId(tasksDir, 'T');
    const filename = `${id}-${slug}.md`;
    const filePath = path.join(tasksDir, filename);
    const rendered = await renderTemplate('spec/task.md.hbs', {
        id,
        slug,
        title: input.title,
        storyId: input.storyId,
        specId,
        schemaVersion: '1.0.0',
        type: input.type,
        agent: input.agent,
        status: 'pending',
        date: new Date().toISOString().split('T')[0],
        filesCreate: input.filesCreate || [],
        filesModify: input.filesModify || [],
        filesPreserve: input.filesPreserve || [],
        objective: input.objective || '',
        technicalSpec: input.technicalSpec || '',
        testRequirements: input.testRequirements || '',
        projectName: config.projectName,
    }, config.templateOverrides);
    await writeFile(filePath, rendered);
    return { id, slug, filePath };
}
|
|
728
|
+
// ---------------------------------------------------------------------------
// Design assets
// ---------------------------------------------------------------------------
/**
 * Copy PNG mockup files into a spec's design/ directory and record them
 * in the spec frontmatter.
 *
 * Sources that are not .png files or do not exist are skipped with a
 * warning instead of failing the whole operation. When at least one file
 * was copied, the spec's `ui_files` frontmatter field is updated with
 * paths relative to the spec directory (design/<filename>).
 *
 * @param {string} projectDir - Project root directory.
 * @param {object} config - Loaded planr configuration.
 * @param {string} specId - Target spec ID.
 * @param {string[]} pngPaths - Candidate PNG file paths to copy.
 * @returns {Promise<{copied: string[], designDir: string}>}
 * @throws {Error} If the spec cannot be found.
 */
export async function attachSpecDesigns(projectDir, config, specId, pngPaths) {
    const located = await resolveSpecDir(projectDir, config, specId);
    if (!located) {
        throw new Error(`Spec ${specId} not found.`);
    }
    const designDir = getSpecDesignDir(located.dir);
    await ensureDir(designDir);
    const { copyFile } = await import('node:fs/promises');
    const copied = [];
    for (const src of pngPaths) {
        if (!src.toLowerCase().endsWith('.png')) {
            logger.warn(`Skipping non-PNG file: ${src}`);
            continue;
        }
        if (!(await fileExists(src))) {
            logger.warn(`Source PNG not found: ${src}`);
            continue;
        }
        const filename = path.basename(src);
        await copyFile(src, path.join(designDir, filename));
        copied.push(filename);
    }
    if (copied.length > 0) {
        // Record relative design paths in the SPEC frontmatter. Passing the
        // real array lets updateSpecFields → formatYamlValue serialize it as
        // a YAML inline-flow list that round-trips back as an array.
        await updateSpecFields(projectDir, config, specId, {
            ui_files: copied.map((f) => `design/${f}`),
        });
    }
    return { copied, designDir };
}
|
|
767
|
+
/**
 * Build a project-wide summary of all specs.
 *
 * @param {string} projectDir - Project root directory.
 * @param {object} config - Loaded planr configuration.
 * @returns {Promise<{specCount: number, specs: Array, totalStories: number,
 *   totalTasks: number}>} The spec list plus aggregate story/task counts.
 */
export async function getSpecStatus(projectDir, config) {
    const specs = await listSpecs(projectDir, config);
    let totalStories = 0;
    let totalTasks = 0;
    for (const spec of specs) {
        totalStories += spec.storyCount;
        totalTasks += spec.taskCount;
    }
    return { specCount: specs.length, specs, totalStories, totalTasks };
}
|
|
773
|
+
/**
 * Check whether a spec is complete enough to hand off to
 * openplanr-pipeline.
 *
 * Collects every blocking problem (missing stories, missing tasks,
 * stories without tasks, near-empty spec body) rather than stopping at
 * the first one found.
 *
 * @param {string} projectDir - Project root directory.
 * @param {object} config - Loaded planr configuration.
 * @param {string} specId - Spec to validate.
 * @returns {Promise<{ready: boolean, issues: string[]}>} `ready` is true
 *   only when `issues` is empty.
 */
export async function validateSpecForPromotion(projectDir, config, specId) {
    const spec = await readSpec(projectDir, config, specId);
    if (!spec) {
        return { ready: false, issues: [`Spec ${specId} not found.`] };
    }
    const issues = [];
    const stories = await listSpecStories(spec.specDir);
    if (stories.length === 0) {
        issues.push(`No User Stories found. Run \`planr spec decompose ${specId}\` first.`);
    }
    const tasks = await listSpecTasks(spec.specDir);
    if (tasks.length === 0) {
        issues.push(`No Tasks found. Run \`planr spec decompose ${specId}\` first.`);
    }
    // Every story must be covered by at least one task.
    const storyIds = new Set(stories.map((s) => s.id));
    const covered = new Set(tasks.map((t) => t.storyId).filter(Boolean));
    for (const storyId of storyIds) {
        if (!covered.has(storyId)) {
            issues.push(`Story ${storyId} has no tasks. Decomposition incomplete.`);
        }
    }
    // A real spec body should be longer than a bare placeholder.
    if (spec.content.trim().length < 100) {
        issues.push(`Spec body is very short (< 100 chars). Run \`planr spec shape ${specId}\` to flesh it out.`);
    }
    return { ready: issues.length === 0, issues };
}
|
|
805
|
+
//# sourceMappingURL=spec-service.js.map
|