@sienklogic/plan-build-run 2.24.0 → 2.26.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +58 -0
- package/README.md +62 -13
- package/dashboard/package.json +1 -2
- package/dashboard/public/css/layout.css +128 -21
- package/dashboard/public/css/status-colors.css +14 -2
- package/dashboard/public/css/tokens.css +36 -0
- package/dashboard/src/middleware/current-phase.js +2 -1
- package/dashboard/src/routes/events.routes.js +49 -0
- package/dashboard/src/routes/pages.routes.js +250 -1
- package/dashboard/src/services/config.service.js +140 -0
- package/dashboard/src/services/dashboard.service.js +156 -11
- package/dashboard/src/services/log.service.js +105 -0
- package/dashboard/src/services/notes.service.js +16 -0
- package/dashboard/src/services/phase.service.js +58 -9
- package/dashboard/src/services/requirements.service.js +130 -0
- package/dashboard/src/services/research.service.js +137 -0
- package/dashboard/src/services/todo.service.js +30 -0
- package/dashboard/src/views/config.ejs +5 -0
- package/dashboard/src/views/logs.ejs +3 -0
- package/dashboard/src/views/note-detail.ejs +3 -0
- package/dashboard/src/views/partials/activity-feed.ejs +12 -0
- package/dashboard/src/views/partials/config-content.ejs +196 -0
- package/dashboard/src/views/partials/dashboard-content.ejs +71 -46
- package/dashboard/src/views/partials/log-entries-content.ejs +17 -0
- package/dashboard/src/views/partials/logs-content.ejs +131 -0
- package/dashboard/src/views/partials/note-detail-content.ejs +22 -0
- package/dashboard/src/views/partials/notes-content.ejs +7 -1
- package/dashboard/src/views/partials/phase-content.ejs +181 -146
- package/dashboard/src/views/partials/phase-timeline.ejs +16 -0
- package/dashboard/src/views/partials/requirements-content.ejs +44 -0
- package/dashboard/src/views/partials/research-content.ejs +49 -0
- package/dashboard/src/views/partials/research-detail-content.ejs +23 -0
- package/dashboard/src/views/partials/sidebar.ejs +63 -26
- package/dashboard/src/views/partials/todos-done-content.ejs +44 -0
- package/dashboard/src/views/requirements.ejs +3 -0
- package/dashboard/src/views/research-detail.ejs +3 -0
- package/dashboard/src/views/research.ejs +3 -0
- package/dashboard/src/views/todos-done.ejs +3 -0
- package/package.json +1 -1
- package/plugins/copilot-pbr/agents/dev-sync.agent.md +114 -0
- package/plugins/copilot-pbr/hooks/hooks.json +12 -0
- package/plugins/copilot-pbr/plugin.json +1 -1
- package/plugins/cursor-pbr/.cursor-plugin/plugin.json +1 -1
- package/plugins/cursor-pbr/agents/dev-sync.md +113 -0
- package/plugins/cursor-pbr/hooks/hooks.json +10 -0
- package/plugins/pbr/.claude-plugin/plugin.json +1 -1
- package/plugins/pbr/agents/dev-sync.md +120 -0
- package/plugins/pbr/hooks/hooks.json +10 -0
- package/plugins/pbr/scripts/config-schema.json +4 -1
- package/plugins/pbr/scripts/local-llm/health.js +4 -1
- package/plugins/pbr/scripts/local-llm/operations/classify-commit.js +68 -0
- package/plugins/pbr/scripts/local-llm/operations/classify-file-intent.js +73 -0
- package/plugins/pbr/scripts/local-llm/operations/triage-test-output.js +72 -0
- package/plugins/pbr/scripts/post-bash-triage.js +132 -0
- package/plugins/pbr/scripts/post-write-dispatch.js +44 -0
- package/plugins/pbr/scripts/pre-bash-dispatch.js +17 -11
- package/plugins/pbr/scripts/status-line.js +50 -5
- package/plugins/pbr/scripts/validate-commit.js +66 -2
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
import { readdir, stat, readFile } from 'node:fs/promises';
|
|
2
|
+
import { createReadStream } from 'node:fs';
|
|
3
|
+
import { join } from 'node:path';
|
|
4
|
+
import { createInterface } from 'node:readline';
|
|
5
|
+
|
|
6
|
+
/**
 * List .jsonl files in .planning/logs/ sorted by filename descending.
 * Files whose stat() fails (e.g. removed mid-scan) are silently omitted.
 * @param {string} projectDir - Root project directory
 * @returns {Promise<Array<{name: string, size: number, modified: string}>>}
 */
export async function listLogFiles(projectDir) {
  const logsDir = join(projectDir, '.planning', 'logs');
  let names;
  try {
    names = await readdir(logsDir);
  } catch (err) {
    // A missing logs directory simply means "no logs yet".
    if (err.code === 'ENOENT') return [];
    throw err;
  }

  const logNames = names
    .filter(name => name.endsWith('.jsonl'))
    .sort((a, b) => (a < b ? 1 : a > b ? -1 : 0)); // descending by filename

  const settled = await Promise.allSettled(
    logNames.map(async name => {
      const info = await stat(join(logsDir, name));
      return { name, size: info.size, modified: info.mtime.toISOString() };
    })
  );

  // Keep only the files we could stat, preserving the sorted order.
  return settled.flatMap(result => (result.status === 'fulfilled' ? [result.value] : []));
}
|
|
32
|
+
|
|
33
|
+
/**
 * Read a page of entries from a JSONL log file.
 * Parses the file in a single pass and retains ONLY the requested page of
 * entries in memory (plus a running total), instead of accumulating every
 * matching entry before slicing.
 * Malformed/blank lines are skipped; entries are filtered by `typeFilter`
 * (exact match on `obj.type`) and `q` (case-insensitive substring of the
 * serialized entry) before pagination.
 * @param {string} filePath - Absolute path to the .jsonl file
 * @param {object} opts - { page=1, pageSize=100, typeFilter='', q='' }
 * @returns {Promise<{ entries: object[], total: number, page: number, pageSize: number }>}
 */
export async function readLogPage(filePath, { page = 1, pageSize = 100, typeFilter = '', q = '' } = {}) {
  let raw;
  try {
    raw = await readFile(filePath, 'utf8');
  } catch (err) {
    if (err.code === 'ENOENT') return { entries: [], total: 0, page, pageSize };
    throw err;
  }

  // Clamp paging inputs: page < 1 previously produced a negative slice start,
  // which wraps around from the END of the results.
  const safePage = Math.max(1, Math.trunc(page) || 1);
  const safeSize = Math.max(1, Math.trunc(pageSize) || 1);
  const start = (safePage - 1) * safeSize;
  const end = start + safeSize;
  const needle = q ? q.toLowerCase() : '';

  const entries = [];
  let total = 0;
  for (const line of raw.split('\n')) {
    const trimmed = line.trim();
    if (!trimmed) continue;
    let obj;
    try { obj = JSON.parse(trimmed); } catch { continue; }
    if (typeFilter && obj.type !== typeFilter) continue;
    if (needle && !JSON.stringify(obj).toLowerCase().includes(needle)) continue;
    // Count every match, but keep only the requested window.
    if (total >= start && total < end) entries.push(obj);
    total++;
  }

  return { entries, total, page, pageSize };
}
|
|
66
|
+
|
|
67
|
+
/**
 * Tail a log file: watch for new lines appended after the current end.
 * Uses fs.stat polling via setInterval (no extra deps).
 *
 * Fix over the previous version: if the file SHRINKS (truncated or rotated
 * in place), the watcher now resets its offset to the new end instead of
 * ignoring the file forever and later replaying from a stale offset.
 *
 * NOTE(review): a write that is in progress when a poll fires may leave a
 * partial trailing line in the read window; that fragment is dropped as
 * malformed JSON and never re-read — acceptable for a best-effort tail.
 *
 * @param {string} filePath - Absolute path to the .jsonl file
 * @param {(entry: object) => void} onLine - Called for each new parsed entry
 * @returns {Promise<() => void>} cleanup function — call to stop watching
 */
export async function tailLogFile(filePath, onLine) {
  let offset;
  try {
    offset = (await stat(filePath)).size;
  } catch {
    // File does not exist yet: start from the beginning when it appears.
    offset = 0;
  }

  const interval = setInterval(async () => {
    let size;
    try {
      size = (await stat(filePath)).size;
    } catch {
      return; // file temporarily missing; try again next tick
    }
    if (size < offset) {
      // Truncation/rotation: restart from the new end of file.
      offset = size;
      return;
    }
    if (size === offset) return;

    // Read only the newly appended bytes.
    const stream = createReadStream(filePath, { start: offset, end: size - 1 });
    const rl = createInterface({ input: stream, crlfDelay: Infinity });
    rl.on('line', line => {
      const trimmed = line.trim();
      if (!trimmed) return;
      try { onLine(JSON.parse(trimmed)); } catch { /* skip malformed */ }
    });
    // Advance synchronously so an overlapping tick never re-reads this range.
    offset = size;
  }, 500);

  return () => clearInterval(interval);
}
|
|
@@ -48,3 +48,19 @@ export async function listNotes(projectDir) {
|
|
|
48
48
|
|
|
49
49
|
return notes;
|
|
50
50
|
}
|
|
51
|
+
|
|
52
|
+
/**
 * Look up a single note in .planning/notes/ by its slug.
 * The slug is the filename with the leading YYYY-MM-DD- prefix and the
 * trailing .md extension removed.
 * @param {string} projectDir - Root project directory
 * @param {string} slug - Note slug derived from the filename
 * @returns {Promise<object|null>} Note record, or null when not found
 */
export async function getNoteBySlug(projectDir, slug) {
  const notesDir = join(projectDir, '.planning', 'notes');
  let names;
  try {
    names = await readdir(notesDir);
  } catch (err) {
    if (err.code === 'ENOENT') return null; // no notes directory yet
    throw err;
  }

  const toSlug = name => name.replace(/^\d{4}-\d{2}-\d{2}-/, '').replace(/\.md$/, '');
  let filename = null;
  for (const name of names) {
    if (name.endsWith('.md') && toSlug(name) === slug) {
      filename = name;
      break;
    }
  }
  if (filename === null) return null;

  const { frontmatter, html } = await readMarkdownFile(join(notesDir, filename));
  // Human-readable title: title-case each hyphen-separated word of the slug.
  const title = slug
    .split('-')
    .map(word => word.charAt(0).toUpperCase() + word.slice(1))
    .join(' ');
  return {
    filename,
    slug,
    title,
    date: frontmatter.date || null,
    promoted: !!frontmatter.promoted,
    html
  };
}
|
|
@@ -2,6 +2,42 @@ import { readdir } from 'node:fs/promises';
|
|
|
2
2
|
import { join } from 'node:path';
|
|
3
3
|
import { readMarkdownFile, validatePath } from '../repositories/planning.repository.js';
|
|
4
4
|
|
|
5
|
+
/**
 * Extract plan title and task count from raw PLAN.md content.
 *
 * The title comes from a line of the form `**Plan NN-NN**: Title`; the task
 * count is the number of `<task ` tag openings in the document.
 *
 * @param {string|null} rawContent - Raw PLAN.md file content
 * @returns {{ planTitle: string|null, taskCount: number }}
 */
export function extractPlanMeta(rawContent) {
  if (!rawContent) {
    return { planTitle: null, taskCount: 0 };
  }
  const title = /\*\*Plan \d{2}-\d{2}\*\*:\s*(.+)/.exec(rawContent);
  const openTags = rawContent.match(/<task /g) ?? [];
  return {
    planTitle: title === null ? null : title[1].trim(),
    taskCount: openTags.length
  };
}
|
|
18
|
+
|
|
19
|
+
/**
 * Normalise VERIFICATION.md frontmatter to include a flat mustHaves array.
 * Each entry has { category, text, passed }. An item is considered passed
 * when the overall result is pass/passed, or when no gap mentions the first
 * 30 characters of its text.
 *
 * @param {object|null|undefined} frontmatter - Parsed VERIFICATION.md frontmatter
 * @returns {object|null|undefined} - Original frontmatter extended with mustHaves array, or unchanged
 */
export function enrichVerification(frontmatter) {
  if (!frontmatter || !frontmatter.must_haves) return frontmatter;

  const verdictPassed = ['pass', 'passed'].includes(frontmatter.result);
  const gaps = Array.isArray(frontmatter.gaps) ? frontmatter.gaps : [];
  // A must-have is "in a gap" when any gap string contains its text prefix.
  const mentionedInGaps = text => {
    const prefix = text.slice(0, 30);
    return gaps.some(gap => gap.includes(prefix));
  };

  const mustHaves = Object.entries(frontmatter.must_haves).flatMap(([category, items]) =>
    Array.isArray(items)
      ? items.map(text => ({ category, text, passed: verdictPassed || !mentionedInGaps(text) }))
      : []
  );

  return { ...frontmatter, mustHaves };
}
|
|
40
|
+
|
|
5
41
|
/**
|
|
6
42
|
* Format a phase directory name into a human-readable title.
|
|
7
43
|
* Strips the numeric prefix and title-cases each word.
|
|
@@ -130,30 +166,43 @@ export async function getPhaseDetail(projectDir, phaseId) {
|
|
|
130
166
|
summaryPaths.map(({ summaryPath }) => readMarkdownFile(summaryPath))
|
|
131
167
|
);
|
|
132
168
|
|
|
169
|
+
// Read raw PLAN.md content for each plan to extract metadata
|
|
170
|
+
const planRawResults = await Promise.allSettled(
|
|
171
|
+
summaryPaths.map(({ planFile }) => readMarkdownFile(join(phaseFullPath, planFile)))
|
|
172
|
+
);
|
|
173
|
+
|
|
133
174
|
// Map results to plan objects
|
|
134
175
|
const plans = summaryPaths.map(({ planId, planFile }, index) => {
|
|
135
|
-
const
|
|
136
|
-
|
|
176
|
+
const summaryResult = summaryResults[index];
|
|
177
|
+
const planRawResult = planRawResults[index];
|
|
178
|
+
|
|
179
|
+
// Extract planTitle and taskCount from raw PLAN.md content
|
|
180
|
+
const rawPlanContent = planRawResult.status === 'fulfilled' ? planRawResult.value.rawContent : null;
|
|
181
|
+
const { planTitle, taskCount } = extractPlanMeta(rawPlanContent);
|
|
182
|
+
|
|
183
|
+
if (summaryResult.status === 'fulfilled') {
|
|
137
184
|
return {
|
|
138
185
|
planId,
|
|
139
186
|
planFile,
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
187
|
+
planTitle,
|
|
188
|
+
taskCount,
|
|
189
|
+
summary: summaryResult.value.frontmatter,
|
|
190
|
+
content: summaryResult.value.html,
|
|
191
|
+
commits: parseTaskResultsTable(summaryResult.value.rawContent)
|
|
143
192
|
};
|
|
144
193
|
}
|
|
145
|
-
if (
|
|
146
|
-
return { planId, planFile, summary: null, content: null, commits: [] };
|
|
194
|
+
if (summaryResult.reason && summaryResult.reason.code === 'ENOENT') {
|
|
195
|
+
return { planId, planFile, planTitle, taskCount, summary: null, content: null, commits: [] };
|
|
147
196
|
}
|
|
148
197
|
// Unexpected error -- re-throw
|
|
149
|
-
throw
|
|
198
|
+
throw summaryResult.reason;
|
|
150
199
|
});
|
|
151
200
|
|
|
152
201
|
// Read VERIFICATION.md
|
|
153
202
|
let verification = null;
|
|
154
203
|
try {
|
|
155
204
|
const verDoc = await readMarkdownFile(join(phaseFullPath, 'VERIFICATION.md'));
|
|
156
|
-
verification = verDoc.frontmatter;
|
|
205
|
+
verification = enrichVerification(verDoc.frontmatter);
|
|
157
206
|
} catch (error) {
|
|
158
207
|
if (error.code !== 'ENOENT') {
|
|
159
208
|
throw error;
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
import { readdir } from 'node:fs/promises';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import { readMarkdownFile } from '../repositories/planning.repository.js';
|
|
4
|
+
|
|
5
|
+
/**
 * Parse requirement IDs and text from REQUIREMENTS.md markdown body.
 * Looks for lines matching: - **ID**: description text
 * Groups them by the nearest ## heading (section); sections with no
 * requirements are dropped.
 *
 * @param {string} rawContent - Raw markdown string
 * @returns {Array<{sectionTitle: string, requirements: Array<{id: string, text: string, planRefs: string[]}>}>}
 */
function parseRequirementSections(rawContent) {
  const HEADING = /^##\s+(.+)/;
  // Matches: - **P02-G1**: Some text  OR  - **P02-G1** Some text
  const REQUIREMENT = /^[-*]\s+\*\*([A-Z][A-Z0-9]*-[A-Z0-9]+)\*\*[:\s]+(.+)/;

  const sections = [];
  let section = null;

  for (const line of rawContent.split('\n')) {
    const heading = HEADING.exec(line);
    if (heading) {
      section = { sectionTitle: heading[1].trim(), requirements: [] };
      sections.push(section);
      continue;
    }
    const req = REQUIREMENT.exec(line);
    if (req && section) {
      section.requirements.push({
        id: req[1].trim(),
        text: req[2].trim(),
        planRefs: []
      });
    }
  }

  return sections.filter(s => s.requirements.length > 0);
}
|
|
37
|
+
|
|
38
|
+
/**
 * Scan all PLAN-*.md files in all phase directories and collect requirement_ids
 * from their frontmatter. Returns a Map<requirementId, planId[]>.
 *
 * Improvement: the per-phase directory listings were previously awaited one
 * at a time inside the loop; they are independent, so they now run in
 * parallel. Unreadable phase directories and plan files are skipped.
 *
 * @param {string} projectDir - Absolute path to the project root
 * @returns {Promise<Map<string, string[]>>}
 */
async function buildRequirementIndex(projectDir) {
  const phasesDir = join(projectDir, '.planning', 'phases');
  const index = new Map();

  let phaseDirs;
  try {
    phaseDirs = await readdir(phasesDir, { withFileTypes: true });
  } catch (err) {
    if (err.code === 'ENOENT') return index;
    throw err;
  }

  // List every phase directory concurrently.
  const listings = await Promise.all(
    phaseDirs
      .filter(entry => entry.isDirectory())
      .map(async entry => {
        const phaseFullPath = join(phasesDir, entry.name);
        try {
          return { phaseFullPath, files: await readdir(phaseFullPath) };
        } catch {
          return null; // skip unreadable phase directories
        }
      })
  );

  const planReadTasks = [];
  for (const listing of listings) {
    if (!listing) continue;
    for (const filename of listing.files) {
      if (!/^PLAN-\d{2}\.md$/.test(filename)) continue;
      planReadTasks.push(
        readMarkdownFile(join(listing.phaseFullPath, filename))
          .then(({ frontmatter }) => {
            const ids = frontmatter.requirement_ids;
            const planId = frontmatter.plan;
            if (!Array.isArray(ids) || !planId) return;
            for (const id of ids) {
              if (!index.has(id)) index.set(id, []);
              index.get(id).push(planId);
            }
          })
          .catch(() => { /* skip unreadable plan files */ })
      );
    }
  }

  await Promise.all(planReadTasks);
  return index;
}
|
|
87
|
+
|
|
88
|
+
/**
 * Build the full requirements traceability dataset:
 * - Parses REQUIREMENTS.md into sections + requirements
 * - Cross-references each requirement ID against plan frontmatter
 * - Returns coverage counts and per-requirement plan references
 *
 * Improvement: the previous version wrapped the synchronous parser in
 * `Promise.resolve` inside a `Promise.all` — a pointless promise-construction
 * pattern. The parser is called directly; only the plan index is awaited.
 *
 * @param {string} projectDir - Absolute path to the project root
 * @returns {Promise<{sections: Array, totalCount: number, coveredCount: number}>}
 */
export async function getRequirementsData(projectDir) {
  const reqPath = join(projectDir, '.planning', 'REQUIREMENTS.md');

  let rawContent;
  try {
    const parsed = await readMarkdownFile(reqPath);
    rawContent = parsed.rawContent || '';
  } catch (err) {
    if (err.code === 'ENOENT') {
      // No REQUIREMENTS.md yet: empty dataset.
      return { sections: [], totalCount: 0, coveredCount: 0 };
    }
    throw err;
  }

  // Parsing is synchronous; only the plan index is asynchronous.
  const sections = parseRequirementSections(rawContent);
  const reqIndex = await buildRequirementIndex(projectDir);

  let totalCount = 0;
  let coveredCount = 0;

  for (const section of sections) {
    for (const req of section.requirements) {
      const refs = reqIndex.get(req.id) || [];
      req.planRefs = refs;
      req.covered = refs.length > 0;
      totalCount++;
      if (req.covered) coveredCount++;
    }
  }

  return { sections, totalCount, coveredCount };
}
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
import { readdir } from 'node:fs/promises';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import { readMarkdownFile } from '../repositories/planning.repository.js';
|
|
4
|
+
|
|
5
|
+
/**
 * List all research docs from .planning/research/*.md, sorted by filename descending.
 *
 * @param {string} projectDir - Absolute path to the project root
 * @returns {Promise<Array>}
 */
export async function listResearchDocs(projectDir) {
  const dir = join(projectDir, '.planning', 'research');
  let entries;
  try {
    entries = await readdir(dir);
  } catch (err) {
    if (err.code === 'ENOENT') return [];
    throw err;
  }

  const mdFiles = entries.filter(f => f.endsWith('.md')).sort().reverse();
  const results = await Promise.allSettled(mdFiles.map(f => readMarkdownFile(join(dir, f))));

  // Normalise a frontmatter date (Date object or scalar) to YYYY-MM-DD, or null.
  const toDateString = value => {
    if (!value) return null;
    return value instanceof Date ? value.toISOString().slice(0, 10) : String(value);
  };

  const docs = [];
  for (let i = 0; i < mdFiles.length; i++) {
    if (results[i].status !== 'fulfilled') continue;
    const { frontmatter, html } = results[i].value;
    const filename = mdFiles[i];
    const slug = filename.replace(/^\d{4}-\d{2}-\d{2}-/, '').replace(/\.md$/, '');
    const title = slug.split('-').map(w => w.charAt(0).toUpperCase() + w.slice(1)).join(' ');
    docs.push({
      filename,
      slug,
      title,
      topic: frontmatter.topic || null,
      date: toDateString(frontmatter.research_date),
      confidence: frontmatter.confidence || null,
      coverage: frontmatter.coverage || null,
      html
    });
  }
  return docs;
}
|
|
48
|
+
|
|
49
|
+
/**
 * List all codebase docs from .planning/codebase/*.md, sorted by filename descending.
 *
 * @param {string} projectDir - Absolute path to the project root
 * @returns {Promise<Array>}
 */
export async function listCodebaseDocs(projectDir) {
  const dir = join(projectDir, '.planning', 'codebase');
  let entries;
  try {
    entries = await readdir(dir);
  } catch (err) {
    if (err.code === 'ENOENT') return [];
    throw err;
  }

  const mdFiles = entries.filter(f => f.endsWith('.md')).sort().reverse();
  const results = await Promise.allSettled(mdFiles.map(f => readMarkdownFile(join(dir, f))));

  // Normalise a frontmatter date (Date object or scalar) to YYYY-MM-DD, or null.
  const toDateString = value => {
    if (!value) return null;
    return value instanceof Date ? value.toISOString().slice(0, 10) : String(value);
  };

  const docs = [];
  for (let i = 0; i < mdFiles.length; i++) {
    if (results[i].status !== 'fulfilled') continue;
    const { frontmatter, html } = results[i].value;
    const filename = mdFiles[i];
    const slug = filename.replace(/^\d{4}-\d{2}-\d{2}-/, '').replace(/\.md$/, '');
    const title = slug.split('-').map(w => w.charAt(0).toUpperCase() + w.slice(1)).join(' ');
    docs.push({
      filename,
      slug,
      title,
      date: toDateString(frontmatter.scan_date),
      html
    });
  }
  return docs;
}
|
|
89
|
+
|
|
90
|
+
/**
 * Get a single research or codebase doc by slug.
 * Searches research/ first, then codebase/.
 *
 * @param {string} projectDir - Absolute path to the project root
 * @param {string} slug - Slug derived from filename (without date prefix and .md extension)
 * @returns {Promise<object|null>}
 */
export async function getResearchDocBySlug(projectDir, slug) {
  // Filename -> slug: drop the YYYY-MM-DD- prefix and .md extension.
  const toSlug = name => name.replace(/^\d{4}-\d{2}-\d{2}-/, '').replace(/\.md$/, '');
  // Normalise a frontmatter date (Date object or scalar) to YYYY-MM-DD, or null.
  const toDateString = value => {
    if (!value) return null;
    return value instanceof Date ? value.toISOString().slice(0, 10) : String(value);
  };

  for (const subdir of ['research', 'codebase']) {
    const dir = join(projectDir, '.planning', subdir);
    let entries;
    try {
      entries = await readdir(dir);
    } catch (err) {
      if (err.code === 'ENOENT') continue;
      throw err;
    }

    const filename = entries.find(f => f.endsWith('.md') && toSlug(f) === slug);
    if (!filename) continue;

    const { frontmatter, html } = await readMarkdownFile(join(dir, filename));
    const title = slug.split('-').map(w => w.charAt(0).toUpperCase() + w.slice(1)).join(' ');
    return {
      filename,
      slug,
      title,
      topic: frontmatter.topic || null,
      // research docs carry research_date; codebase docs carry scan_date
      date: toDateString(frontmatter.research_date) ?? toDateString(frontmatter.scan_date),
      confidence: frontmatter.confidence || null,
      sources_checked: frontmatter.sources_checked || null,
      coverage: frontmatter.coverage || null,
      section: subdir,
      html
    };
  }
  return null;
}
|
|
@@ -230,6 +230,36 @@ export async function createTodo(projectDir, todoData) {
|
|
|
230
230
|
});
|
|
231
231
|
}
|
|
232
232
|
|
|
233
|
+
/**
 * List completed todos from .planning/todos/done/, newest filename first.
 * Filenames must match NNN-slug.md; the frontmatter supplies title,
 * priority, phase, and completed_at (falling back to the filename slug
 * for the title).
 *
 * Improvement: files were previously read one at a time with `await` inside
 * the loop; the reads are independent, so they now run in parallel (order is
 * preserved because Promise.allSettled keeps input order). Unreadable or
 * malformed files are skipped, matching the other list* services.
 *
 * @param {string} projectDir - Absolute path to the project root
 * @returns {Promise<Array<{id: string, filename: string, title: string, priority: string, phase: string, completedAt: *}>>}
 */
export async function listDoneTodos(projectDir) {
  const doneDir = join(projectDir, '.planning', 'todos', 'done');
  let files;
  try {
    files = await readdir(doneDir);
  } catch (err) {
    if (err.code === 'ENOENT') return [];
    throw err;
  }

  const mdFiles = files.filter(f => f.endsWith('.md')).sort().reverse();

  const results = await Promise.allSettled(
    mdFiles.map(async filename => {
      const match = filename.match(/^(\d{3})-(.+)\.md$/);
      if (!match) return null; // not a todo file
      const [, id, slugPart] = match;
      const raw = await readFile(join(doneDir, filename), 'utf-8');
      const { data } = matter(raw);
      return {
        id,
        filename,
        title: data.title || slugPart,
        priority: data.priority || '',
        phase: data.phase || '',
        completedAt: data.completed_at || null
      };
    })
  );

  return results
    .filter(r => r.status === 'fulfilled' && r.value !== null)
    .map(r => r.value);
}
|
|
262
|
+
|
|
233
263
|
export async function completeTodo(projectDir, todoId) {
|
|
234
264
|
return writeQueue.enqueue(async () => {
|
|
235
265
|
const pendingDir = join(projectDir, '.planning', 'todos', 'pending');
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
<%# Recent-activity feed partial.
    Expects `recentActivity`: an array of { path, timestamp } items
    (timestamp presumably an ISO-8601 string, since it is used verbatim
    in the <time datetime> attribute — confirm against the service that
    builds it). Renders a muted placeholder when the array is missing
    or empty. %>
<% if (!recentActivity || recentActivity.length === 0) { %>
  <p class="muted">No recent activity.</p>
<% } else { %>
  <ul class="activity-feed">
    <% recentActivity.forEach(function(item) { %>
      <li class="activity-item">
        <span class="activity-path"><%= item.path %></span>
        <time class="activity-time" datetime="<%= item.timestamp %>"><%= item.timestamp %></time>
      </li>
    <% }); %>
  </ul>
<% } %>
|