gsd-opencode 1.30.0 → 1.33.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/agents/gsd-debugger.md +0 -1
- package/agents/gsd-doc-verifier.md +207 -0
- package/agents/gsd-doc-writer.md +608 -0
- package/agents/gsd-executor.md +22 -1
- package/agents/gsd-phase-researcher.md +41 -0
- package/agents/gsd-plan-checker.md +82 -0
- package/agents/gsd-planner.md +123 -194
- package/agents/gsd-security-auditor.md +129 -0
- package/agents/gsd-ui-auditor.md +40 -0
- package/agents/gsd-user-profiler.md +2 -2
- package/agents/gsd-verifier.md +84 -18
- package/commands/gsd/gsd-add-backlog.md +1 -1
- package/commands/gsd/gsd-analyze-dependencies.md +34 -0
- package/commands/gsd/gsd-autonomous.md +6 -2
- package/commands/gsd/gsd-cleanup.md +5 -0
- package/commands/gsd/gsd-debug.md +24 -21
- package/commands/gsd/gsd-discuss-phase.md +7 -2
- package/commands/gsd/gsd-docs-update.md +48 -0
- package/commands/gsd/gsd-execute-phase.md +4 -0
- package/commands/gsd/gsd-help.md +2 -0
- package/commands/gsd/gsd-join-discord.md +2 -1
- package/commands/gsd/gsd-manager.md +1 -0
- package/commands/gsd/gsd-new-project.md +4 -0
- package/commands/gsd/gsd-plan-phase.md +5 -0
- package/commands/gsd/gsd-quick.md +5 -3
- package/commands/gsd/gsd-reapply-patches.md +171 -39
- package/commands/gsd/gsd-research-phase.md +2 -12
- package/commands/gsd/gsd-review-backlog.md +1 -0
- package/commands/gsd/gsd-review.md +3 -2
- package/commands/gsd/gsd-secure-phase.md +35 -0
- package/commands/gsd/gsd-thread.md +1 -1
- package/commands/gsd/gsd-workstreams.md +7 -2
- package/get-shit-done/bin/gsd-tools.cjs +42 -8
- package/get-shit-done/bin/lib/commands.cjs +68 -14
- package/get-shit-done/bin/lib/config.cjs +18 -10
- package/get-shit-done/bin/lib/core.cjs +383 -80
- package/get-shit-done/bin/lib/docs.cjs +267 -0
- package/get-shit-done/bin/lib/frontmatter.cjs +47 -2
- package/get-shit-done/bin/lib/init.cjs +85 -5
- package/get-shit-done/bin/lib/milestone.cjs +21 -0
- package/get-shit-done/bin/lib/model-profiles.cjs +2 -0
- package/get-shit-done/bin/lib/phase.cjs +232 -189
- package/get-shit-done/bin/lib/profile-output.cjs +97 -1
- package/get-shit-done/bin/lib/roadmap.cjs +137 -113
- package/get-shit-done/bin/lib/schema-detect.cjs +238 -0
- package/get-shit-done/bin/lib/security.cjs +5 -3
- package/get-shit-done/bin/lib/state.cjs +366 -44
- package/get-shit-done/bin/lib/verify.cjs +158 -14
- package/get-shit-done/bin/lib/workstream.cjs +6 -2
- package/get-shit-done/references/agent-contracts.md +79 -0
- package/get-shit-done/references/artifact-types.md +113 -0
- package/get-shit-done/references/context-budget.md +49 -0
- package/get-shit-done/references/continuation-format.md +15 -15
- package/get-shit-done/references/domain-probes.md +125 -0
- package/get-shit-done/references/gate-prompts.md +100 -0
- package/get-shit-done/references/model-profiles.md +2 -2
- package/get-shit-done/references/planner-gap-closure.md +62 -0
- package/get-shit-done/references/planner-reviews.md +39 -0
- package/get-shit-done/references/planner-revision.md +87 -0
- package/get-shit-done/references/planning-config.md +15 -0
- package/get-shit-done/references/revision-loop.md +97 -0
- package/get-shit-done/references/ui-brand.md +2 -2
- package/get-shit-done/references/universal-anti-patterns.md +58 -0
- package/get-shit-done/references/workstream-flag.md +56 -3
- package/get-shit-done/templates/SECURITY.md +61 -0
- package/get-shit-done/templates/VALIDATION.md +3 -3
- package/get-shit-done/templates/claude-md.md +27 -4
- package/get-shit-done/templates/config.json +4 -0
- package/get-shit-done/templates/debug-subagent-prompt.md +2 -6
- package/get-shit-done/templates/planner-subagent-prompt.md +2 -10
- package/get-shit-done/workflows/add-phase.md +2 -2
- package/get-shit-done/workflows/add-todo.md +1 -1
- package/get-shit-done/workflows/analyze-dependencies.md +96 -0
- package/get-shit-done/workflows/audit-milestone.md +8 -12
- package/get-shit-done/workflows/autonomous.md +158 -13
- package/get-shit-done/workflows/check-todos.md +2 -2
- package/get-shit-done/workflows/complete-milestone.md +13 -4
- package/get-shit-done/workflows/diagnose-issues.md +8 -6
- package/get-shit-done/workflows/discovery-phase.md +1 -1
- package/get-shit-done/workflows/discuss-phase-assumptions.md +22 -4
- package/get-shit-done/workflows/discuss-phase-power.md +291 -0
- package/get-shit-done/workflows/discuss-phase.md +149 -11
- package/get-shit-done/workflows/docs-update.md +1093 -0
- package/get-shit-done/workflows/execute-phase.md +362 -66
- package/get-shit-done/workflows/execute-plan.md +1 -1
- package/get-shit-done/workflows/help.md +9 -6
- package/get-shit-done/workflows/insert-phase.md +2 -2
- package/get-shit-done/workflows/manager.md +27 -26
- package/get-shit-done/workflows/map-codebase.md +10 -32
- package/get-shit-done/workflows/new-milestone.md +14 -8
- package/get-shit-done/workflows/new-project.md +48 -25
- package/get-shit-done/workflows/next.md +1 -1
- package/get-shit-done/workflows/note.md +1 -1
- package/get-shit-done/workflows/pause-work.md +73 -10
- package/get-shit-done/workflows/plan-milestone-gaps.md +2 -2
- package/get-shit-done/workflows/plan-phase.md +184 -32
- package/get-shit-done/workflows/progress.md +20 -20
- package/get-shit-done/workflows/quick.md +102 -84
- package/get-shit-done/workflows/research-phase.md +2 -6
- package/get-shit-done/workflows/resume-project.md +4 -4
- package/get-shit-done/workflows/review.md +56 -3
- package/get-shit-done/workflows/secure-phase.md +154 -0
- package/get-shit-done/workflows/settings.md +13 -2
- package/get-shit-done/workflows/ship.md +13 -4
- package/get-shit-done/workflows/transition.md +6 -6
- package/get-shit-done/workflows/ui-phase.md +4 -14
- package/get-shit-done/workflows/ui-review.md +25 -7
- package/get-shit-done/workflows/update.md +165 -16
- package/get-shit-done/workflows/validate-phase.md +1 -11
- package/get-shit-done/workflows/verify-phase.md +127 -6
- package/get-shit-done/workflows/verify-work.md +69 -21
- package/package.json +1 -1
|
@@ -0,0 +1,267 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Docs — Commands for the docs-update workflow
|
|
3
|
+
*
|
|
4
|
+
* Provides `cmdDocsInit` which returns project signals, existing doc inventory
|
|
5
|
+
* with GSD marker detection, doc tooling detection, monorepo awareness, and
|
|
6
|
+
* model resolution. Used by Phase 2 to route doc generation appropriately.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
const fs = require('fs');
|
|
10
|
+
const path = require('path');
|
|
11
|
+
const { output, loadConfig, resolveModelInternal, pathExistsInternal, toPosixPath, checkAgentsInstalled } = require('./core.cjs');
|
|
12
|
+
|
|
13
|
+
// ─── Constants ────────────────────────────────────────────────────────────────
|
|
14
|
+
|
|
15
|
+
const GSD_MARKER = '<!-- generated-by: gsd-doc-writer -->';
|
|
16
|
+
|
|
17
|
+
const SKIP_DIRS = new Set([
|
|
18
|
+
'node_modules', '.git', '.planning', '.OpenCode', '__pycache__',
|
|
19
|
+
'target', 'dist', 'build', '.next', '.nuxt', 'coverage',
|
|
20
|
+
'.vscode', '.idea',
|
|
21
|
+
]);
|
|
22
|
+
|
|
23
|
+
// ─── Private helpers ──────────────────────────────────────────────────────────
|
|
24
|
+
|
|
25
|
+
/**
 * Check whether a file begins with the GSD doc writer marker.
 * Reads the first 500 bytes only — avoids loading large files.
 *
 * @param {string} filePath - Absolute path to the file
 * @returns {boolean} True when GSD_MARKER appears within the first 500 bytes
 */
function hasGsdMarker(filePath) {
  try {
    const buf = Buffer.alloc(500);
    const fd = fs.openSync(filePath, 'r');
    let bytesRead;
    try {
      bytesRead = fs.readSync(fd, buf, 0, 500, 0);
    } finally {
      // Always release the descriptor — previously it leaked if readSync threw.
      fs.closeSync(fd);
    }
    // subarray (non-deprecated alias of slice) — zero-copy view of the bytes read.
    return buf.subarray(0, bytesRead).toString('utf-8').includes(GSD_MARKER);
  } catch {
    // Missing/unreadable file — treat as "no marker" (best-effort scan).
    return false;
  }
}
|
|
43
|
+
|
|
44
|
+
/**
 * Collect Markdown files from the project root (top level only) and from the
 * docs/ directory (up to 4 levels deep), skipping anything in SKIP_DIRS.
 * Falls back to documentation/ or doc/ when docs/ does not exist.
 *
 * @param {string} cwd - Project root
 * @returns {Array<{path: string, has_gsd_marker: boolean}>} Sorted by path
 */
function scanExistingDocs(cwd) {
  const MAX_DEPTH = 4;
  const found = [];

  // Record one markdown file as a posix-relative inventory entry.
  const record = (absPath) => {
    found.push({
      path: toPosixPath(path.relative(cwd, absPath)),
      has_gsd_marker: hasGsdMarker(absPath),
    });
  };

  /**
   * Depth-limited recursive walk collecting .md files.
   * @param {string} dir - Directory to scan
   * @param {number} depth - Current depth (1-based)
   */
  const walk = (dir, depth) => {
    if (depth > MAX_DEPTH) return;
    let entries;
    try {
      entries = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      return; // directory may not exist — best-effort
    }
    for (const entry of entries) {
      if (SKIP_DIRS.has(entry.name)) continue;
      const abs = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        walk(abs, depth + 1);
      } else if (entry.isFile() && entry.name.toLowerCase().endsWith('.md')) {
        record(abs);
      }
    }
  };

  // Top-level .md files in the project root (no recursion).
  try {
    for (const entry of fs.readdirSync(cwd, { withFileTypes: true })) {
      if (entry.isFile() && entry.name.toLowerCase().endsWith('.md')) {
        record(path.join(cwd, entry.name));
      }
    }
  } catch { /* best-effort */ }

  // Recursively scan the docs/ tree.
  const docsDir = path.join(cwd, 'docs');
  walk(docsDir, 1);

  // When docs/ is absent, scan the first existing alternative directory.
  try {
    fs.statSync(docsDir);
  } catch {
    for (const alt of ['documentation', 'doc']) {
      const altDir = path.join(cwd, alt);
      try {
        if (fs.statSync(altDir).isDirectory()) {
          walk(altDir, 1);
          break;
        }
      } catch { /* not present */ }
    }
  }

  return found.sort((a, b) => a.path.localeCompare(b.path));
}
|
|
112
|
+
|
|
113
|
+
/**
 * Detect project type signals from the filesystem and package.json.
 * All checks are best-effort and never throw.
 *
 * @param {string} cwd - Project root
 * @returns {Object} Boolean signal fields (has_package_json, has_api_routes,
 *   has_cli_bin, is_open_source, has_deploy_config, is_monorepo, has_tests)
 */
function detectProjectType(cwd) {
  const exists = (rel) => {
    try { return pathExistsInternal(cwd, rel); } catch { return false; }
  };

  // Read and parse package.json once — the original re-read it three times.
  let pkg = null;
  try {
    pkg = JSON.parse(fs.readFileSync(path.join(cwd, 'package.json'), 'utf-8'));
  } catch { /* no package.json or invalid JSON */ }

  // has_cli_bin: package.json has a non-empty `bin` field (string or object)
  const has_cli_bin = !!(pkg && pkg.bin && (typeof pkg.bin === 'string' || Object.keys(pkg.bin).length > 0));

  // is_monorepo: pnpm-workspace.yaml, lerna.json, or package.json workspaces
  let is_monorepo = exists('pnpm-workspace.yaml') || exists('lerna.json');
  if (!is_monorepo && pkg) {
    is_monorepo = Array.isArray(pkg.workspaces) && pkg.workspaces.length > 0;
  }

  // has_tests: common test directories or test frameworks in devDependencies
  let has_tests = exists('test') || exists('tests') || exists('__tests__') || exists('spec');
  if (!has_tests && pkg) {
    const devDeps = Object.keys(pkg.devDependencies || {});
    has_tests = devDeps.some(d => ['vitest', 'jest', 'mocha', 'jasmine', 'ava'].includes(d));
  }

  // has_deploy_config: various deployment config files
  const deployFiles = [
    'Dockerfile', 'docker-compose.yml', 'docker-compose.yaml',
    'fly.toml', 'render.yaml', 'vercel.json', 'netlify.toml', 'railway.json',
    '.github/workflows/deploy.yml', '.github/workflows/deploy.yaml',
  ];
  const has_deploy_config = deployFiles.some(f => exists(f));

  return {
    has_package_json: exists('package.json'),
    has_api_routes: (
      exists('src/app/api') || exists('routes') || exists('src/routes') ||
      exists('api') || exists('server')
    ),
    has_cli_bin,
    is_open_source: exists('LICENSE') || exists('LICENSE.md'),
    has_deploy_config,
    is_monorepo,
    has_tests,
  };
}
|
|
172
|
+
|
|
173
|
+
/**
 * Detect known documentation tooling in the project.
 *
 * @param {string} cwd - Project root
 * @returns {Object} Boolean detection fields (docusaurus, vitepress, mkdocs, storybook)
 */
function detectDocTooling(cwd) {
  const present = (rel) => {
    try { return pathExistsInternal(cwd, rel); } catch { return false; }
  };
  // True when any of the given config paths exists (short-circuits in order).
  const anyOf = (...candidates) => candidates.some(present);

  return {
    docusaurus: anyOf('docusaurus.config.js', 'docusaurus.config.ts'),
    vitepress: anyOf('.vitepress/config.js', '.vitepress/config.ts', '.vitepress/config.mts'),
    mkdocs: present('mkdocs.yml'),
    storybook: present('.storybook'),
  };
}
|
|
195
|
+
|
|
196
|
+
/**
 * Extract monorepo workspace globs from pnpm-workspace.yaml, package.json
 * workspaces, or lerna.json — checked in that order; first non-empty wins.
 *
 * @param {string} cwd - Project root
 * @returns {string[]} Array of workspace glob patterns, or [] if not a monorepo
 */
function detectMonorepoWorkspaces(cwd) {
  // Best-effort JSON reader: null when absent or unparseable.
  const readJson = (file) => {
    try {
      return JSON.parse(fs.readFileSync(path.join(cwd, file), 'utf-8'));
    } catch {
      return null;
    }
  };

  // 1) pnpm-workspace.yaml — naive line scan for "- <glob>" list items
  //    (quote-stripping; not a full YAML parse).
  try {
    const yaml = fs.readFileSync(path.join(cwd, 'pnpm-workspace.yaml'), 'utf-8');
    const globs = yaml
      .split('\n')
      .map((line) => line.match(/^\s*-\s+['"]?(.+?)['"]?\s*$/))
      .filter(Boolean)
      .map((m) => m[1].trim());
    if (globs.length > 0) return globs;
  } catch { /* not present */ }

  // 2) package.json "workspaces" array
  const pkg = readJson('package.json');
  if (pkg && Array.isArray(pkg.workspaces) && pkg.workspaces.length > 0) {
    return pkg.workspaces;
  }

  // 3) lerna.json "packages" array
  const lerna = readJson('lerna.json');
  if (lerna && Array.isArray(lerna.packages) && lerna.packages.length > 0) {
    return lerna.packages;
  }

  return [];
}
|
|
234
|
+
|
|
235
|
+
// ─── Public commands ──────────────────────────────────────────────────────────
|
|
236
|
+
|
|
237
|
+
/**
 * Return JSON context for the docs-update workflow: project signals, existing
 * doc inventory, doc tooling detection, monorepo workspaces, and model
 * resolution. Follows the cmdInitMapCodebase pattern.
 *
 * @example
 * node gsd-tools.cjs docs-init --raw
 *
 * @param {string} cwd - Project root directory
 * @param {boolean} raw - Pass raw JSON flag through to output()
 */
function cmdDocsInit(cwd, raw) {
  const config = loadConfig(cwd);

  const result = {
    doc_writer_model: resolveModelInternal(cwd, 'gsd-doc-writer'),
    commit_docs: config.commit_docs,
    existing_docs: scanExistingDocs(cwd),
    project_type: detectProjectType(cwd),
    doc_tooling: detectDocTooling(cwd),
    monorepo_workspaces: detectMonorepoWorkspaces(cwd),
    planning_exists: pathExistsInternal(cwd, '.planning'),
  };

  // Mirrors withProjectRoot in init.cjs: inject project_root + agent status.
  result.project_root = cwd;
  const { agents_installed, missing_agents } = checkAgentsInstalled();
  result.agents_installed = agents_installed;
  result.missing_agents = missing_agents;

  output(result, raw);
}
|
|
266
|
+
|
|
267
|
+
module.exports = { cmdDocsInit };
|
|
@@ -8,6 +8,38 @@ const { safeReadFile, normalizeMd, output, error } = require('./core.cjs');
|
|
|
8
8
|
|
|
9
9
|
// ─── Parsing engine ───────────────────────────────────────────────────────────
|
|
10
10
|
|
|
11
|
+
/**
|
|
12
|
+
* Split a YAML inline array body on commas, respecting quoted strings.
|
|
13
|
+
* e.g. '"a, b", c' → ['a, b', 'c']
|
|
14
|
+
*/
|
|
15
|
+
function splitInlineArray(body) {
|
|
16
|
+
const items = [];
|
|
17
|
+
let current = '';
|
|
18
|
+
let inQuote = null; // null | '"' | "'"
|
|
19
|
+
|
|
20
|
+
for (let i = 0; i < body.length; i++) {
|
|
21
|
+
const ch = body[i];
|
|
22
|
+
if (inQuote) {
|
|
23
|
+
if (ch === inQuote) {
|
|
24
|
+
inQuote = null;
|
|
25
|
+
} else {
|
|
26
|
+
current += ch;
|
|
27
|
+
}
|
|
28
|
+
} else if (ch === '"' || ch === "'") {
|
|
29
|
+
inQuote = ch;
|
|
30
|
+
} else if (ch === ',') {
|
|
31
|
+
const trimmed = current.trim();
|
|
32
|
+
if (trimmed) items.push(trimmed);
|
|
33
|
+
current = '';
|
|
34
|
+
} else {
|
|
35
|
+
current += ch;
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
const trimmed = current.trim();
|
|
39
|
+
if (trimmed) items.push(trimmed);
|
|
40
|
+
return items;
|
|
41
|
+
}
|
|
42
|
+
|
|
11
43
|
function extractFrontmatter(content) {
|
|
12
44
|
const frontmatter = {};
|
|
13
45
|
// Find ALL frontmatter blocks at the start of the file.
|
|
@@ -53,8 +85,8 @@ function extractFrontmatter(content) {
|
|
|
53
85
|
// Push new context for potential nested content
|
|
54
86
|
stack.push({ obj: current.obj[key], key: null, indent });
|
|
55
87
|
} else if (value.startsWith('[') && value.endsWith(']')) {
|
|
56
|
-
// Inline array: key: [a, b, c]
|
|
57
|
-
current.obj[key] = value.slice(1, -1)
|
|
88
|
+
// Inline array: key: [a, b, c] — quote-aware split (REG-04 fix)
|
|
89
|
+
current.obj[key] = splitInlineArray(value.slice(1, -1));
|
|
58
90
|
current.key = null;
|
|
59
91
|
} else {
|
|
60
92
|
// Simple key: value
|
|
@@ -252,6 +284,19 @@ function parseMustHavesBlock(content, blockName) {
|
|
|
252
284
|
}
|
|
253
285
|
if (current) items.push(current);
|
|
254
286
|
|
|
287
|
+
// Warn when must_haves block exists but parsed as empty -- likely YAML formatting issue.
|
|
288
|
+
// This is a critical diagnostic: empty must_haves causes verification to silently degrade
|
|
289
|
+
// to Option C (LLM-derived truths) instead of checking documented contracts.
|
|
290
|
+
if (items.length === 0 && blockLines.length > 0) {
|
|
291
|
+
const nonEmptyLines = blockLines.filter(l => l.trim() !== '').length;
|
|
292
|
+
if (nonEmptyLines > 0) {
|
|
293
|
+
process.stderr.write(
|
|
294
|
+
`[gsd-tools] WARNING: must_haves.${blockName} block has ${nonEmptyLines} content lines but parsed 0 items. ` +
|
|
295
|
+
`Possible YAML formatting issue — verification will fall back to LLM-derived truths.\n`
|
|
296
|
+
);
|
|
297
|
+
}
|
|
298
|
+
}
|
|
299
|
+
|
|
255
300
|
return items;
|
|
256
301
|
}
|
|
257
302
|
|
|
@@ -5,7 +5,7 @@
|
|
|
5
5
|
const fs = require('fs');
|
|
6
6
|
const path = require('path');
|
|
7
7
|
const { execSync } = require('child_process');
|
|
8
|
-
const { loadConfig, resolveModelInternal, findPhaseInternal, getRoadmapPhaseInternal, pathExistsInternal, generateSlugInternal, getMilestoneInfo, getMilestonePhaseFilter, stripShippedMilestones, extractCurrentMilestone, normalizePhaseName, planningPaths, planningDir, planningRoot, toPosixPath, output, error, checkAgentsInstalled } = require('./core.cjs');
|
|
8
|
+
const { loadConfig, resolveModelInternal, findPhaseInternal, getRoadmapPhaseInternal, pathExistsInternal, generateSlugInternal, getMilestoneInfo, getMilestonePhaseFilter, stripShippedMilestones, extractCurrentMilestone, normalizePhaseName, planningPaths, planningDir, planningRoot, toPosixPath, output, error, checkAgentsInstalled, phaseTokenMatches } = require('./core.cjs');
|
|
9
9
|
|
|
10
10
|
function getLatestCompletedMilestone(cwd) {
|
|
11
11
|
const milestonesPath = path.join(planningRoot(cwd), 'MILESTONES.md');
|
|
@@ -37,10 +37,17 @@ function withProjectRoot(cwd, result) {
|
|
|
37
37
|
const agentStatus = checkAgentsInstalled();
|
|
38
38
|
result.agents_installed = agentStatus.agents_installed;
|
|
39
39
|
result.missing_agents = agentStatus.missing_agents;
|
|
40
|
+
// Inject response_language into all init outputs (#1399).
|
|
41
|
+
// Workflows propagate this to subagent prompts so user-facing questions
|
|
42
|
+
// stay in the configured language across phase boundaries.
|
|
43
|
+
const config = loadConfig(cwd);
|
|
44
|
+
if (config.response_language) {
|
|
45
|
+
result.response_language = config.response_language;
|
|
46
|
+
}
|
|
40
47
|
return result;
|
|
41
48
|
}
|
|
42
49
|
|
|
43
|
-
function cmdInitExecutePhase(cwd, phase, raw) {
|
|
50
|
+
function cmdInitExecutePhase(cwd, phase, raw, options = {}) {
|
|
44
51
|
if (!phase) {
|
|
45
52
|
error('phase required for init execute-phase');
|
|
46
53
|
}
|
|
@@ -108,6 +115,7 @@ function cmdInitExecutePhase(cwd, phase, raw) {
|
|
|
108
115
|
// Branch name (pre-computed)
|
|
109
116
|
branch_name: config.branching_strategy === 'phase' && phaseInfo
|
|
110
117
|
? config.phase_branch_template
|
|
118
|
+
.replace('{project}', config.project_code || '')
|
|
111
119
|
.replace('{phase}', phaseInfo.phase_number)
|
|
112
120
|
.replace('{slug}', phaseInfo.phase_slug || 'phase')
|
|
113
121
|
: config.branching_strategy === 'milestone'
|
|
@@ -131,10 +139,38 @@ function cmdInitExecutePhase(cwd, phase, raw) {
|
|
|
131
139
|
config_path: toPosixPath(path.relative(cwd, path.join(planningDir(cwd), 'config.json'))),
|
|
132
140
|
};
|
|
133
141
|
|
|
142
|
+
// Optional --validate: run state validation and include warnings (#1627)
|
|
143
|
+
if (options.validate) {
|
|
144
|
+
try {
|
|
145
|
+
const { cmdStateValidate } = require('./state.cjs');
|
|
146
|
+
// Capture validate output by temporarily redirecting
|
|
147
|
+
const statePath = path.join(planningDir(cwd), 'STATE.md');
|
|
148
|
+
if (fs.existsSync(statePath)) {
|
|
149
|
+
const stateContent = fs.readFileSync(statePath, 'utf-8');
|
|
150
|
+
const { stateExtractField } = require('./state.cjs');
|
|
151
|
+
const status = stateExtractField(stateContent, 'Status') || '';
|
|
152
|
+
result.state_validation_ran = true;
|
|
153
|
+
// Simple inline validation — check for obvious drift
|
|
154
|
+
const warnings = [];
|
|
155
|
+
const phasesPath = planningPaths(cwd).phases;
|
|
156
|
+
if (phaseInfo && phaseInfo.directory && fs.existsSync(path.join(cwd, phaseInfo.directory))) {
|
|
157
|
+
const files = fs.readdirSync(path.join(cwd, phaseInfo.directory));
|
|
158
|
+
const diskPlans = files.filter(f => f.match(/-PLAN\.md$/i)).length;
|
|
159
|
+
const totalPlansRaw = stateExtractField(stateContent, 'Total Plans in Phase');
|
|
160
|
+
const totalPlansInPhase = totalPlansRaw ? parseInt(totalPlansRaw, 10) : null;
|
|
161
|
+
if (totalPlansInPhase !== null && diskPlans !== totalPlansInPhase) {
|
|
162
|
+
warnings.push(`Plan count mismatch: STATE.md says ${totalPlansInPhase}, disk has ${diskPlans}`);
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
result.state_warnings = warnings;
|
|
166
|
+
}
|
|
167
|
+
} catch { /* intentionally empty */ }
|
|
168
|
+
}
|
|
169
|
+
|
|
134
170
|
output(withProjectRoot(cwd, result), raw);
|
|
135
171
|
}
|
|
136
172
|
|
|
137
|
-
function cmdInitPlanPhase(cwd, phase, raw) {
|
|
173
|
+
function cmdInitPlanPhase(cwd, phase, raw, options = {}) {
|
|
138
174
|
if (!phase) {
|
|
139
175
|
error('phase required for init plan-phase');
|
|
140
176
|
}
|
|
@@ -235,6 +271,25 @@ function cmdInitPlanPhase(cwd, phase, raw) {
|
|
|
235
271
|
} catch { /* intentionally empty */ }
|
|
236
272
|
}
|
|
237
273
|
|
|
274
|
+
// Optional --validate: run state validation and include warnings (#1627)
|
|
275
|
+
if (options.validate) {
|
|
276
|
+
try {
|
|
277
|
+
const statePath = path.join(planningDir(cwd), 'STATE.md');
|
|
278
|
+
if (fs.existsSync(statePath)) {
|
|
279
|
+
const { stateExtractField } = require('./state.cjs');
|
|
280
|
+
const stateContent = fs.readFileSync(statePath, 'utf-8');
|
|
281
|
+
const warnings = [];
|
|
282
|
+
result.state_validation_ran = true;
|
|
283
|
+
const totalPlansRaw = stateExtractField(stateContent, 'Total Plans in Phase');
|
|
284
|
+
const totalPlansInPhase = totalPlansRaw ? parseInt(totalPlansRaw, 10) : null;
|
|
285
|
+
if (totalPlansInPhase !== null && phaseInfo && totalPlansInPhase !== (phaseInfo.plans?.length || 0)) {
|
|
286
|
+
warnings.push(`Plan count mismatch: STATE.md says ${totalPlansInPhase}, disk has ${phaseInfo.plans?.length || 0}`);
|
|
287
|
+
}
|
|
288
|
+
result.state_warnings = warnings;
|
|
289
|
+
}
|
|
290
|
+
} catch { /* intentionally empty */ }
|
|
291
|
+
}
|
|
292
|
+
|
|
238
293
|
output(withProjectRoot(cwd, result), raw);
|
|
239
294
|
}
|
|
240
295
|
|
|
@@ -275,7 +330,7 @@ function cmdInitNewProject(cwd, raw) {
|
|
|
275
330
|
'.ex', '.exs', // Elixir
|
|
276
331
|
'.clj', // Clojure
|
|
277
332
|
]);
|
|
278
|
-
const skipDirs = new Set(['node_modules', '.git', '.planning', '.OpenCode', '__pycache__', 'target', 'dist', 'build']);
|
|
333
|
+
const skipDirs = new Set(['node_modules', '.git', '.planning', '.OpenCode', '.codex', '__pycache__', 'target', 'dist', 'build']);
|
|
279
334
|
function findCodeFiles(dir, depth) {
|
|
280
335
|
if (depth > 3) return false;
|
|
281
336
|
let entries;
|
|
@@ -779,6 +834,7 @@ function cmdInitMapCodebase(cwd, raw) {
|
|
|
779
834
|
commit_docs: config.commit_docs,
|
|
780
835
|
search_gitignored: config.search_gitignored,
|
|
781
836
|
parallelization: config.parallelization,
|
|
837
|
+
subagent_timeout: config.subagent_timeout,
|
|
782
838
|
|
|
783
839
|
// Paths
|
|
784
840
|
codebase_dir: '.planning/codebase',
|
|
@@ -846,7 +902,7 @@ function cmdInitManager(cwd, raw) {
|
|
|
846
902
|
try {
|
|
847
903
|
const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
|
|
848
904
|
const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).filter(isDirInMilestone);
|
|
849
|
-
const dirMatch = dirs.find(d =>
|
|
905
|
+
const dirMatch = dirs.find(d => phaseTokenMatches(d, normalized));
|
|
850
906
|
|
|
851
907
|
if (dirMatch) {
|
|
852
908
|
const fullDir = path.join(phasesDir, dirMatch);
|
|
@@ -954,9 +1010,11 @@ function cmdInitManager(cwd, raw) {
|
|
|
954
1010
|
} catch { /* intentionally empty */ }
|
|
955
1011
|
|
|
956
1012
|
// Compute recommended actions (execute > plan > discuss)
|
|
1013
|
+
// Skip BACKLOG phases (999.x numbering) — they are parked ideas, not active work
|
|
957
1014
|
const recommendedActions = [];
|
|
958
1015
|
for (const phase of phases) {
|
|
959
1016
|
if (phase.disk_status === 'complete') continue;
|
|
1017
|
+
if (/^999(?:\.|$)/.test(phase.number)) continue;
|
|
960
1018
|
|
|
961
1019
|
if (phase.disk_status === 'planned' && phase.deps_satisfied) {
|
|
962
1020
|
recommendedActions.push({
|
|
@@ -1024,6 +1082,27 @@ function cmdInitManager(cwd, raw) {
|
|
|
1024
1082
|
});
|
|
1025
1083
|
|
|
1026
1084
|
const completedCount = phases.filter(p => p.disk_status === 'complete').length;
|
|
1085
|
+
|
|
1086
|
+
// read manager flags from config (passthrough flags for each step)
|
|
1087
|
+
// Validate: flags must be CLI-safe (only --flags, alphanumeric, hyphens, spaces)
|
|
1088
|
+
const sanitizeFlags = (raw) => {
|
|
1089
|
+
const val = typeof raw === 'string' ? raw : '';
|
|
1090
|
+
if (!val) return '';
|
|
1091
|
+
// Allow only --flag patterns with alphanumeric/hyphen values separated by spaces
|
|
1092
|
+
const tokens = val.split(/\s+/).filter(Boolean);
|
|
1093
|
+
const safe = tokens.every(t => /^--[a-zA-Z0-9][-a-zA-Z0-9]*$/.test(t) || /^[a-zA-Z0-9][-a-zA-Z0-9_.]*$/.test(t));
|
|
1094
|
+
if (!safe) {
|
|
1095
|
+
process.stderr.write(`gsd-tools: warning: manager.flags contains invalid tokens, ignoring: ${val}\n`);
|
|
1096
|
+
return '';
|
|
1097
|
+
}
|
|
1098
|
+
return val;
|
|
1099
|
+
};
|
|
1100
|
+
const managerFlags = {
|
|
1101
|
+
discuss: sanitizeFlags(config.manager && config.manager.flags && config.manager.flags.discuss),
|
|
1102
|
+
plan: sanitizeFlags(config.manager && config.manager.flags && config.manager.flags.plan),
|
|
1103
|
+
execute: sanitizeFlags(config.manager && config.manager.flags && config.manager.flags.execute),
|
|
1104
|
+
};
|
|
1105
|
+
|
|
1027
1106
|
const result = {
|
|
1028
1107
|
milestone_version: milestone.version,
|
|
1029
1108
|
milestone_name: milestone.name,
|
|
@@ -1037,6 +1116,7 @@ function cmdInitManager(cwd, raw) {
|
|
|
1037
1116
|
project_exists: pathExistsInternal(cwd, '.planning/PROJECT.md'),
|
|
1038
1117
|
roadmap_exists: true,
|
|
1039
1118
|
state_exists: true,
|
|
1119
|
+
manager_flags: managerFlags,
|
|
1040
1120
|
};
|
|
1041
1121
|
|
|
1042
1122
|
output(withProjectRoot(cwd, result), raw);
|
|
@@ -246,7 +246,28 @@ function cmdMilestoneComplete(cwd, version, options, raw) {
|
|
|
246
246
|
output(result, raw);
|
|
247
247
|
}
|
|
248
248
|
|
|
249
|
+
/**
 * Delete every phase directory under the planning phases root.
 * Non-directory entries directly inside the phases root are left untouched.
 *
 * @param {string} cwd - Project root
 * @param {boolean} raw - Pass raw JSON flag through to output()
 */
function cmdPhasesClear(cwd, raw) {
  const phasesDir = planningPaths(cwd).phases;
  let cleared = 0;

  if (fs.existsSync(phasesDir)) {
    try {
      for (const entry of fs.readdirSync(phasesDir, { withFileTypes: true })) {
        if (!entry.isDirectory()) continue;
        fs.rmSync(path.join(phasesDir, entry.name), { recursive: true, force: true });
        cleared += 1;
      }
    } catch (e) {
      error('Failed to clear phases directory: ' + e.message);
    }
  }

  output({ cleared }, raw, `${cleared} phase director${cleared === 1 ? 'y' : 'ies'} cleared`);
}
|
|
268
|
+
|
|
249
269
|
module.exports = {
|
|
250
270
|
cmdRequirementsMarkComplete,
|
|
251
271
|
cmdMilestoneComplete,
|
|
272
|
+
cmdPhasesClear,
|
|
252
273
|
};
|
|
@@ -22,6 +22,8 @@ const MODEL_PROFILES = {
|
|
|
22
22
|
'gsd-ui-researcher': { quality: 'opus', balanced: 'sonnet', budget: 'haiku' },
|
|
23
23
|
'gsd-ui-checker': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
|
|
24
24
|
'gsd-ui-auditor': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
|
|
25
|
+
'gsd-doc-writer': { quality: 'opus', balanced: 'sonnet', budget: 'haiku' },
|
|
26
|
+
'gsd-doc-verifier': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
|
|
25
27
|
};
|
|
26
28
|
const VALID_PROFILES = Object.keys(MODEL_PROFILES['gsd-planner']);
|
|
27
29
|
|