gsd-opencode 1.22.1 → 1.33.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/agents/gsd-advisor-researcher.md +112 -0
- package/agents/gsd-assumptions-analyzer.md +110 -0
- package/agents/gsd-codebase-mapper.md +0 -2
- package/agents/gsd-debugger.md +117 -2
- package/agents/gsd-doc-verifier.md +207 -0
- package/agents/gsd-doc-writer.md +608 -0
- package/agents/gsd-executor.md +45 -4
- package/agents/gsd-integration-checker.md +0 -2
- package/agents/gsd-nyquist-auditor.md +0 -2
- package/agents/gsd-phase-researcher.md +191 -5
- package/agents/gsd-plan-checker.md +152 -5
- package/agents/gsd-planner.md +131 -157
- package/agents/gsd-project-researcher.md +28 -3
- package/agents/gsd-research-synthesizer.md +0 -2
- package/agents/gsd-roadmapper.md +29 -2
- package/agents/gsd-security-auditor.md +129 -0
- package/agents/gsd-ui-auditor.md +485 -0
- package/agents/gsd-ui-checker.md +305 -0
- package/agents/gsd-ui-researcher.md +368 -0
- package/agents/gsd-user-profiler.md +173 -0
- package/agents/gsd-verifier.md +207 -22
- package/commands/gsd/gsd-add-backlog.md +76 -0
- package/commands/gsd/gsd-analyze-dependencies.md +34 -0
- package/commands/gsd/gsd-audit-uat.md +24 -0
- package/commands/gsd/gsd-autonomous.md +45 -0
- package/commands/gsd/gsd-cleanup.md +5 -0
- package/commands/gsd/gsd-debug.md +29 -21
- package/commands/gsd/gsd-discuss-phase.md +15 -36
- package/commands/gsd/gsd-do.md +30 -0
- package/commands/gsd/gsd-docs-update.md +48 -0
- package/commands/gsd/gsd-execute-phase.md +24 -2
- package/commands/gsd/gsd-fast.md +30 -0
- package/commands/gsd/gsd-forensics.md +56 -0
- package/commands/gsd/gsd-help.md +2 -0
- package/commands/gsd/gsd-join-discord.md +2 -1
- package/commands/gsd/gsd-list-workspaces.md +19 -0
- package/commands/gsd/gsd-manager.md +40 -0
- package/commands/gsd/gsd-milestone-summary.md +51 -0
- package/commands/gsd/gsd-new-project.md +4 -0
- package/commands/gsd/gsd-new-workspace.md +44 -0
- package/commands/gsd/gsd-next.md +24 -0
- package/commands/gsd/gsd-note.md +34 -0
- package/commands/gsd/gsd-plan-phase.md +8 -1
- package/commands/gsd/gsd-plant-seed.md +28 -0
- package/commands/gsd/gsd-pr-branch.md +25 -0
- package/commands/gsd/gsd-profile-user.md +46 -0
- package/commands/gsd/gsd-quick.md +7 -3
- package/commands/gsd/gsd-reapply-patches.md +178 -45
- package/commands/gsd/gsd-remove-workspace.md +26 -0
- package/commands/gsd/gsd-research-phase.md +7 -12
- package/commands/gsd/gsd-review-backlog.md +62 -0
- package/commands/gsd/gsd-review.md +38 -0
- package/commands/gsd/gsd-secure-phase.md +35 -0
- package/commands/gsd/gsd-session-report.md +19 -0
- package/commands/gsd/gsd-set-profile.md +24 -23
- package/commands/gsd/gsd-ship.md +23 -0
- package/commands/gsd/gsd-stats.md +18 -0
- package/commands/gsd/gsd-thread.md +127 -0
- package/commands/gsd/gsd-ui-phase.md +34 -0
- package/commands/gsd/gsd-ui-review.md +32 -0
- package/commands/gsd/gsd-workstreams.md +71 -0
- package/get-shit-done/bin/gsd-tools.cjs +450 -90
- package/get-shit-done/bin/lib/commands.cjs +489 -24
- package/get-shit-done/bin/lib/config.cjs +329 -48
- package/get-shit-done/bin/lib/core.cjs +1143 -102
- package/get-shit-done/bin/lib/docs.cjs +267 -0
- package/get-shit-done/bin/lib/frontmatter.cjs +125 -43
- package/get-shit-done/bin/lib/init.cjs +918 -106
- package/get-shit-done/bin/lib/milestone.cjs +65 -33
- package/get-shit-done/bin/lib/model-profiles.cjs +70 -0
- package/get-shit-done/bin/lib/phase.cjs +434 -404
- package/get-shit-done/bin/lib/profile-output.cjs +1048 -0
- package/get-shit-done/bin/lib/profile-pipeline.cjs +539 -0
- package/get-shit-done/bin/lib/roadmap.cjs +156 -101
- package/get-shit-done/bin/lib/schema-detect.cjs +238 -0
- package/get-shit-done/bin/lib/security.cjs +384 -0
- package/get-shit-done/bin/lib/state.cjs +711 -79
- package/get-shit-done/bin/lib/template.cjs +2 -2
- package/get-shit-done/bin/lib/uat.cjs +282 -0
- package/get-shit-done/bin/lib/verify.cjs +254 -42
- package/get-shit-done/bin/lib/workstream.cjs +495 -0
- package/get-shit-done/references/agent-contracts.md +79 -0
- package/get-shit-done/references/artifact-types.md +113 -0
- package/get-shit-done/references/checkpoints.md +12 -10
- package/get-shit-done/references/context-budget.md +49 -0
- package/get-shit-done/references/continuation-format.md +15 -15
- package/get-shit-done/references/decimal-phase-calculation.md +2 -3
- package/get-shit-done/references/domain-probes.md +125 -0
- package/get-shit-done/references/gate-prompts.md +100 -0
- package/get-shit-done/references/git-integration.md +47 -0
- package/get-shit-done/references/model-profile-resolution.md +2 -0
- package/get-shit-done/references/model-profiles.md +62 -16
- package/get-shit-done/references/phase-argument-parsing.md +2 -2
- package/get-shit-done/references/planner-gap-closure.md +62 -0
- package/get-shit-done/references/planner-reviews.md +39 -0
- package/get-shit-done/references/planner-revision.md +87 -0
- package/get-shit-done/references/planning-config.md +18 -1
- package/get-shit-done/references/revision-loop.md +97 -0
- package/get-shit-done/references/ui-brand.md +2 -2
- package/get-shit-done/references/universal-anti-patterns.md +58 -0
- package/get-shit-done/references/user-profiling.md +681 -0
- package/get-shit-done/references/workstream-flag.md +111 -0
- package/get-shit-done/templates/SECURITY.md +61 -0
- package/get-shit-done/templates/UAT.md +21 -3
- package/get-shit-done/templates/UI-SPEC.md +100 -0
- package/get-shit-done/templates/VALIDATION.md +3 -3
- package/get-shit-done/templates/claude-md.md +145 -0
- package/get-shit-done/templates/config.json +14 -3
- package/get-shit-done/templates/context.md +61 -6
- package/get-shit-done/templates/debug-subagent-prompt.md +2 -6
- package/get-shit-done/templates/dev-preferences.md +21 -0
- package/get-shit-done/templates/discussion-log.md +63 -0
- package/get-shit-done/templates/phase-prompt.md +46 -5
- package/get-shit-done/templates/planner-subagent-prompt.md +2 -10
- package/get-shit-done/templates/project.md +2 -0
- package/get-shit-done/templates/state.md +2 -2
- package/get-shit-done/templates/user-profile.md +146 -0
- package/get-shit-done/workflows/add-phase.md +4 -4
- package/get-shit-done/workflows/add-tests.md +4 -4
- package/get-shit-done/workflows/add-todo.md +4 -4
- package/get-shit-done/workflows/analyze-dependencies.md +96 -0
- package/get-shit-done/workflows/audit-milestone.md +20 -16
- package/get-shit-done/workflows/audit-uat.md +109 -0
- package/get-shit-done/workflows/autonomous.md +1036 -0
- package/get-shit-done/workflows/check-todos.md +4 -4
- package/get-shit-done/workflows/cleanup.md +4 -4
- package/get-shit-done/workflows/complete-milestone.md +22 -10
- package/get-shit-done/workflows/diagnose-issues.md +21 -7
- package/get-shit-done/workflows/discovery-phase.md +2 -2
- package/get-shit-done/workflows/discuss-phase-assumptions.md +671 -0
- package/get-shit-done/workflows/discuss-phase-power.md +291 -0
- package/get-shit-done/workflows/discuss-phase.md +558 -47
- package/get-shit-done/workflows/do.md +104 -0
- package/get-shit-done/workflows/docs-update.md +1093 -0
- package/get-shit-done/workflows/execute-phase.md +741 -58
- package/get-shit-done/workflows/execute-plan.md +77 -12
- package/get-shit-done/workflows/fast.md +105 -0
- package/get-shit-done/workflows/forensics.md +265 -0
- package/get-shit-done/workflows/health.md +28 -6
- package/get-shit-done/workflows/help.md +127 -7
- package/get-shit-done/workflows/insert-phase.md +4 -4
- package/get-shit-done/workflows/list-phase-assumptions.md +2 -2
- package/get-shit-done/workflows/list-workspaces.md +56 -0
- package/get-shit-done/workflows/manager.md +363 -0
- package/get-shit-done/workflows/map-codebase.md +83 -44
- package/get-shit-done/workflows/milestone-summary.md +223 -0
- package/get-shit-done/workflows/new-milestone.md +133 -25
- package/get-shit-done/workflows/new-project.md +216 -54
- package/get-shit-done/workflows/new-workspace.md +237 -0
- package/get-shit-done/workflows/next.md +97 -0
- package/get-shit-done/workflows/node-repair.md +92 -0
- package/get-shit-done/workflows/note.md +156 -0
- package/get-shit-done/workflows/pause-work.md +132 -15
- package/get-shit-done/workflows/plan-milestone-gaps.md +6 -7
- package/get-shit-done/workflows/plan-phase.md +513 -62
- package/get-shit-done/workflows/plant-seed.md +169 -0
- package/get-shit-done/workflows/pr-branch.md +129 -0
- package/get-shit-done/workflows/profile-user.md +450 -0
- package/get-shit-done/workflows/progress.md +154 -29
- package/get-shit-done/workflows/quick.md +285 -111
- package/get-shit-done/workflows/remove-phase.md +2 -2
- package/get-shit-done/workflows/remove-workspace.md +90 -0
- package/get-shit-done/workflows/research-phase.md +13 -9
- package/get-shit-done/workflows/resume-project.md +37 -18
- package/get-shit-done/workflows/review.md +281 -0
- package/get-shit-done/workflows/secure-phase.md +154 -0
- package/get-shit-done/workflows/session-report.md +146 -0
- package/get-shit-done/workflows/set-profile.md +2 -2
- package/get-shit-done/workflows/settings.md +91 -11
- package/get-shit-done/workflows/ship.md +237 -0
- package/get-shit-done/workflows/stats.md +60 -0
- package/get-shit-done/workflows/transition.md +150 -23
- package/get-shit-done/workflows/ui-phase.md +292 -0
- package/get-shit-done/workflows/ui-review.md +183 -0
- package/get-shit-done/workflows/update.md +262 -30
- package/get-shit-done/workflows/validate-phase.md +14 -17
- package/get-shit-done/workflows/verify-phase.md +143 -11
- package/get-shit-done/workflows/verify-work.md +141 -39
- package/package.json +1 -1
- package/skills/gsd-audit-milestone/SKILL.md +29 -0
- package/skills/gsd-cleanup/SKILL.md +19 -0
- package/skills/gsd-complete-milestone/SKILL.md +131 -0
- package/skills/gsd-discuss-phase/SKILL.md +54 -0
- package/skills/gsd-execute-phase/SKILL.md +49 -0
- package/skills/gsd-plan-phase/SKILL.md +37 -0
- package/skills/gsd-ui-phase/SKILL.md +24 -0
- package/skills/gsd-ui-review/SKILL.md +24 -0
- package/skills/gsd-verify-work/SKILL.md +30 -0
|
@@ -0,0 +1,267 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Docs — Commands for the docs-update workflow
|
|
3
|
+
*
|
|
4
|
+
* Provides `cmdDocsInit` which returns project signals, existing doc inventory
|
|
5
|
+
* with GSD marker detection, doc tooling detection, monorepo awareness, and
|
|
6
|
+
* model resolution. Used by Phase 2 to route doc generation appropriately.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
const fs = require('fs');
|
|
10
|
+
const path = require('path');
|
|
11
|
+
const { output, loadConfig, resolveModelInternal, pathExistsInternal, toPosixPath, checkAgentsInstalled } = require('./core.cjs');
|
|
12
|
+
|
|
13
|
+
// ─── Constants ────────────────────────────────────────────────────────────────
|
|
14
|
+
|
|
15
|
+
const GSD_MARKER = '<!-- generated-by: gsd-doc-writer -->';
|
|
16
|
+
|
|
17
|
+
const SKIP_DIRS = new Set([
|
|
18
|
+
'node_modules', '.git', '.planning', '.OpenCode', '__pycache__',
|
|
19
|
+
'target', 'dist', 'build', '.next', '.nuxt', 'coverage',
|
|
20
|
+
'.vscode', '.idea',
|
|
21
|
+
]);
|
|
22
|
+
|
|
23
|
+
// ─── Private helpers ──────────────────────────────────────────────────────────
|
|
24
|
+
|
|
25
|
+
/**
 * Check whether a file begins with the GSD doc writer marker.
 * Reads at most the first 500 bytes — avoids loading large files.
 *
 * @param {string} filePath - Absolute path to the file
 * @returns {boolean} True when the marker appears in the file head; false on any I/O error
 */
function hasGsdMarker(filePath) {
  let fd;
  try {
    fd = fs.openSync(filePath, 'r');
    const buf = Buffer.alloc(500);
    const bytesRead = fs.readSync(fd, buf, 0, 500, 0);
    return buf.slice(0, bytesRead).toString('utf-8').includes(GSD_MARKER);
  } catch {
    // Unreadable / missing file — treat as "no marker" (best-effort scan).
    return false;
  } finally {
    // Always release the descriptor. The original closed it only on the
    // success path, leaking the fd whenever readSync threw.
    if (fd !== undefined) {
      try { fs.closeSync(fd); } catch { /* already closed or invalid */ }
    }
  }
}
|
|
43
|
+
|
|
44
|
+
/**
 * Inventory Markdown files for the docs-update workflow: root-level *.md files
 * (non-recursive) plus the docs/ tree — or a documentation|doc/ fallback when
 * docs/ is absent — up to 4 levels deep, skipping directories in SKIP_DIRS.
 *
 * @param {string} cwd - Project root
 * @returns {Array<{path: string, has_gsd_marker: boolean}>} Sorted by path
 */
function scanExistingDocs(cwd) {
  const MAX_DEPTH = 4;
  const found = [];

  // Record a single markdown file in the inventory (project-relative POSIX path).
  const record = (absPath) => {
    found.push({
      path: toPosixPath(path.relative(cwd, absPath)),
      has_gsd_marker: hasGsdMarker(absPath),
    });
  };

  /**
   * Depth-limited recursive walk for .md files.
   * @param {string} dir - Directory to scan
   * @param {number} depth - Current depth (1-based)
   */
  const walkDir = (dir, depth) => {
    if (depth > MAX_DEPTH) return;
    try {
      for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
        if (SKIP_DIRS.has(entry.name)) continue;
        const abs = path.join(dir, entry.name);
        if (entry.isDirectory()) {
          walkDir(abs, depth + 1);
        } else if (entry.isFile() && entry.name.toLowerCase().endsWith('.md')) {
          record(abs);
        }
      }
    } catch { /* directory may not exist — best-effort */ }
  };

  // Root-level .md files only (no recursion at the project root).
  try {
    for (const entry of fs.readdirSync(cwd, { withFileTypes: true })) {
      if (entry.isFile() && entry.name.toLowerCase().endsWith('.md')) {
        record(path.join(cwd, entry.name));
      }
    }
  } catch { /* best-effort */ }

  // Primary docs directory, scanned recursively.
  const docsDir = path.join(cwd, 'docs');
  walkDir(docsDir, 1);

  // Fallback: when docs/ does not exist, scan the first of documentation|doc/.
  try {
    fs.statSync(docsDir);
  } catch {
    for (const alt of ['documentation', 'doc']) {
      const altDir = path.join(cwd, alt);
      try {
        if (fs.statSync(altDir).isDirectory()) {
          walkDir(altDir, 1);
          break;
        }
      } catch { /* not present */ }
    }
  }

  return found.sort((a, b) => a.path.localeCompare(b.path));
}
|
|
112
|
+
|
|
113
|
+
/**
 * Detect project type signals from the filesystem and package.json.
 * All checks are best-effort and never throw.
 *
 * @param {string} cwd - Project root
 * @returns {Object} Boolean signal fields
 */
function detectProjectType(cwd) {
  const exists = (rel) => {
    try { return pathExistsInternal(cwd, rel); } catch { return false; }
  };

  // Parse package.json exactly once. The original re-read and re-parsed the
  // same file for each signal (bin, workspaces, devDependencies).
  let pkg = null;
  try {
    pkg = JSON.parse(fs.readFileSync(path.join(cwd, 'package.json'), 'utf-8'));
  } catch { /* no package.json or invalid JSON */ }

  // has_cli_bin: package.json has a non-empty `bin` field
  const has_cli_bin = !!(pkg && pkg.bin &&
    (typeof pkg.bin === 'string' || Object.keys(pkg.bin).length > 0));

  // is_monorepo: pnpm-workspace.yaml, lerna.json, or package.json workspaces
  let is_monorepo = exists('pnpm-workspace.yaml') || exists('lerna.json');
  if (!is_monorepo && pkg) {
    is_monorepo = Array.isArray(pkg.workspaces) && pkg.workspaces.length > 0;
  }

  // has_tests: common test directories or test frameworks in devDependencies
  let has_tests = exists('test') || exists('tests') || exists('__tests__') || exists('spec');
  if (!has_tests && pkg) {
    const devDeps = Object.keys(pkg.devDependencies || {});
    has_tests = devDeps.some((d) => ['vitest', 'jest', 'mocha', 'jasmine', 'ava'].includes(d));
  }

  // has_deploy_config: various deployment config files
  const deployFiles = [
    'Dockerfile', 'docker-compose.yml', 'docker-compose.yaml',
    'fly.toml', 'render.yaml', 'vercel.json', 'netlify.toml', 'railway.json',
    '.github/workflows/deploy.yml', '.github/workflows/deploy.yaml',
  ];
  const has_deploy_config = deployFiles.some((f) => exists(f));

  return {
    has_package_json: exists('package.json'),
    has_api_routes: (
      exists('src/app/api') || exists('routes') || exists('src/routes') ||
      exists('api') || exists('server')
    ),
    has_cli_bin,
    is_open_source: exists('LICENSE') || exists('LICENSE.md'),
    has_deploy_config,
    is_monorepo,
    has_tests,
  };
}
|
|
172
|
+
|
|
173
|
+
/**
 * Detect known documentation tooling in the project.
 *
 * @param {string} cwd - Project root
 * @returns {Object} Boolean detection fields
 */
function detectDocTooling(cwd) {
  // Never-throwing existence probe.
  const safeExists = (rel) => {
    try { return pathExistsInternal(cwd, rel); } catch { return false; }
  };
  // True when any of the candidate paths exists (short-circuits like ||).
  const anyOf = (...candidates) => candidates.some((p) => safeExists(p));

  return {
    docusaurus: anyOf('docusaurus.config.js', 'docusaurus.config.ts'),
    vitepress: anyOf('.vitepress/config.js', '.vitepress/config.ts', '.vitepress/config.mts'),
    mkdocs: safeExists('mkdocs.yml'),
    storybook: safeExists('.storybook'),
  };
}
|
|
195
|
+
|
|
196
|
+
/**
 * Extract monorepo workspace globs from pnpm-workspace.yaml, package.json
 * workspaces, or lerna.json (checked in that order).
 *
 * @param {string} cwd - Project root
 * @returns {string[]} Array of workspace glob patterns, or [] if not a monorepo
 */
function detectMonorepoWorkspaces(cwd) {
  // pnpm-workspace.yaml — only list items under the top-level `packages:` key.
  // The original collected EVERY "- item" line in the file, wrongly sweeping up
  // entries under unrelated keys such as `onlyBuiltDependencies:` or `catalog:`.
  try {
    const content = fs.readFileSync(path.join(cwd, 'pnpm-workspace.yaml'), 'utf-8');
    const workspaces = [];
    let inPackages = false;
    for (const line of content.split('\n')) {
      if (/^packages:\s*$/.test(line)) {
        inPackages = true;
        continue;
      }
      // Any other top-level key (non-indented, not a comment or list dash)
      // ends the packages section.
      if (/^[^\s#-]/.test(line)) {
        inPackages = false;
        continue;
      }
      if (!inPackages) continue;
      const m = line.match(/^\s*-\s+['"]?(.+?)['"]?\s*$/);
      if (m) workspaces.push(m[1].trim());
    }
    if (workspaces.length > 0) return workspaces;
  } catch { /* not present */ }

  // package.json workspaces
  try {
    const pkg = JSON.parse(fs.readFileSync(path.join(cwd, 'package.json'), 'utf-8'));
    if (Array.isArray(pkg.workspaces) && pkg.workspaces.length > 0) {
      return pkg.workspaces;
    }
  } catch { /* not present or invalid */ }

  // lerna.json
  try {
    const lerna = JSON.parse(fs.readFileSync(path.join(cwd, 'lerna.json'), 'utf-8'));
    if (Array.isArray(lerna.packages) && lerna.packages.length > 0) {
      return lerna.packages;
    }
  } catch { /* not present or invalid */ }

  return [];
}
|
|
234
|
+
|
|
235
|
+
// ─── Public commands ──────────────────────────────────────────────────────────
|
|
236
|
+
|
|
237
|
+
/**
 * Return JSON context for the docs-update workflow: project signals, existing
 * doc inventory, doc tooling detection, monorepo workspaces, and model
 * resolution. Follows the cmdInitMapCodebase pattern.
 *
 * @example
 * node gsd-tools.cjs docs-init --raw
 *
 * @param {string} cwd - Project root directory
 * @param {boolean} raw - Pass raw JSON flag through to output()
 */
function cmdDocsInit(cwd, raw) {
  const config = loadConfig(cwd);

  const context = {
    doc_writer_model: resolveModelInternal(cwd, 'gsd-doc-writer'),
    commit_docs: config.commit_docs,
    existing_docs: scanExistingDocs(cwd),
    project_type: detectProjectType(cwd),
    doc_tooling: detectDocTooling(cwd),
    monorepo_workspaces: detectMonorepoWorkspaces(cwd),
    planning_exists: pathExistsInternal(cwd, '.planning'),
  };

  // Inject project_root and agent installation status (mirrors withProjectRoot in init.cjs)
  context.project_root = cwd;
  const { agents_installed, missing_agents } = checkAgentsInstalled();
  context.agents_installed = agents_installed;
  context.missing_agents = missing_agents;

  output(context, raw);
}
|
|
266
|
+
|
|
267
|
+
module.exports = { cmdDocsInit };
|
|
@@ -4,17 +4,53 @@
|
|
|
4
4
|
|
|
5
5
|
const fs = require('fs');
|
|
6
6
|
const path = require('path');
|
|
7
|
-
const { safeReadFile, output, error } = require('./core.cjs');
|
|
7
|
+
const { safeReadFile, normalizeMd, output, error } = require('./core.cjs');
|
|
8
8
|
|
|
9
9
|
// ─── Parsing engine ───────────────────────────────────────────────────────────
|
|
10
10
|
|
|
11
|
+
/**
 * Split a YAML inline array body on commas, respecting quoted strings.
 * e.g. '"a, b", c' → ['a, b', 'c']
 *
 * Quotes are stripped from the output; empty/whitespace-only segments are
 * dropped.
 *
 * @param {string} body - Inline array contents (without the surrounding [ ])
 * @returns {string[]} Trimmed, unquoted items
 */
function splitInlineArray(body) {
  const items = [];
  let buf = '';
  let quote = null; // null | '"' | "'"

  // Push the trimmed accumulator if non-empty, then reset it.
  const flush = () => {
    const value = buf.trim();
    if (value) items.push(value);
    buf = '';
  };

  for (const ch of body) {
    if (quote) {
      // Inside a quoted run: the matching quote closes it, everything else is literal.
      if (ch === quote) {
        quote = null;
      } else {
        buf += ch;
      }
    } else if (ch === '"' || ch === "'") {
      quote = ch;
    } else if (ch === ',') {
      flush();
    } else {
      buf += ch;
    }
  }
  flush();
  return items;
}
|
|
42
|
+
|
|
11
43
|
function extractFrontmatter(content) {
|
|
12
44
|
const frontmatter = {};
|
|
13
|
-
|
|
45
|
+
// Find ALL frontmatter blocks at the start of the file.
|
|
46
|
+
// If multiple blocks exist (corruption from CRLF mismatch), use the LAST one
|
|
47
|
+
// since it represents the most recent state sync.
|
|
48
|
+
const allBlocks = [...content.matchAll(/(?:^|\n)\s*---\r?\n([\s\S]+?)\r?\n---/g)];
|
|
49
|
+
const match = allBlocks.length > 0 ? allBlocks[allBlocks.length - 1] : null;
|
|
14
50
|
if (!match) return frontmatter;
|
|
15
51
|
|
|
16
52
|
const yaml = match[1];
|
|
17
|
-
const lines = yaml.split(
|
|
53
|
+
const lines = yaml.split(/\r?\n/);
|
|
18
54
|
|
|
19
55
|
// Stack to track nested objects: [{obj, key, indent}]
|
|
20
56
|
// obj = object to write to, key = current key collecting array items, indent = indentation level
|
|
@@ -49,8 +85,8 @@ function extractFrontmatter(content) {
|
|
|
49
85
|
// Push new context for potential nested content
|
|
50
86
|
stack.push({ obj: current.obj[key], key: null, indent });
|
|
51
87
|
} else if (value.startsWith('[') && value.endsWith(']')) {
|
|
52
|
-
// Inline array: key: [a, b, c]
|
|
53
|
-
current.obj[key] = value.slice(1, -1)
|
|
88
|
+
// Inline array: key: [a, b, c] — quote-aware split (REG-04 fix)
|
|
89
|
+
current.obj[key] = splitInlineArray(value.slice(1, -1));
|
|
54
90
|
current.key = null;
|
|
55
91
|
} else {
|
|
56
92
|
// Simple key: value
|
|
@@ -149,7 +185,7 @@ function reconstructFrontmatter(obj) {
|
|
|
149
185
|
|
|
150
186
|
function spliceFrontmatter(content, newObj) {
|
|
151
187
|
const yamlStr = reconstructFrontmatter(newObj);
|
|
152
|
-
const match = content.match(/^---\n[\s\S]+?\n---/);
|
|
188
|
+
const match = content.match(/^---\r?\n[\s\S]+?\r?\n---/);
|
|
153
189
|
if (match) {
|
|
154
190
|
return `---\n${yamlStr}\n---` + content.slice(match[0].length);
|
|
155
191
|
}
|
|
@@ -159,66 +195,108 @@ function spliceFrontmatter(content, newObj) {
|
|
|
159
195
|
function parseMustHavesBlock(content, blockName) {
|
|
160
196
|
// Extract a specific block from must_haves in raw frontmatter YAML
|
|
161
197
|
// Handles 3-level nesting: must_haves > artifacts/key_links > [{path, provides, ...}]
|
|
162
|
-
const fmMatch = content.match(/^---\n([\s\S]+?)\n---/);
|
|
198
|
+
const fmMatch = content.match(/^---\r?\n([\s\S]+?)\r?\n---/);
|
|
163
199
|
if (!fmMatch) return [];
|
|
164
200
|
|
|
165
201
|
const yaml = fmMatch[1];
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
const
|
|
202
|
+
|
|
203
|
+
// Find must_haves: first to detect its indentation level
|
|
204
|
+
const mustHavesMatch = yaml.match(/^(\s*)must_haves:\s*$/m);
|
|
205
|
+
if (!mustHavesMatch) return [];
|
|
206
|
+
const mustHavesIndent = mustHavesMatch[1].length;
|
|
207
|
+
|
|
208
|
+
// Find the block (e.g., "truths:", "artifacts:", "key_links:") under must_haves
|
|
209
|
+
// It must be indented more than must_haves but we detect the actual indent dynamically
|
|
210
|
+
const blockPattern = new RegExp(`^(\\s+)${blockName}:\\s*$`, 'm');
|
|
211
|
+
const blockMatch = yaml.match(blockPattern);
|
|
212
|
+
if (!blockMatch) return [];
|
|
213
|
+
|
|
214
|
+
const blockIndent = blockMatch[1].length;
|
|
215
|
+
// The block must be nested under must_haves (more indented)
|
|
216
|
+
if (blockIndent <= mustHavesIndent) return [];
|
|
217
|
+
|
|
218
|
+
// Find where the block starts in the yaml string
|
|
219
|
+
const blockStart = yaml.indexOf(blockMatch[0]);
|
|
169
220
|
if (blockStart === -1) return [];
|
|
170
221
|
|
|
171
222
|
const afterBlock = yaml.slice(blockStart);
|
|
172
|
-
const blockLines = afterBlock.split(
|
|
223
|
+
const blockLines = afterBlock.split(/\r?\n/).slice(1); // skip the header line
|
|
173
224
|
|
|
225
|
+
// List items are indented one level deeper than blockIndent
|
|
226
|
+
// Continuation KVs are indented one level deeper than list items
|
|
174
227
|
const items = [];
|
|
175
228
|
let current = null;
|
|
229
|
+
let listItemIndent = -1; // detected from first "- " line
|
|
176
230
|
|
|
177
231
|
for (const line of blockLines) {
|
|
178
|
-
//
|
|
232
|
+
// Skip empty lines
|
|
179
233
|
if (line.trim() === '') continue;
|
|
180
234
|
const indent = line.match(/^(\s*)/)[1].length;
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
//
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
235
|
+
// Stop at same or lower indent level than the block header
|
|
236
|
+
if (indent <= blockIndent && line.trim() !== '') break;
|
|
237
|
+
|
|
238
|
+
const trimmed = line.trim();
|
|
239
|
+
|
|
240
|
+
if (trimmed.startsWith('- ')) {
|
|
241
|
+
// Detect list item indent from the first occurrence
|
|
242
|
+
if (listItemIndent === -1) listItemIndent = indent;
|
|
243
|
+
|
|
244
|
+
// Only treat as a top-level list item if at the expected indent
|
|
245
|
+
if (indent === listItemIndent) {
|
|
246
|
+
if (current) items.push(current);
|
|
247
|
+
current = {};
|
|
248
|
+
const afterDash = trimmed.slice(2);
|
|
249
|
+
// Check if it's a simple string item (no colon means not a key-value)
|
|
250
|
+
if (!afterDash.includes(':')) {
|
|
251
|
+
current = afterDash.replace(/^["']|["']$/g, '');
|
|
252
|
+
} else {
|
|
253
|
+
// Key-value on same line as dash: "- path: value"
|
|
254
|
+
const kvMatch = afterDash.match(/^(\w+):\s*"?([^"]*)"?\s*$/);
|
|
255
|
+
if (kvMatch) {
|
|
256
|
+
current = {};
|
|
257
|
+
current[kvMatch[1]] = kvMatch[2];
|
|
258
|
+
}
|
|
197
259
|
}
|
|
260
|
+
continue;
|
|
198
261
|
}
|
|
199
|
-
}
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
//
|
|
205
|
-
|
|
206
|
-
}
|
|
207
|
-
// Array items under a key
|
|
208
|
-
const arrMatch = line.match(/^\s{10,}-\s+"?([^"]+)"?\s*$/);
|
|
209
|
-
if (arrMatch) {
|
|
210
|
-
// Find the last key added and convert to array
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
if (current && typeof current === 'object' && indent > listItemIndent) {
|
|
265
|
+
// Continuation key-value or nested array item
|
|
266
|
+
if (trimmed.startsWith('- ')) {
|
|
267
|
+
// Array item under a key
|
|
268
|
+
const arrVal = trimmed.slice(2).replace(/^["']|["']$/g, '');
|
|
211
269
|
const keys = Object.keys(current);
|
|
212
270
|
const lastKey = keys[keys.length - 1];
|
|
213
271
|
if (lastKey && !Array.isArray(current[lastKey])) {
|
|
214
272
|
current[lastKey] = current[lastKey] ? [current[lastKey]] : [];
|
|
215
273
|
}
|
|
216
|
-
if (lastKey) current[lastKey].push(
|
|
274
|
+
if (lastKey) current[lastKey].push(arrVal);
|
|
275
|
+
} else {
|
|
276
|
+
const kvMatch = trimmed.match(/^(\w+):\s*"?([^"]*)"?\s*$/);
|
|
277
|
+
if (kvMatch) {
|
|
278
|
+
const val = kvMatch[2];
|
|
279
|
+
// Try to parse as number
|
|
280
|
+
current[kvMatch[1]] = /^\d+$/.test(val) ? parseInt(val, 10) : val;
|
|
281
|
+
}
|
|
217
282
|
}
|
|
218
283
|
}
|
|
219
284
|
}
|
|
220
285
|
if (current) items.push(current);
|
|
221
286
|
|
|
287
|
+
// Warn when must_haves block exists but parsed as empty -- likely YAML formatting issue.
|
|
288
|
+
// This is a critical diagnostic: empty must_haves causes verification to silently degrade
|
|
289
|
+
// to Option C (LLM-derived truths) instead of checking documented contracts.
|
|
290
|
+
if (items.length === 0 && blockLines.length > 0) {
|
|
291
|
+
const nonEmptyLines = blockLines.filter(l => l.trim() !== '').length;
|
|
292
|
+
if (nonEmptyLines > 0) {
|
|
293
|
+
process.stderr.write(
|
|
294
|
+
`[gsd-tools] WARNING: must_haves.${blockName} block has ${nonEmptyLines} content lines but parsed 0 items. ` +
|
|
295
|
+
`Possible YAML formatting issue — verification will fall back to LLM-derived truths.\n`
|
|
296
|
+
);
|
|
297
|
+
}
|
|
298
|
+
}
|
|
299
|
+
|
|
222
300
|
return items;
|
|
223
301
|
}
|
|
224
302
|
|
|
@@ -232,6 +310,8 @@ const FRONTMATTER_SCHEMAS = {
|
|
|
232
310
|
|
|
233
311
|
function cmdFrontmatterGet(cwd, filePath, field, raw) {
|
|
234
312
|
if (!filePath) { error('file path required'); }
|
|
313
|
+
// Path traversal guard: reject null bytes
|
|
314
|
+
if (filePath.includes('\0')) { error('file path contains null bytes'); }
|
|
235
315
|
const fullPath = path.isAbsolute(filePath) ? filePath : path.join(cwd, filePath);
|
|
236
316
|
const content = safeReadFile(fullPath);
|
|
237
317
|
if (!content) { output({ error: 'File not found', path: filePath }, raw); return; }
|
|
@@ -247,6 +327,8 @@ function cmdFrontmatterGet(cwd, filePath, field, raw) {
|
|
|
247
327
|
|
|
248
328
|
function cmdFrontmatterSet(cwd, filePath, field, value, raw) {
|
|
249
329
|
if (!filePath || !field || value === undefined) { error('file, field, and value required'); }
|
|
330
|
+
// Path traversal guard: reject null bytes
|
|
331
|
+
if (filePath.includes('\0')) { error('file path contains null bytes'); }
|
|
250
332
|
const fullPath = path.isAbsolute(filePath) ? filePath : path.join(cwd, filePath);
|
|
251
333
|
if (!fs.existsSync(fullPath)) { output({ error: 'File not found', path: filePath }, raw); return; }
|
|
252
334
|
const content = fs.readFileSync(fullPath, 'utf-8');
|
|
@@ -255,7 +337,7 @@ function cmdFrontmatterSet(cwd, filePath, field, value, raw) {
|
|
|
255
337
|
try { parsedValue = JSON.parse(value); } catch { parsedValue = value; }
|
|
256
338
|
fm[field] = parsedValue;
|
|
257
339
|
const newContent = spliceFrontmatter(content, fm);
|
|
258
|
-
fs.writeFileSync(fullPath, newContent, 'utf-8');
|
|
340
|
+
fs.writeFileSync(fullPath, normalizeMd(newContent), 'utf-8');
|
|
259
341
|
output({ updated: true, field, value: parsedValue }, raw, 'true');
|
|
260
342
|
}
|
|
261
343
|
|
|
@@ -269,7 +351,7 @@ function cmdFrontmatterMerge(cwd, filePath, data, raw) {
|
|
|
269
351
|
try { mergeData = JSON.parse(data); } catch { error('Invalid JSON for --data'); return; }
|
|
270
352
|
Object.assign(fm, mergeData);
|
|
271
353
|
const newContent = spliceFrontmatter(content, fm);
|
|
272
|
-
fs.writeFileSync(fullPath, newContent, 'utf-8');
|
|
354
|
+
fs.writeFileSync(fullPath, normalizeMd(newContent), 'utf-8');
|
|
273
355
|
output({ merged: true, fields: Object.keys(mergeData) }, raw, 'true');
|
|
274
356
|
}
|
|
275
357
|
|