gsd-opencode 1.22.1 → 1.33.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/agents/gsd-advisor-researcher.md +112 -0
- package/agents/gsd-assumptions-analyzer.md +110 -0
- package/agents/gsd-codebase-mapper.md +0 -2
- package/agents/gsd-debugger.md +117 -2
- package/agents/gsd-doc-verifier.md +207 -0
- package/agents/gsd-doc-writer.md +608 -0
- package/agents/gsd-executor.md +45 -4
- package/agents/gsd-integration-checker.md +0 -2
- package/agents/gsd-nyquist-auditor.md +0 -2
- package/agents/gsd-phase-researcher.md +191 -5
- package/agents/gsd-plan-checker.md +152 -5
- package/agents/gsd-planner.md +131 -157
- package/agents/gsd-project-researcher.md +28 -3
- package/agents/gsd-research-synthesizer.md +0 -2
- package/agents/gsd-roadmapper.md +29 -2
- package/agents/gsd-security-auditor.md +129 -0
- package/agents/gsd-ui-auditor.md +485 -0
- package/agents/gsd-ui-checker.md +305 -0
- package/agents/gsd-ui-researcher.md +368 -0
- package/agents/gsd-user-profiler.md +173 -0
- package/agents/gsd-verifier.md +207 -22
- package/commands/gsd/gsd-add-backlog.md +76 -0
- package/commands/gsd/gsd-analyze-dependencies.md +34 -0
- package/commands/gsd/gsd-audit-uat.md +24 -0
- package/commands/gsd/gsd-autonomous.md +45 -0
- package/commands/gsd/gsd-cleanup.md +5 -0
- package/commands/gsd/gsd-debug.md +29 -21
- package/commands/gsd/gsd-discuss-phase.md +15 -36
- package/commands/gsd/gsd-do.md +30 -0
- package/commands/gsd/gsd-docs-update.md +48 -0
- package/commands/gsd/gsd-execute-phase.md +24 -2
- package/commands/gsd/gsd-fast.md +30 -0
- package/commands/gsd/gsd-forensics.md +56 -0
- package/commands/gsd/gsd-help.md +2 -0
- package/commands/gsd/gsd-join-discord.md +2 -1
- package/commands/gsd/gsd-list-workspaces.md +19 -0
- package/commands/gsd/gsd-manager.md +40 -0
- package/commands/gsd/gsd-milestone-summary.md +51 -0
- package/commands/gsd/gsd-new-project.md +4 -0
- package/commands/gsd/gsd-new-workspace.md +44 -0
- package/commands/gsd/gsd-next.md +24 -0
- package/commands/gsd/gsd-note.md +34 -0
- package/commands/gsd/gsd-plan-phase.md +8 -1
- package/commands/gsd/gsd-plant-seed.md +28 -0
- package/commands/gsd/gsd-pr-branch.md +25 -0
- package/commands/gsd/gsd-profile-user.md +46 -0
- package/commands/gsd/gsd-quick.md +7 -3
- package/commands/gsd/gsd-reapply-patches.md +178 -45
- package/commands/gsd/gsd-remove-workspace.md +26 -0
- package/commands/gsd/gsd-research-phase.md +7 -12
- package/commands/gsd/gsd-review-backlog.md +62 -0
- package/commands/gsd/gsd-review.md +38 -0
- package/commands/gsd/gsd-secure-phase.md +35 -0
- package/commands/gsd/gsd-session-report.md +19 -0
- package/commands/gsd/gsd-set-profile.md +24 -23
- package/commands/gsd/gsd-ship.md +23 -0
- package/commands/gsd/gsd-stats.md +18 -0
- package/commands/gsd/gsd-thread.md +127 -0
- package/commands/gsd/gsd-ui-phase.md +34 -0
- package/commands/gsd/gsd-ui-review.md +32 -0
- package/commands/gsd/gsd-workstreams.md +71 -0
- package/get-shit-done/bin/gsd-tools.cjs +450 -90
- package/get-shit-done/bin/lib/commands.cjs +489 -24
- package/get-shit-done/bin/lib/config.cjs +329 -48
- package/get-shit-done/bin/lib/core.cjs +1143 -102
- package/get-shit-done/bin/lib/docs.cjs +267 -0
- package/get-shit-done/bin/lib/frontmatter.cjs +125 -43
- package/get-shit-done/bin/lib/init.cjs +918 -106
- package/get-shit-done/bin/lib/milestone.cjs +65 -33
- package/get-shit-done/bin/lib/model-profiles.cjs +70 -0
- package/get-shit-done/bin/lib/phase.cjs +434 -404
- package/get-shit-done/bin/lib/profile-output.cjs +1048 -0
- package/get-shit-done/bin/lib/profile-pipeline.cjs +539 -0
- package/get-shit-done/bin/lib/roadmap.cjs +156 -101
- package/get-shit-done/bin/lib/schema-detect.cjs +238 -0
- package/get-shit-done/bin/lib/security.cjs +384 -0
- package/get-shit-done/bin/lib/state.cjs +711 -79
- package/get-shit-done/bin/lib/template.cjs +2 -2
- package/get-shit-done/bin/lib/uat.cjs +282 -0
- package/get-shit-done/bin/lib/verify.cjs +254 -42
- package/get-shit-done/bin/lib/workstream.cjs +495 -0
- package/get-shit-done/references/agent-contracts.md +79 -0
- package/get-shit-done/references/artifact-types.md +113 -0
- package/get-shit-done/references/checkpoints.md +12 -10
- package/get-shit-done/references/context-budget.md +49 -0
- package/get-shit-done/references/continuation-format.md +15 -15
- package/get-shit-done/references/decimal-phase-calculation.md +2 -3
- package/get-shit-done/references/domain-probes.md +125 -0
- package/get-shit-done/references/gate-prompts.md +100 -0
- package/get-shit-done/references/git-integration.md +47 -0
- package/get-shit-done/references/model-profile-resolution.md +2 -0
- package/get-shit-done/references/model-profiles.md +62 -16
- package/get-shit-done/references/phase-argument-parsing.md +2 -2
- package/get-shit-done/references/planner-gap-closure.md +62 -0
- package/get-shit-done/references/planner-reviews.md +39 -0
- package/get-shit-done/references/planner-revision.md +87 -0
- package/get-shit-done/references/planning-config.md +18 -1
- package/get-shit-done/references/revision-loop.md +97 -0
- package/get-shit-done/references/ui-brand.md +2 -2
- package/get-shit-done/references/universal-anti-patterns.md +58 -0
- package/get-shit-done/references/user-profiling.md +681 -0
- package/get-shit-done/references/workstream-flag.md +111 -0
- package/get-shit-done/templates/SECURITY.md +61 -0
- package/get-shit-done/templates/UAT.md +21 -3
- package/get-shit-done/templates/UI-SPEC.md +100 -0
- package/get-shit-done/templates/VALIDATION.md +3 -3
- package/get-shit-done/templates/claude-md.md +145 -0
- package/get-shit-done/templates/config.json +14 -3
- package/get-shit-done/templates/context.md +61 -6
- package/get-shit-done/templates/debug-subagent-prompt.md +2 -6
- package/get-shit-done/templates/dev-preferences.md +21 -0
- package/get-shit-done/templates/discussion-log.md +63 -0
- package/get-shit-done/templates/phase-prompt.md +46 -5
- package/get-shit-done/templates/planner-subagent-prompt.md +2 -10
- package/get-shit-done/templates/project.md +2 -0
- package/get-shit-done/templates/state.md +2 -2
- package/get-shit-done/templates/user-profile.md +146 -0
- package/get-shit-done/workflows/add-phase.md +4 -4
- package/get-shit-done/workflows/add-tests.md +4 -4
- package/get-shit-done/workflows/add-todo.md +4 -4
- package/get-shit-done/workflows/analyze-dependencies.md +96 -0
- package/get-shit-done/workflows/audit-milestone.md +20 -16
- package/get-shit-done/workflows/audit-uat.md +109 -0
- package/get-shit-done/workflows/autonomous.md +1036 -0
- package/get-shit-done/workflows/check-todos.md +4 -4
- package/get-shit-done/workflows/cleanup.md +4 -4
- package/get-shit-done/workflows/complete-milestone.md +22 -10
- package/get-shit-done/workflows/diagnose-issues.md +21 -7
- package/get-shit-done/workflows/discovery-phase.md +2 -2
- package/get-shit-done/workflows/discuss-phase-assumptions.md +671 -0
- package/get-shit-done/workflows/discuss-phase-power.md +291 -0
- package/get-shit-done/workflows/discuss-phase.md +558 -47
- package/get-shit-done/workflows/do.md +104 -0
- package/get-shit-done/workflows/docs-update.md +1093 -0
- package/get-shit-done/workflows/execute-phase.md +741 -58
- package/get-shit-done/workflows/execute-plan.md +77 -12
- package/get-shit-done/workflows/fast.md +105 -0
- package/get-shit-done/workflows/forensics.md +265 -0
- package/get-shit-done/workflows/health.md +28 -6
- package/get-shit-done/workflows/help.md +127 -7
- package/get-shit-done/workflows/insert-phase.md +4 -4
- package/get-shit-done/workflows/list-phase-assumptions.md +2 -2
- package/get-shit-done/workflows/list-workspaces.md +56 -0
- package/get-shit-done/workflows/manager.md +363 -0
- package/get-shit-done/workflows/map-codebase.md +83 -44
- package/get-shit-done/workflows/milestone-summary.md +223 -0
- package/get-shit-done/workflows/new-milestone.md +133 -25
- package/get-shit-done/workflows/new-project.md +216 -54
- package/get-shit-done/workflows/new-workspace.md +237 -0
- package/get-shit-done/workflows/next.md +97 -0
- package/get-shit-done/workflows/node-repair.md +92 -0
- package/get-shit-done/workflows/note.md +156 -0
- package/get-shit-done/workflows/pause-work.md +132 -15
- package/get-shit-done/workflows/plan-milestone-gaps.md +6 -7
- package/get-shit-done/workflows/plan-phase.md +513 -62
- package/get-shit-done/workflows/plant-seed.md +169 -0
- package/get-shit-done/workflows/pr-branch.md +129 -0
- package/get-shit-done/workflows/profile-user.md +450 -0
- package/get-shit-done/workflows/progress.md +154 -29
- package/get-shit-done/workflows/quick.md +285 -111
- package/get-shit-done/workflows/remove-phase.md +2 -2
- package/get-shit-done/workflows/remove-workspace.md +90 -0
- package/get-shit-done/workflows/research-phase.md +13 -9
- package/get-shit-done/workflows/resume-project.md +37 -18
- package/get-shit-done/workflows/review.md +281 -0
- package/get-shit-done/workflows/secure-phase.md +154 -0
- package/get-shit-done/workflows/session-report.md +146 -0
- package/get-shit-done/workflows/set-profile.md +2 -2
- package/get-shit-done/workflows/settings.md +91 -11
- package/get-shit-done/workflows/ship.md +237 -0
- package/get-shit-done/workflows/stats.md +60 -0
- package/get-shit-done/workflows/transition.md +150 -23
- package/get-shit-done/workflows/ui-phase.md +292 -0
- package/get-shit-done/workflows/ui-review.md +183 -0
- package/get-shit-done/workflows/update.md +262 -30
- package/get-shit-done/workflows/validate-phase.md +14 -17
- package/get-shit-done/workflows/verify-phase.md +143 -11
- package/get-shit-done/workflows/verify-work.md +141 -39
- package/package.json +1 -1
- package/skills/gsd-audit-milestone/SKILL.md +29 -0
- package/skills/gsd-cleanup/SKILL.md +19 -0
- package/skills/gsd-complete-milestone/SKILL.md +131 -0
- package/skills/gsd-discuss-phase/SKILL.md +54 -0
- package/skills/gsd-execute-phase/SKILL.md +49 -0
- package/skills/gsd-plan-phase/SKILL.md +37 -0
- package/skills/gsd-ui-phase/SKILL.md +24 -0
- package/skills/gsd-ui-review/SKILL.md +24 -0
- package/skills/gsd-verify-work/SKILL.md +30 -0
|
@@ -3,8 +3,29 @@
|
|
|
3
3
|
*/
|
|
4
4
|
|
|
5
5
|
const fs = require('fs');
|
|
6
|
+
const os = require('os');
|
|
6
7
|
const path = require('path');
|
|
7
|
-
const
|
|
8
|
+
const crypto = require('crypto');
|
|
9
|
+
const { execSync, execFileSync, spawnSync } = require('child_process');
|
|
10
|
+
const { MODEL_PROFILES } = require('./model-profiles.cjs');
|
|
11
|
+
|
|
12
|
+
// Environment variables that can carry a per-terminal / per-agent session
// identifier. Order matters: earlier entries are more specific to a single
// workstream session than later, terminal-level ones.
const WORKSTREAM_SESSION_ENV_KEYS = [
  'GSD_SESSION_KEY',
  'CODEX_THREAD_ID',
  'CLAUDE_SESSION_ID',
  'CLAUDE_CODE_SSE_PORT',
  'OPENCODE_SESSION_ID',
  'GEMINI_SESSION_ID',
  'CURSOR_SESSION_ID',
  'WINDSURF_SESSION_ID',
  'TERM_SESSION_ID',
  'WT_SESSION',
  'TMUX_PANE',
  'ZELLIJ_SESSION_NAME',
];

// Memoized controlling-TTY probe result. `didProbeControllingTtyToken`
// distinguishes "not probed yet" from "probed and found nothing" so the
// probe runs at most once per process.
let cachedControllingTtyToken = null;
let didProbeControllingTtyToken = false;
|
|
8
29
|
|
|
9
30
|
// ─── Path helpers ────────────────────────────────────────────────────────────
|
|
10
31
|
|
|
@@ -13,45 +34,173 @@ function toPosixPath(p) {
|
|
|
13
34
|
return p.split(path.sep).join('/');
|
|
14
35
|
}
|
|
15
36
|
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
37
|
+
/**
 * Scan the immediate child directories of `cwd` for independent git repos.
 *
 * A child counts when it contains its own `.git` entry (directory or file —
 * worktrees use a `.git` file). Hidden directories and `node_modules` are
 * skipped, and any unreadable entry is silently ignored.
 *
 * @param {string} cwd - directory whose children are scanned
 * @returns {string[]} matching directory names, sorted alphabetically
 */
function detectSubRepos(cwd) {
  let entries;
  try {
    entries = fs.readdirSync(cwd, { withFileTypes: true });
  } catch {
    // cwd missing or unreadable — treat as "no sub-repos"
    return [];
  }

  const names = entries
    .filter((e) => e.isDirectory())
    .filter((e) => !e.name.startsWith('.') && e.name !== 'node_modules')
    .filter((e) => {
      try {
        return fs.existsSync(path.join(cwd, e.name, '.git'));
      } catch {
        return false;
      }
    })
    .map((e) => e.name);

  return names.sort();
}
|
|
59
|
+
|
|
60
|
+
/**
 * Walk up from `startDir` to find the project root that owns `.planning/`.
 *
 * In multi-repo workspaces, OpenCode may open inside a sub-repo (e.g.
 * `backend/`) instead of the project root. This function prevents
 * `.planning/` from being created inside the sub-repo by locating the
 * nearest ancestor that already has a `.planning/` directory.
 *
 * Detection strategy (checked in order for each ancestor):
 *   1. Parent has `.planning/config.json` with `sub_repos` listing this dir
 *   2. Parent has `.planning/config.json` with `multiRepo: true` (legacy)
 *   3. Parent has `.planning/` and current dir has its own `.git` (heuristic)
 *
 * Returns `startDir` unchanged when no ancestor `.planning/` is found
 * (first-run or single-repo projects). The walk never ascends past the
 * user's home directory or the filesystem root.
 *
 * @param {string} startDir - directory to start the upward search from
 * @returns {string} the detected project root, or `startDir` as-is
 */
function findProjectRoot(startDir) {
  const resolved = path.resolve(startDir);
  const root = path.parse(resolved).root;
  // Use the module-scoped `os` binding instead of an inline require('os')
  // — the module already imports os at the top of the file.
  const homedir = os.homedir();

  // If startDir already contains .planning/, it IS the project root.
  // Do not walk up to a parent workspace that also has .planning/ (#1362).
  const ownPlanning = path.join(resolved, '.planning');
  if (fs.existsSync(ownPlanning) && fs.statSync(ownPlanning).isDirectory()) {
    return startDir;
  }

  // Check if startDir or any of its ancestors (up to AND including the
  // candidate project root) contains a .git directory. This handles both
  // `backend/` (direct sub-repo) and `backend/src/modules/` (nested inside),
  // as well as the common case where .git lives at the same level as .planning/.
  function isInsideGitRepo(candidateParent) {
    let d = resolved;
    while (d !== root) {
      if (fs.existsSync(path.join(d, '.git'))) return true;
      if (d === candidateParent) break;
      d = path.dirname(d);
    }
    return false;
  }

  let dir = resolved;
  while (dir !== root) {
    const parent = path.dirname(dir);
    if (parent === dir) break; // filesystem root
    if (parent === homedir) break; // never go above home

    const parentPlanning = path.join(parent, '.planning');
    if (fs.existsSync(parentPlanning) && fs.statSync(parentPlanning).isDirectory()) {
      const configPath = path.join(parentPlanning, 'config.json');
      try {
        const config = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
        const subRepos = config.sub_repos || config.planning?.sub_repos || [];

        // Strategy 1: explicit sub_repos list names our top-level segment
        if (Array.isArray(subRepos) && subRepos.length > 0) {
          const relPath = path.relative(parent, resolved);
          const topSegment = relPath.split(path.sep)[0];
          if (subRepos.includes(topSegment)) {
            return parent;
          }
        }

        // Strategy 2: legacy multiRepo flag + we're inside a git repo
        if (config.multiRepo === true && isInsideGitRepo(parent)) {
          return parent;
        }
      } catch {
        // config.json missing or malformed — fall back to .git heuristic
      }

      // Strategy 3: parent has .planning/ and we're inside a git repo
      if (isInsideGitRepo(parent)) {
        return parent;
      }
    }
    dir = parent;
  }
  return startDir;
}
|
|
32
141
|
|
|
33
142
|
// ─── Output helpers ───────────────────────────────────────────────────────────
|
|
34
143
|
|
|
144
|
+
/**
 * Remove stale gsd-* temp files/dirs older than maxAgeMs (default: 5 min).
 * Runs opportunistically before each new temp file write to keep the OS
 * temp directory from accumulating abandoned payload files.
 *
 * Best-effort: every failure (unreadable tmpdir, entry removed between
 * readdir and stat, permission errors) is swallowed so cleanup can never
 * break the caller's output path.
 *
 * @param {string} prefix - filename prefix to match (e.g., 'gsd-')
 * @param {object} opts
 * @param {number} opts.maxAgeMs - max age in ms before removal (default: 5 min)
 * @param {boolean} opts.dirsOnly - if true, only remove directories (default: false)
 */
function reapStaleTempFiles(prefix = 'gsd-', { maxAgeMs = 5 * 60 * 1000, dirsOnly = false } = {}) {
  try {
    const tmpDir = os.tmpdir();
    const cutoff = Date.now() - maxAgeMs;
    for (const name of fs.readdirSync(tmpDir)) {
      if (!name.startsWith(prefix)) continue;
      const target = path.join(tmpDir, name);
      try {
        const info = fs.statSync(target);
        if (info.mtimeMs >= cutoff) continue; // still fresh — keep
        if (info.isDirectory()) {
          fs.rmSync(target, { recursive: true, force: true });
        } else if (!dirsOnly) {
          fs.unlinkSync(target);
        }
      } catch {
        // Entry vanished between readdir and stat/remove — ignore
      }
    }
  } catch {
    // Non-critical — don't let cleanup failures break output
  }
}
|
|
177
|
+
|
|
35
178
|
/**
 * Emit a command result on stdout.
 *
 * In raw mode (`raw` truthy and `rawValue` provided) the value is written
 * as a plain string. Otherwise `result` is pretty-printed as JSON; payloads
 * over ~50KB exceed OpenCode's bash tool buffer, so they are spilled to a
 * temp file and the path is emitted as `@file:<path>` for callers to detect.
 *
 * @param {*} result - structured result to serialize as JSON
 * @param {boolean} raw - when true, emit rawValue instead of JSON
 * @param {*} rawValue - value for raw mode (ignored when undefined)
 */
function output(result, raw, rawValue) {
  let payload;
  if (raw && rawValue !== undefined) {
    payload = String(rawValue);
  } else {
    const json = JSON.stringify(result, null, 2);
    if (json.length > 50000) {
      // Reap old spill files before creating a new one so the temp dir
      // doesn't grow without bound.
      reapStaleTempFiles();
      const tmpPath = path.join(os.tmpdir(), `gsd-${Date.now()}.json`);
      fs.writeFileSync(tmpPath, json, 'utf-8');
      payload = '@file:' + tmpPath;
    } else {
      payload = json;
    }
  }
  // process.stdout.write() is async when stdout is a pipe — process.exit()
  // can tear down the process before the reader consumes the buffer.
  // fs.writeSync(1, ...) blocks until the kernel accepts the bytes, and
  // skipping process.exit() lets the event loop drain naturally.
  fs.writeSync(1, payload);
}
|
|
52
201
|
|
|
53
202
|
/**
 * Write an error message to stderr and terminate with exit code 1.
 * Uses fs.writeSync(2, ...) so the message is flushed synchronously
 * before the process dies.
 *
 * @param {string} message - human-readable error description
 * @returns {never} always exits the process
 */
function error(message) {
  fs.writeSync(2, `Error: ${message}\n`);
  process.exit(1);
}
|
|
57
206
|
|
|
@@ -65,22 +214,37 @@ function safeReadFile(filePath) {
|
|
|
65
214
|
}
|
|
66
215
|
}
|
|
67
216
|
|
|
217
|
+
/**
 * Canonical config defaults. Single source of truth — imported by
 * config.cjs and verify.cjs so the two can never drift apart.
 * Key order is preserved deliberately: it flows through to any
 * serialized config output.
 */
const CONFIG_DEFAULTS = {
  model_profile: 'balanced',
  commit_docs: true,
  search_gitignored: false,
  // Git integration
  branching_strategy: 'none',
  phase_branch_template: 'gsd/phase-{phase}-{slug}',
  milestone_branch_template: 'gsd/{milestone}-{slug}',
  quick_branch_template: null,
  // Workflow toggles
  research: true,
  plan_checker: true,
  verifier: true,
  nyquist_validation: true,
  parallelization: true,
  // Optional search providers (opt-in)
  brave_search: false,
  firecrawl: false,
  exa_search: false,
  text_mode: false, // when true, use plain-text numbered lists instead of question menus
  sub_repos: [],
  resolve_model_ids: false, // false: return alias as-is | true: map to full OpenCode model ID | "omit": return '' (runtime uses its default)
  context_window: 200000, // default 200k; set to 1000000 for Opus/Sonnet 4.6 1M models
  phase_naming: 'sequential', // 'sequential' (default, auto-increment) or 'custom' (arbitrary string IDs)
  project_code: null, // optional short prefix for phase dirs (e.g., 'CK' → 'CK-01-foundation')
  subagent_timeout: 300000, // 5 min default; increase for large codebases or slower models (ms)
};
+
|
|
68
245
|
function loadConfig(cwd) {
|
|
69
|
-
const configPath = path.join(cwd, '
|
|
70
|
-
const defaults =
|
|
71
|
-
model_profile: 'balanced',
|
|
72
|
-
commit_docs: true,
|
|
73
|
-
search_gitignored: false,
|
|
74
|
-
branching_strategy: 'none',
|
|
75
|
-
phase_branch_template: 'gsd/phase-{phase}-{slug}',
|
|
76
|
-
milestone_branch_template: 'gsd/{milestone}-{slug}',
|
|
77
|
-
research: true,
|
|
78
|
-
plan_checker: true,
|
|
79
|
-
verifier: true,
|
|
80
|
-
nyquist_validation: true,
|
|
81
|
-
parallelization: true,
|
|
82
|
-
brave_search: false,
|
|
83
|
-
};
|
|
246
|
+
const configPath = path.join(planningDir(cwd), 'config.json');
|
|
247
|
+
const defaults = CONFIG_DEFAULTS;
|
|
84
248
|
|
|
85
249
|
try {
|
|
86
250
|
const raw = fs.readFileSync(configPath, 'utf-8');
|
|
@@ -91,9 +255,64 @@ function loadConfig(cwd) {
|
|
|
91
255
|
const depthToGranularity = { quick: 'coarse', standard: 'standard', comprehensive: 'fine' };
|
|
92
256
|
parsed.granularity = depthToGranularity[parsed.depth] || parsed.depth;
|
|
93
257
|
delete parsed.depth;
|
|
258
|
+
try { fs.writeFileSync(configPath, JSON.stringify(parsed, null, 2), 'utf-8'); } catch { /* intentionally empty */ }
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
// Auto-detect and sync sub_repos: scan for child directories with .git
|
|
262
|
+
let configDirty = false;
|
|
263
|
+
|
|
264
|
+
// Migrate legacy "multiRepo: true" boolean → sub_repos array
|
|
265
|
+
if (parsed.multiRepo === true && !parsed.sub_repos && !parsed.planning?.sub_repos) {
|
|
266
|
+
const detected = detectSubRepos(cwd);
|
|
267
|
+
if (detected.length > 0) {
|
|
268
|
+
parsed.sub_repos = detected;
|
|
269
|
+
if (!parsed.planning) parsed.planning = {};
|
|
270
|
+
parsed.planning.commit_docs = false;
|
|
271
|
+
delete parsed.multiRepo;
|
|
272
|
+
configDirty = true;
|
|
273
|
+
}
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
// Keep sub_repos in sync with actual filesystem
|
|
277
|
+
const currentSubRepos = parsed.sub_repos || parsed.planning?.sub_repos || [];
|
|
278
|
+
if (Array.isArray(currentSubRepos) && currentSubRepos.length > 0) {
|
|
279
|
+
const detected = detectSubRepos(cwd);
|
|
280
|
+
if (detected.length > 0) {
|
|
281
|
+
const sorted = [...currentSubRepos].sort();
|
|
282
|
+
if (JSON.stringify(sorted) !== JSON.stringify(detected)) {
|
|
283
|
+
parsed.sub_repos = detected;
|
|
284
|
+
configDirty = true;
|
|
285
|
+
}
|
|
286
|
+
}
|
|
287
|
+
}
|
|
288
|
+
|
|
289
|
+
// Persist sub_repos changes (migration or sync)
|
|
290
|
+
if (configDirty) {
|
|
94
291
|
try { fs.writeFileSync(configPath, JSON.stringify(parsed, null, 2), 'utf-8'); } catch {}
|
|
95
292
|
}
|
|
96
293
|
|
|
294
|
+
// Warn about unrecognized top-level keys so users don't silently lose config.
|
|
295
|
+
// Derived from config-set's VALID_CONFIG_KEYS (canonical source) plus internal-only
|
|
296
|
+
// keys that loadConfig handles but config-set doesn't expose. This avoids maintaining
|
|
297
|
+
// a hardcoded duplicate that drifts when new config keys are added.
|
|
298
|
+
const { VALID_CONFIG_KEYS } = require('./config.cjs');
|
|
299
|
+
const KNOWN_TOP_LEVEL = new Set([
|
|
300
|
+
// Extract top-level key names from dot-notation paths (e.g., 'workflow.research' → 'workflow')
|
|
301
|
+
...[...VALID_CONFIG_KEYS].map(k => k.split('.')[0]),
|
|
302
|
+
// Section containers that hold nested sub-keys
|
|
303
|
+
'git', 'workflow', 'planning', 'hooks',
|
|
304
|
+
// Internal keys loadConfig reads but config-set doesn't expose
|
|
305
|
+
'model_overrides', 'agent_skills', 'context_window', 'resolve_model_ids',
|
|
306
|
+
// Deprecated keys (still accepted for migration, not in config-set)
|
|
307
|
+
'depth', 'multiRepo',
|
|
308
|
+
]);
|
|
309
|
+
const unknownKeys = Object.keys(parsed).filter(k => !KNOWN_TOP_LEVEL.has(k));
|
|
310
|
+
if (unknownKeys.length > 0) {
|
|
311
|
+
process.stderr.write(
|
|
312
|
+
`gsd-tools: warning: unknown config key(s) in .planning/config.json: ${unknownKeys.join(', ')} — these will be ignored\n`
|
|
313
|
+
);
|
|
314
|
+
}
|
|
315
|
+
|
|
97
316
|
const get = (key, nested) => {
|
|
98
317
|
if (parsed[key] !== undefined) return parsed[key];
|
|
99
318
|
if (nested && parsed[nested.section] && parsed[nested.section][nested.field] !== undefined) {
|
|
@@ -111,21 +330,71 @@ function loadConfig(cwd) {
|
|
|
111
330
|
|
|
112
331
|
return {
|
|
113
332
|
model_profile: get('model_profile') ?? defaults.model_profile,
|
|
114
|
-
commit_docs:
|
|
333
|
+
commit_docs: (() => {
|
|
334
|
+
const explicit = get('commit_docs', { section: 'planning', field: 'commit_docs' });
|
|
335
|
+
// If explicitly set in config, respect the user's choice
|
|
336
|
+
if (explicit !== undefined) return explicit;
|
|
337
|
+
// Auto-detection: when no explicit value and .planning/ is gitignored,
|
|
338
|
+
// default to false instead of true
|
|
339
|
+
if (isGitIgnored(cwd, '.planning/')) return false;
|
|
340
|
+
return defaults.commit_docs;
|
|
341
|
+
})(),
|
|
115
342
|
search_gitignored: get('search_gitignored', { section: 'planning', field: 'search_gitignored' }) ?? defaults.search_gitignored,
|
|
116
343
|
branching_strategy: get('branching_strategy', { section: 'git', field: 'branching_strategy' }) ?? defaults.branching_strategy,
|
|
117
344
|
phase_branch_template: get('phase_branch_template', { section: 'git', field: 'phase_branch_template' }) ?? defaults.phase_branch_template,
|
|
118
345
|
milestone_branch_template: get('milestone_branch_template', { section: 'git', field: 'milestone_branch_template' }) ?? defaults.milestone_branch_template,
|
|
346
|
+
quick_branch_template: get('quick_branch_template', { section: 'git', field: 'quick_branch_template' }) ?? defaults.quick_branch_template,
|
|
119
347
|
research: get('research', { section: 'workflow', field: 'research' }) ?? defaults.research,
|
|
120
348
|
plan_checker: get('plan_checker', { section: 'workflow', field: 'plan_check' }) ?? defaults.plan_checker,
|
|
121
349
|
verifier: get('verifier', { section: 'workflow', field: 'verifier' }) ?? defaults.verifier,
|
|
122
350
|
nyquist_validation: get('nyquist_validation', { section: 'workflow', field: 'nyquist_validation' }) ?? defaults.nyquist_validation,
|
|
123
351
|
parallelization,
|
|
124
352
|
brave_search: get('brave_search') ?? defaults.brave_search,
|
|
353
|
+
firecrawl: get('firecrawl') ?? defaults.firecrawl,
|
|
354
|
+
exa_search: get('exa_search') ?? defaults.exa_search,
|
|
355
|
+
text_mode: get('text_mode', { section: 'workflow', field: 'text_mode' }) ?? defaults.text_mode,
|
|
356
|
+
sub_repos: get('sub_repos', { section: 'planning', field: 'sub_repos' }) ?? defaults.sub_repos,
|
|
357
|
+
resolve_model_ids: get('resolve_model_ids') ?? defaults.resolve_model_ids,
|
|
358
|
+
context_window: get('context_window') ?? defaults.context_window,
|
|
359
|
+
phase_naming: get('phase_naming') ?? defaults.phase_naming,
|
|
360
|
+
project_code: get('project_code') ?? defaults.project_code,
|
|
361
|
+
subagent_timeout: get('subagent_timeout', { section: 'workflow', field: 'subagent_timeout' }) ?? defaults.subagent_timeout,
|
|
125
362
|
model_overrides: parsed.model_overrides || null,
|
|
363
|
+
agent_skills: parsed.agent_skills || {},
|
|
364
|
+
manager: parsed.manager || {},
|
|
365
|
+
response_language: get('response_language') || null,
|
|
126
366
|
};
|
|
127
367
|
} catch {
|
|
128
|
-
|
|
368
|
+
// Fall back to ~/.gsd/defaults.json only for truly pre-project contexts (#1683)
|
|
369
|
+
// If .planning/ exists, the project is initialized — just missing config.json
|
|
370
|
+
if (fs.existsSync(planningDir(cwd))) {
|
|
371
|
+
return defaults;
|
|
372
|
+
}
|
|
373
|
+
try {
|
|
374
|
+
const home = process.env.GSD_HOME || os.homedir();
|
|
375
|
+
const globalDefaultsPath = path.join(home, '.gsd', 'defaults.json');
|
|
376
|
+
const raw = fs.readFileSync(globalDefaultsPath, 'utf-8');
|
|
377
|
+
const globalDefaults = JSON.parse(raw);
|
|
378
|
+
return {
|
|
379
|
+
...defaults,
|
|
380
|
+
model_profile: globalDefaults.model_profile ?? defaults.model_profile,
|
|
381
|
+
commit_docs: globalDefaults.commit_docs ?? defaults.commit_docs,
|
|
382
|
+
research: globalDefaults.research ?? defaults.research,
|
|
383
|
+
plan_checker: globalDefaults.plan_checker ?? defaults.plan_checker,
|
|
384
|
+
verifier: globalDefaults.verifier ?? defaults.verifier,
|
|
385
|
+
nyquist_validation: globalDefaults.nyquist_validation ?? defaults.nyquist_validation,
|
|
386
|
+
parallelization: globalDefaults.parallelization ?? defaults.parallelization,
|
|
387
|
+
text_mode: globalDefaults.text_mode ?? defaults.text_mode,
|
|
388
|
+
resolve_model_ids: globalDefaults.resolve_model_ids ?? defaults.resolve_model_ids,
|
|
389
|
+
context_window: globalDefaults.context_window ?? defaults.context_window,
|
|
390
|
+
subagent_timeout: globalDefaults.subagent_timeout ?? defaults.subagent_timeout,
|
|
391
|
+
model_overrides: globalDefaults.model_overrides || null,
|
|
392
|
+
agent_skills: globalDefaults.agent_skills || {},
|
|
393
|
+
response_language: globalDefaults.response_language || null,
|
|
394
|
+
};
|
|
395
|
+
} catch {
|
|
396
|
+
return defaults;
|
|
397
|
+
}
|
|
129
398
|
}
|
|
130
399
|
}
|
|
131
400
|
|
|
@@ -137,7 +406,9 @@ function isGitIgnored(cwd, targetPath) {
|
|
|
137
406
|
// Without it, git check-ignore returns "not ignored" for tracked files even when
|
|
138
407
|
// .gitignore explicitly lists them — a common source of confusion when .planning/
|
|
139
408
|
// was committed before being added to .gitignore.
|
|
140
|
-
|
|
409
|
+
// Use execFileSync (array args) to prevent shell interpretation of special characters
|
|
410
|
+
// in file paths — avoids command injection via crafted path names.
|
|
411
|
+
execFileSync('git', ['check-ignore', '-q', '--no-index', '--', targetPath], {
|
|
141
412
|
cwd,
|
|
142
413
|
stdio: 'pipe',
|
|
143
414
|
});
|
|
@@ -147,27 +418,453 @@ function isGitIgnored(cwd, targetPath) {
|
|
|
147
418
|
}
|
|
148
419
|
}
|
|
149
420
|
|
|
421
|
+
// ─── Markdown normalization ─────────────────────────────────────────────────
|
|
422
|
+
|
|
423
|
+
/**
|
|
424
|
+
* Normalize markdown to fix common markdownlint violations.
|
|
425
|
+
* Applied at write points so GSD-generated .planning/ files are IDE-friendly.
|
|
426
|
+
*
|
|
427
|
+
* Rules enforced:
|
|
428
|
+
* MD022 — Blank lines around headings
|
|
429
|
+
* MD031 — Blank lines around fenced code blocks
|
|
430
|
+
* MD032 — Blank lines around lists
|
|
431
|
+
* MD012 — No multiple consecutive blank lines (collapsed to 2 max)
|
|
432
|
+
* MD047 — Files end with a single newline
|
|
433
|
+
*/
|
|
434
|
+
function normalizeMd(content) {
|
|
435
|
+
if (!content || typeof content !== 'string') return content;
|
|
436
|
+
|
|
437
|
+
// Normalize line endings to LF for consistent processing
|
|
438
|
+
let text = content.replace(/\r\n/g, '\n');
|
|
439
|
+
|
|
440
|
+
const lines = text.split('\n');
|
|
441
|
+
const result = [];
|
|
442
|
+
|
|
443
|
+
// Pre-compute fence state in a single O(n) pass instead of O(n^2) per-line scanning
|
|
444
|
+
const fenceRegex = /^```/;
|
|
445
|
+
const insideFence = new Array(lines.length);
|
|
446
|
+
let fenceOpen = false;
|
|
447
|
+
for (let i = 0; i < lines.length; i++) {
|
|
448
|
+
if (fenceRegex.test(lines[i].trimEnd())) {
|
|
449
|
+
if (fenceOpen) {
|
|
450
|
+
// This is a closing fence — mark as NOT inside (it's the boundary)
|
|
451
|
+
insideFence[i] = false;
|
|
452
|
+
fenceOpen = false;
|
|
453
|
+
} else {
|
|
454
|
+
// This is an opening fence
|
|
455
|
+
insideFence[i] = false;
|
|
456
|
+
fenceOpen = true;
|
|
457
|
+
}
|
|
458
|
+
} else {
|
|
459
|
+
insideFence[i] = fenceOpen;
|
|
460
|
+
}
|
|
461
|
+
}
|
|
462
|
+
|
|
463
|
+
for (let i = 0; i < lines.length; i++) {
|
|
464
|
+
const line = lines[i];
|
|
465
|
+
const prev = i > 0 ? lines[i - 1] : '';
|
|
466
|
+
const prevTrimmed = prev.trimEnd();
|
|
467
|
+
const trimmed = line.trimEnd();
|
|
468
|
+
const isFenceLine = fenceRegex.test(trimmed);
|
|
469
|
+
|
|
470
|
+
// MD022: Blank line before headings (skip first line and frontmatter delimiters)
|
|
471
|
+
if (/^#{1,6}\s/.test(trimmed) && i > 0 && prevTrimmed !== '' && prevTrimmed !== '---') {
|
|
472
|
+
result.push('');
|
|
473
|
+
}
|
|
474
|
+
|
|
475
|
+
// MD031: Blank line before fenced code blocks (opening fences only)
|
|
476
|
+
if (isFenceLine && i > 0 && prevTrimmed !== '' && !insideFence[i] && (i === 0 || !insideFence[i - 1] || isFenceLine)) {
|
|
477
|
+
// Only add blank before opening fences (not closing ones)
|
|
478
|
+
if (i === 0 || !insideFence[i - 1]) {
|
|
479
|
+
result.push('');
|
|
480
|
+
}
|
|
481
|
+
}
|
|
482
|
+
|
|
483
|
+
// MD032: Blank line before lists (- item, * item, N. item, - [ ] item)
|
|
484
|
+
if (/^(\s*[-*+]\s|\s*\d+\.\s)/.test(line) && i > 0 &&
|
|
485
|
+
prevTrimmed !== '' && !/^(\s*[-*+]\s|\s*\d+\.\s)/.test(prev) &&
|
|
486
|
+
prevTrimmed !== '---') {
|
|
487
|
+
result.push('');
|
|
488
|
+
}
|
|
489
|
+
|
|
490
|
+
result.push(line);
|
|
491
|
+
|
|
492
|
+
// MD022: Blank line after headings
|
|
493
|
+
if (/^#{1,6}\s/.test(trimmed) && i < lines.length - 1) {
|
|
494
|
+
const next = lines[i + 1];
|
|
495
|
+
if (next !== undefined && next.trimEnd() !== '') {
|
|
496
|
+
result.push('');
|
|
497
|
+
}
|
|
498
|
+
}
|
|
499
|
+
|
|
500
|
+
// MD031: Blank line after closing fenced code blocks
|
|
501
|
+
if (/^```\s*$/.test(trimmed) && i > 0 && insideFence[i - 1] && i < lines.length - 1) {
|
|
502
|
+
const next = lines[i + 1];
|
|
503
|
+
if (next !== undefined && next.trimEnd() !== '') {
|
|
504
|
+
result.push('');
|
|
505
|
+
}
|
|
506
|
+
}
|
|
507
|
+
|
|
508
|
+
// MD032: Blank line after last list item in a block
|
|
509
|
+
if (/^(\s*[-*+]\s|\s*\d+\.\s)/.test(line) && i < lines.length - 1) {
|
|
510
|
+
const next = lines[i + 1];
|
|
511
|
+
if (next !== undefined && next.trimEnd() !== '' &&
|
|
512
|
+
!/^(\s*[-*+]\s|\s*\d+\.\s)/.test(next) &&
|
|
513
|
+
!/^\s/.test(next)) {
|
|
514
|
+
// Only add blank line if next line is not a continuation/indented line
|
|
515
|
+
result.push('');
|
|
516
|
+
}
|
|
517
|
+
}
|
|
518
|
+
}
|
|
519
|
+
|
|
520
|
+
text = result.join('\n');
|
|
521
|
+
|
|
522
|
+
// MD012: Collapse 3+ consecutive blank lines to 2
|
|
523
|
+
text = text.replace(/\n{3,}/g, '\n\n');
|
|
524
|
+
|
|
525
|
+
// MD047: Ensure file ends with exactly one newline
|
|
526
|
+
text = text.replace(/\n*$/, '\n');
|
|
527
|
+
|
|
528
|
+
return text;
|
|
529
|
+
}
|
|
530
|
+
|
|
150
531
|
function execGit(cwd, args) {
|
|
532
|
+
const result = spawnSync('git', args, {
|
|
533
|
+
cwd,
|
|
534
|
+
stdio: 'pipe',
|
|
535
|
+
encoding: 'utf-8',
|
|
536
|
+
});
|
|
537
|
+
return {
|
|
538
|
+
exitCode: result.status ?? 1,
|
|
539
|
+
stdout: (result.stdout ?? '').toString().trim(),
|
|
540
|
+
stderr: (result.stderr ?? '').toString().trim(),
|
|
541
|
+
};
|
|
542
|
+
}
|
|
543
|
+
|
|
544
|
+
// ─── Common path helpers ──────────────────────────────────────────────────────
|
|
545
|
+
|
|
546
|
+
/**
|
|
547
|
+
* Resolve the main worktree root when running inside a git worktree.
|
|
548
|
+
* In a linked worktree, .planning/ lives in the main worktree, not in the linked one.
|
|
549
|
+
* Returns the main worktree path, or cwd if not in a worktree.
|
|
550
|
+
*/
|
|
551
|
+
function resolveWorktreeRoot(cwd) {
|
|
552
|
+
// If the current directory already has its own .planning/, respect it.
|
|
553
|
+
// This handles linked worktrees with independent planning state (e.g., Conductor workspaces).
|
|
554
|
+
if (fs.existsSync(path.join(cwd, '.planning'))) {
|
|
555
|
+
return cwd;
|
|
556
|
+
}
|
|
557
|
+
|
|
558
|
+
// Check if we're in a linked worktree
|
|
559
|
+
const gitDir = execGit(cwd, ['rev-parse', '--git-dir']);
|
|
560
|
+
const commonDir = execGit(cwd, ['rev-parse', '--git-common-dir']);
|
|
561
|
+
|
|
562
|
+
if (gitDir.exitCode !== 0 || commonDir.exitCode !== 0) return cwd;
|
|
563
|
+
|
|
564
|
+
// In a linked worktree, .git is a file pointing to .git/worktrees/<name>
|
|
565
|
+
// and git-common-dir points to the main repo's .git directory
|
|
566
|
+
const gitDirResolved = path.resolve(cwd, gitDir.stdout);
|
|
567
|
+
const commonDirResolved = path.resolve(cwd, commonDir.stdout);
|
|
568
|
+
|
|
569
|
+
if (gitDirResolved !== commonDirResolved) {
|
|
570
|
+
// We're in a linked worktree — resolve main worktree root
|
|
571
|
+
// The common dir is the main repo's .git, so its parent is the main worktree root
|
|
572
|
+
return path.dirname(commonDirResolved);
|
|
573
|
+
}
|
|
574
|
+
|
|
575
|
+
return cwd;
|
|
576
|
+
}
|
|
577
|
+
|
|
578
|
+
/**
|
|
579
|
+
* Acquire a file-based lock for .planning/ writes.
|
|
580
|
+
* Prevents concurrent worktrees from corrupting shared planning files.
|
|
581
|
+
* Lock is auto-released after the callback completes.
|
|
582
|
+
*/
|
|
583
|
+
function withPlanningLock(cwd, fn) {
|
|
584
|
+
const lockPath = path.join(planningDir(cwd), '.lock');
|
|
585
|
+
const lockTimeout = 10000; // 10 seconds
|
|
586
|
+
const retryDelay = 100;
|
|
587
|
+
const start = Date.now();
|
|
588
|
+
|
|
589
|
+
// Ensure .planning/ exists
|
|
590
|
+
try { fs.mkdirSync(planningDir(cwd), { recursive: true }); } catch { /* ok */ }
|
|
591
|
+
|
|
592
|
+
while (Date.now() - start < lockTimeout) {
|
|
593
|
+
try {
|
|
594
|
+
// Atomic create — fails if file exists
|
|
595
|
+
fs.writeFileSync(lockPath, JSON.stringify({
|
|
596
|
+
pid: process.pid,
|
|
597
|
+
cwd,
|
|
598
|
+
acquired: new Date().toISOString(),
|
|
599
|
+
}), { flag: 'wx' });
|
|
600
|
+
|
|
601
|
+
// Lock acquired — run the function
|
|
602
|
+
try {
|
|
603
|
+
return fn();
|
|
604
|
+
} finally {
|
|
605
|
+
try { fs.unlinkSync(lockPath); } catch { /* already released */ }
|
|
606
|
+
}
|
|
607
|
+
} catch (err) {
|
|
608
|
+
if (err.code === 'EEXIST') {
|
|
609
|
+
// Lock exists — check if stale (>30s old)
|
|
610
|
+
try {
|
|
611
|
+
const stat = fs.statSync(lockPath);
|
|
612
|
+
if (Date.now() - stat.mtimeMs > 30000) {
|
|
613
|
+
fs.unlinkSync(lockPath);
|
|
614
|
+
continue; // retry
|
|
615
|
+
}
|
|
616
|
+
} catch { continue; }
|
|
617
|
+
|
|
618
|
+
// Wait and retry (cross-platform, no shell dependency)
|
|
619
|
+
Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, 100);
|
|
620
|
+
continue;
|
|
621
|
+
}
|
|
622
|
+
throw err;
|
|
623
|
+
}
|
|
624
|
+
}
|
|
625
|
+
// Timeout — force acquire (stale lock recovery)
|
|
626
|
+
try { fs.unlinkSync(lockPath); } catch { /* ok */ }
|
|
627
|
+
return fn();
|
|
628
|
+
}
|
|
629
|
+
|
|
630
|
+
/**
|
|
631
|
+
* Get the .planning directory path, project- and workstream-aware.
|
|
632
|
+
*
|
|
633
|
+
* Resolution order:
|
|
634
|
+
* 1. If GSD_PROJECT is set (env var or explicit `project` arg), routes to
|
|
635
|
+
* `.planning/{project}/` — supports multi-project workspaces where several
|
|
636
|
+
* independent projects share a single `.planning/` root directory (e.g.,
|
|
637
|
+
* an Obsidian vault or monorepo knowledge base used as a command center).
|
|
638
|
+
* 2. If GSD_WORKSTREAM is set, routes to `.planning/workstreams/{ws}/`.
|
|
639
|
+
* 3. Otherwise returns `.planning/`.
|
|
640
|
+
*
|
|
641
|
+
* GSD_PROJECT and GSD_WORKSTREAM can be combined:
|
|
642
|
+
* `.planning/{project}/workstreams/{ws}/`
|
|
643
|
+
*
|
|
644
|
+
* @param {string} cwd - project root
|
|
645
|
+
* @param {string} [ws] - explicit workstream name; if omitted, checks GSD_WORKSTREAM env var
|
|
646
|
+
* @param {string} [project] - explicit project name; if omitted, checks GSD_PROJECT env var
|
|
647
|
+
*/
|
|
648
|
+
function planningDir(cwd, ws, project) {
|
|
649
|
+
if (project === undefined) project = process.env.GSD_PROJECT || null;
|
|
650
|
+
if (ws === undefined) ws = process.env.GSD_WORKSTREAM || null;
|
|
651
|
+
|
|
652
|
+
// Reject path separators and traversal components in project/workstream names
|
|
653
|
+
const BAD_SEGMENT = /[/\\]|\.\./;
|
|
654
|
+
if (project && BAD_SEGMENT.test(project)) {
|
|
655
|
+
throw new Error(`GSD_PROJECT contains invalid path characters: ${project}`);
|
|
656
|
+
}
|
|
657
|
+
if (ws && BAD_SEGMENT.test(ws)) {
|
|
658
|
+
throw new Error(`GSD_WORKSTREAM contains invalid path characters: ${ws}`);
|
|
659
|
+
}
|
|
660
|
+
|
|
661
|
+
let base = path.join(cwd, '.planning');
|
|
662
|
+
if (project) base = path.join(base, project);
|
|
663
|
+
if (ws) base = path.join(base, 'workstreams', ws);
|
|
664
|
+
return base;
|
|
665
|
+
}
|
|
666
|
+
|
|
667
|
+
/** Always returns the root .planning/ path, ignoring workstreams and projects. For shared resources. */
|
|
668
|
+
function planningRoot(cwd) {
|
|
669
|
+
return path.join(cwd, '.planning');
|
|
670
|
+
}
|
|
671
|
+
|
|
672
|
+
/**
|
|
673
|
+
* Get common .planning file paths, workstream-aware.
|
|
674
|
+
* Scoped paths (state, roadmap, phases, requirements) resolve to the active workstream.
|
|
675
|
+
* Shared paths (project, config) always resolve to the root .planning/.
|
|
676
|
+
*/
|
|
677
|
+
function planningPaths(cwd, ws) {
|
|
678
|
+
const base = planningDir(cwd, ws);
|
|
679
|
+
const root = path.join(cwd, '.planning');
|
|
680
|
+
return {
|
|
681
|
+
planning: base,
|
|
682
|
+
state: path.join(base, 'STATE.md'),
|
|
683
|
+
roadmap: path.join(base, 'ROADMAP.md'),
|
|
684
|
+
project: path.join(root, 'PROJECT.md'),
|
|
685
|
+
config: path.join(root, 'config.json'),
|
|
686
|
+
phases: path.join(base, 'phases'),
|
|
687
|
+
requirements: path.join(base, 'REQUIREMENTS.md'),
|
|
688
|
+
};
|
|
689
|
+
}
|
|
690
|
+
|
|
691
|
+
// ─── Active Workstream Detection ─────────────────────────────────────────────
|
|
692
|
+
|
|
693
|
+
function sanitizeWorkstreamSessionToken(value) {
|
|
694
|
+
if (value === null || value === undefined) return null;
|
|
695
|
+
const token = String(value).trim().replace(/[^a-zA-Z0-9._-]+/g, '_').replace(/^_+|_+$/g, '');
|
|
696
|
+
return token ? token.slice(0, 160) : null;
|
|
697
|
+
}
|
|
698
|
+
|
|
699
|
+
function probeControllingTtyToken() {
|
|
700
|
+
if (didProbeControllingTtyToken) return cachedControllingTtyToken;
|
|
701
|
+
didProbeControllingTtyToken = true;
|
|
702
|
+
|
|
703
|
+
// `tty` reads stdin. When stdin is already non-interactive, spawning it only
|
|
704
|
+
// adds avoidable failures on the routing hot path and cannot reveal a stable token.
|
|
705
|
+
if (!(process.stdin && process.stdin.isTTY)) {
|
|
706
|
+
return cachedControllingTtyToken;
|
|
707
|
+
}
|
|
708
|
+
|
|
151
709
|
try {
|
|
152
|
-
const
|
|
153
|
-
if (/^[a-zA-Z0-9._\-/=:@]+$/.test(a)) return a;
|
|
154
|
-
return "'" + a.replace(/'/g, "'\\''") + "'";
|
|
155
|
-
});
|
|
156
|
-
const stdout = execSync('git ' + escaped.join(' '), {
|
|
157
|
-
cwd,
|
|
158
|
-
stdio: 'pipe',
|
|
710
|
+
const ttyPath = execFileSync('tty', [], {
|
|
159
711
|
encoding: 'utf-8',
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
712
|
+
stdio: ['inherit', 'pipe', 'ignore'],
|
|
713
|
+
}).trim();
|
|
714
|
+
if (ttyPath && ttyPath !== 'not a tty') {
|
|
715
|
+
const token = sanitizeWorkstreamSessionToken(ttyPath.replace(/^\/dev\//, ''));
|
|
716
|
+
if (token) cachedControllingTtyToken = `tty-${token}`;
|
|
717
|
+
}
|
|
718
|
+
} catch {}
|
|
719
|
+
|
|
720
|
+
return cachedControllingTtyToken;
|
|
721
|
+
}
|
|
722
|
+
|
|
723
|
+
function getControllingTtyToken() {
|
|
724
|
+
for (const envKey of ['TTY', 'SSH_TTY']) {
|
|
725
|
+
const token = sanitizeWorkstreamSessionToken(process.env[envKey]);
|
|
726
|
+
if (token) return `tty-${token.replace(/^dev_/, '')}`;
|
|
727
|
+
}
|
|
728
|
+
|
|
729
|
+
return probeControllingTtyToken();
|
|
730
|
+
}
|
|
731
|
+
|
|
732
|
+
/**
|
|
733
|
+
* Resolve a deterministic session key for workstream-local routing.
|
|
734
|
+
*
|
|
735
|
+
* Order:
|
|
736
|
+
* 1. Explicit runtime/session env vars (`GSD_SESSION_KEY`, `CODEX_THREAD_ID`, etc.)
|
|
737
|
+
* 2. Terminal identity exposed via `TTY` or `SSH_TTY`
|
|
738
|
+
* 3. One best-effort `tty` probe when stdin is interactive
|
|
739
|
+
* 4. `null`, which tells callers to use the legacy shared pointer fallback
|
|
740
|
+
*/
|
|
741
|
+
function getWorkstreamSessionKey() {
|
|
742
|
+
for (const envKey of WORKSTREAM_SESSION_ENV_KEYS) {
|
|
743
|
+
const raw = process.env[envKey];
|
|
744
|
+
const token = sanitizeWorkstreamSessionToken(raw);
|
|
745
|
+
if (token) return `${envKey.toLowerCase().replace(/[^a-z0-9]+/g, '-')}-${token}`;
|
|
746
|
+
}
|
|
747
|
+
|
|
748
|
+
return getControllingTtyToken();
|
|
749
|
+
}
|
|
750
|
+
|
|
751
|
+
function getSessionScopedWorkstreamFile(cwd) {
|
|
752
|
+
const sessionKey = getWorkstreamSessionKey();
|
|
753
|
+
if (!sessionKey) return null;
|
|
754
|
+
|
|
755
|
+
// Use realpathSync.native so the hash is derived from the canonical filesystem
|
|
756
|
+
// path. On Windows, path.resolve returns whatever case the caller supplied,
|
|
757
|
+
// while realpathSync.native returns the case the OS recorded — they differ on
|
|
758
|
+
// case-insensitive NTFS, producing different hashes and different tmpdir slots.
|
|
759
|
+
// Fall back to path.resolve when the directory does not yet exist.
|
|
760
|
+
let planningAbs;
|
|
761
|
+
try {
|
|
762
|
+
planningAbs = fs.realpathSync.native(planningRoot(cwd));
|
|
763
|
+
} catch {
|
|
764
|
+
planningAbs = path.resolve(planningRoot(cwd));
|
|
765
|
+
}
|
|
766
|
+
const projectId = crypto
|
|
767
|
+
.createHash('sha1')
|
|
768
|
+
.update(planningAbs)
|
|
769
|
+
.digest('hex')
|
|
770
|
+
.slice(0, 16);
|
|
771
|
+
|
|
772
|
+
const dirPath = path.join(os.tmpdir(), 'gsd-workstream-sessions', projectId);
|
|
773
|
+
return {
|
|
774
|
+
sessionKey,
|
|
775
|
+
dirPath,
|
|
776
|
+
filePath: path.join(dirPath, sessionKey),
|
|
777
|
+
};
|
|
778
|
+
}
|
|
779
|
+
|
|
780
|
+
function clearActiveWorkstreamPointer(filePath, cleanupDirPath) {
|
|
781
|
+
try { fs.unlinkSync(filePath); } catch {}
|
|
782
|
+
|
|
783
|
+
// Session-scoped pointers for a repo share one tmp directory. Only remove it
|
|
784
|
+
// when it is empty so clearing or self-healing one session never deletes siblings.
|
|
785
|
+
// Explicitly check remaining entries rather than relying on rmdirSync throwing
|
|
786
|
+
// ENOTEMPTY — that error is not raised reliably on Windows.
|
|
787
|
+
if (cleanupDirPath) {
|
|
788
|
+
try {
|
|
789
|
+
const remaining = fs.readdirSync(cleanupDirPath);
|
|
790
|
+
if (remaining.length === 0) {
|
|
791
|
+
fs.rmdirSync(cleanupDirPath);
|
|
792
|
+
}
|
|
793
|
+
} catch {}
|
|
168
794
|
}
|
|
169
795
|
}
|
|
170
796
|
|
|
797
|
+
/**
|
|
798
|
+
* Pointer files are self-healing: invalid names or deleted-workstream pointers
|
|
799
|
+
* are removed on read so the session falls back to `null` instead of carrying
|
|
800
|
+
* silent stale state forward. Session-scoped callers may also prune an empty
|
|
801
|
+
* per-project tmp directory; shared `.planning/active-workstream` callers do not.
|
|
802
|
+
*/
|
|
803
|
+
function readActiveWorkstreamPointer(filePath, cwd, cleanupDirPath = null) {
|
|
804
|
+
try {
|
|
805
|
+
const name = fs.readFileSync(filePath, 'utf-8').trim();
|
|
806
|
+
if (!name || !/^[a-zA-Z0-9_-]+$/.test(name)) {
|
|
807
|
+
clearActiveWorkstreamPointer(filePath, cleanupDirPath);
|
|
808
|
+
return null;
|
|
809
|
+
}
|
|
810
|
+
const wsDir = path.join(planningRoot(cwd), 'workstreams', name);
|
|
811
|
+
if (!fs.existsSync(wsDir)) {
|
|
812
|
+
clearActiveWorkstreamPointer(filePath, cleanupDirPath);
|
|
813
|
+
return null;
|
|
814
|
+
}
|
|
815
|
+
return name;
|
|
816
|
+
} catch {
|
|
817
|
+
return null;
|
|
818
|
+
}
|
|
819
|
+
}
|
|
820
|
+
|
|
821
|
+
/**
|
|
822
|
+
* Get the active workstream name.
|
|
823
|
+
*
|
|
824
|
+
* Resolution priority:
|
|
825
|
+
* 1. Session-scoped pointer (tmpdir) when the runtime exposes a stable session key
|
|
826
|
+
* 2. Legacy shared `.planning/active-workstream` file when no session key is available
|
|
827
|
+
*
|
|
828
|
+
* The shared file is intentionally ignored when a session key exists so multiple
|
|
829
|
+
* concurrent sessions do not overwrite each other's active workstream.
|
|
830
|
+
*/
|
|
831
|
+
function getActiveWorkstream(cwd) {
|
|
832
|
+
const sessionScoped = getSessionScopedWorkstreamFile(cwd);
|
|
833
|
+
if (sessionScoped) {
|
|
834
|
+
return readActiveWorkstreamPointer(sessionScoped.filePath, cwd, sessionScoped.dirPath);
|
|
835
|
+
}
|
|
836
|
+
|
|
837
|
+
const sharedFilePath = path.join(planningRoot(cwd), 'active-workstream');
|
|
838
|
+
return readActiveWorkstreamPointer(sharedFilePath, cwd);
|
|
839
|
+
}
|
|
840
|
+
|
|
841
|
+
/**
|
|
842
|
+
* Set the active workstream. Pass null to clear.
|
|
843
|
+
*
|
|
844
|
+
* When a stable session key is available, this updates a tmpdir-backed
|
|
845
|
+
* session-scoped pointer. Otherwise it falls back to the legacy shared
|
|
846
|
+
* `.planning/active-workstream` file for backward compatibility.
|
|
847
|
+
*/
|
|
848
|
+
function setActiveWorkstream(cwd, name) {
|
|
849
|
+
const sessionScoped = getSessionScopedWorkstreamFile(cwd);
|
|
850
|
+
const filePath = sessionScoped
|
|
851
|
+
? sessionScoped.filePath
|
|
852
|
+
: path.join(planningRoot(cwd), 'active-workstream');
|
|
853
|
+
|
|
854
|
+
if (!name) {
|
|
855
|
+
clearActiveWorkstreamPointer(filePath, sessionScoped ? sessionScoped.dirPath : null);
|
|
856
|
+
return;
|
|
857
|
+
}
|
|
858
|
+
if (!/^[a-zA-Z0-9_-]+$/.test(name)) {
|
|
859
|
+
throw new Error('Invalid workstream name: must be alphanumeric, hyphens, and underscores only');
|
|
860
|
+
}
|
|
861
|
+
|
|
862
|
+
if (sessionScoped) {
|
|
863
|
+
fs.mkdirSync(sessionScoped.dirPath, { recursive: true });
|
|
864
|
+
}
|
|
865
|
+
fs.writeFileSync(filePath, name + '\n', 'utf-8');
|
|
866
|
+
}
|
|
867
|
+
|
|
171
868
|
// ─── Phase utilities ──────────────────────────────────────────────────────────
|
|
172
869
|
|
|
173
870
|
function escapeRegex(value) {
|
|
@@ -175,17 +872,28 @@ function escapeRegex(value) {
|
|
|
175
872
|
}
|
|
176
873
|
|
|
177
874
|
function normalizePhaseName(phase) {
|
|
178
|
-
const
|
|
179
|
-
|
|
180
|
-
const
|
|
181
|
-
|
|
182
|
-
const
|
|
183
|
-
|
|
875
|
+
const str = String(phase);
|
|
876
|
+
// Strip optional project_code prefix (e.g., 'CK-01' → '01')
|
|
877
|
+
const stripped = str.replace(/^[A-Z]{1,6}-(?=\d)/, '');
|
|
878
|
+
// Standard numeric phases: 1, 01, 12A, 12.1
|
|
879
|
+
const match = stripped.match(/^(\d+)([A-Z])?((?:\.\d+)*)/i);
|
|
880
|
+
if (match) {
|
|
881
|
+
const padded = match[1].padStart(2, '0');
|
|
882
|
+
const letter = match[2] ? match[2].toUpperCase() : '';
|
|
883
|
+
const decimal = match[3] || '';
|
|
884
|
+
return padded + letter + decimal;
|
|
885
|
+
}
|
|
886
|
+
// Custom phase IDs (e.g. PROJ-42, AUTH-101): return as-is
|
|
887
|
+
return str;
|
|
184
888
|
}
|
|
185
889
|
|
|
186
890
|
function comparePhaseNum(a, b) {
|
|
187
|
-
|
|
188
|
-
const
|
|
891
|
+
// Strip optional project_code prefix before comparing (e.g., 'CK-01-name' → '01-name')
|
|
892
|
+
const sa = String(a).replace(/^[A-Z]{1,6}-/, '');
|
|
893
|
+
const sb = String(b).replace(/^[A-Z]{1,6}-/, '');
|
|
894
|
+
const pa = sa.match(/^(\d+)([A-Z])?((?:\.\d+)*)/i);
|
|
895
|
+
const pb = sb.match(/^(\d+)([A-Z])?((?:\.\d+)*)/i);
|
|
896
|
+
// If either is non-numeric (custom ID), fall back to string comparison
|
|
189
897
|
if (!pa || !pb) return String(a).localeCompare(String(b));
|
|
190
898
|
const intDiff = parseInt(pa[1], 10) - parseInt(pb[1], 10);
|
|
191
899
|
if (intDiff !== 0) return intDiff;
|
|
@@ -211,24 +919,58 @@ function comparePhaseNum(a, b) {
|
|
|
211
919
|
return 0;
|
|
212
920
|
}
|
|
213
921
|
|
|
922
|
+
/**
|
|
923
|
+
* Extract the phase token from a directory name.
|
|
924
|
+
* Supports: '01-name', '1009A-name', '999.6-name', 'CK-01-name', 'PROJ-42-name'.
|
|
925
|
+
* Returns the token portion (e.g. '01', '1009A', '999.6', 'PROJ-42') or the full name if no separator.
|
|
926
|
+
*/
|
|
927
|
+
function extractPhaseToken(dirName) {
|
|
928
|
+
// Try project-code-prefixed numeric: CK-01-name → CK-01, CK-01A.2-name → CK-01A.2
|
|
929
|
+
const codePrefixed = dirName.match(/^([A-Z]{1,6}-\d+[A-Z]?(?:\.\d+)*)(?:-|$)/i);
|
|
930
|
+
if (codePrefixed) return codePrefixed[1];
|
|
931
|
+
// Try plain numeric: 01-name, 1009A-name, 999.6-name
|
|
932
|
+
const numeric = dirName.match(/^(\d+[A-Z]?(?:\.\d+)*)(?:-|$)/i);
|
|
933
|
+
if (numeric) return numeric[1];
|
|
934
|
+
// Custom IDs: PROJ-42-name → everything before the last segment that looks like a name
|
|
935
|
+
const custom = dirName.match(/^([A-Z][A-Z0-9]*(?:-[A-Z0-9]+)*)(?:-[a-z]|$)/i);
|
|
936
|
+
if (custom) return custom[1];
|
|
937
|
+
return dirName;
|
|
938
|
+
}
|
|
939
|
+
|
|
940
|
+
/**
|
|
941
|
+
* Check if a directory name's phase token matches the normalized phase exactly.
|
|
942
|
+
* Case-insensitive comparison for the token portion.
|
|
943
|
+
*/
|
|
944
|
+
function phaseTokenMatches(dirName, normalized) {
|
|
945
|
+
const token = extractPhaseToken(dirName);
|
|
946
|
+
if (token.toUpperCase() === normalized.toUpperCase()) return true;
|
|
947
|
+
// Strip optional project_code prefix from dir and retry
|
|
948
|
+
const stripped = dirName.replace(/^[A-Z]{1,6}-(?=\d)/i, '');
|
|
949
|
+
if (stripped !== dirName) {
|
|
950
|
+
const strippedToken = extractPhaseToken(stripped);
|
|
951
|
+
if (strippedToken.toUpperCase() === normalized.toUpperCase()) return true;
|
|
952
|
+
}
|
|
953
|
+
return false;
|
|
954
|
+
}
|
|
955
|
+
|
|
214
956
|
function searchPhaseInDir(baseDir, relBase, normalized) {
|
|
215
957
|
try {
|
|
216
|
-
const
|
|
217
|
-
|
|
218
|
-
const match = dirs.find(d => d
|
|
958
|
+
const dirs = readSubdirectories(baseDir, true);
|
|
959
|
+
// Match: exact phase token comparison (not prefix matching)
|
|
960
|
+
const match = dirs.find(d => phaseTokenMatches(d, normalized));
|
|
219
961
|
if (!match) return null;
|
|
220
962
|
|
|
221
|
-
|
|
963
|
+
// Extract phase number and name — supports numeric (01-name), project-code-prefixed (CK-01-name), and custom (PROJ-42-name)
|
|
964
|
+
const dirMatch = match.match(/^(?:[A-Z]{1,6}-)(\d+[A-Z]?(?:\.\d+)*)-?(.*)/i)
|
|
965
|
+
|| match.match(/^(\d+[A-Z]?(?:\.\d+)*)-?(.*)/i)
|
|
966
|
+
|| match.match(/^([A-Z][A-Z0-9]*(?:-[A-Z0-9]+)*)-(.+)/i)
|
|
967
|
+
|| [null, match, null];
|
|
222
968
|
const phaseNumber = dirMatch ? dirMatch[1] : normalized;
|
|
223
969
|
const phaseName = dirMatch && dirMatch[2] ? dirMatch[2] : null;
|
|
224
970
|
const phaseDir = path.join(baseDir, match);
|
|
225
|
-
const
|
|
226
|
-
|
|
227
|
-
const
|
|
228
|
-
const summaries = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md').sort();
|
|
229
|
-
const hasResearch = phaseFiles.some(f => f.endsWith('-RESEARCH.md') || f === 'RESEARCH.md');
|
|
230
|
-
const hasContext = phaseFiles.some(f => f.endsWith('-CONTEXT.md') || f === 'CONTEXT.md');
|
|
231
|
-
const hasVerification = phaseFiles.some(f => f.endsWith('-VERIFICATION.md') || f === 'VERIFICATION.md');
|
|
971
|
+
const { plans: unsortedPlans, summaries: unsortedSummaries, hasResearch, hasContext, hasVerification, hasReviews } = getPhaseFileStats(phaseDir);
|
|
972
|
+
const plans = unsortedPlans.sort();
|
|
973
|
+
const summaries = unsortedSummaries.sort();
|
|
232
974
|
|
|
233
975
|
const completedPlanIds = new Set(
|
|
234
976
|
summaries.map(s => s.replace('-SUMMARY.md', '').replace('SUMMARY.md', ''))
|
|
@@ -250,6 +992,7 @@ function searchPhaseInDir(baseDir, relBase, normalized) {
|
|
|
250
992
|
has_research: hasResearch,
|
|
251
993
|
has_context: hasContext,
|
|
252
994
|
has_verification: hasVerification,
|
|
995
|
+
has_reviews: hasReviews,
|
|
253
996
|
};
|
|
254
997
|
} catch {
|
|
255
998
|
return null;
|
|
@@ -259,11 +1002,12 @@ function searchPhaseInDir(baseDir, relBase, normalized) {
|
|
|
259
1002
|
function findPhaseInternal(cwd, phase) {
|
|
260
1003
|
if (!phase) return null;
|
|
261
1004
|
|
|
262
|
-
const phasesDir = path.join(cwd, '
|
|
1005
|
+
const phasesDir = path.join(planningDir(cwd), 'phases');
|
|
263
1006
|
const normalized = normalizePhaseName(phase);
|
|
264
1007
|
|
|
265
1008
|
// Search current phases first
|
|
266
|
-
const
|
|
1009
|
+
const relPhasesDir = toPosixPath(path.relative(cwd, phasesDir));
|
|
1010
|
+
const current = searchPhaseInDir(phasesDir, relPhasesDir, normalized);
|
|
267
1011
|
if (current) return current;
|
|
268
1012
|
|
|
269
1013
|
// Search archived milestone phases (newest first)
|
|
@@ -288,7 +1032,7 @@ function findPhaseInternal(cwd, phase) {
|
|
|
288
1032
|
return result;
|
|
289
1033
|
}
|
|
290
1034
|
}
|
|
291
|
-
} catch {}
|
|
1035
|
+
} catch { /* intentionally empty */ }
|
|
292
1036
|
|
|
293
1037
|
return null;
|
|
294
1038
|
}
|
|
@@ -311,8 +1055,7 @@ function getArchivedPhaseDirs(cwd) {
|
|
|
311
1055
|
for (const archiveName of phaseDirs) {
|
|
312
1056
|
const version = archiveName.match(/^(v[\d.]+)-phases$/)[1];
|
|
313
1057
|
const archivePath = path.join(milestonesDir, archiveName);
|
|
314
|
-
const
|
|
315
|
-
const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).sort((a, b) => comparePhaseNum(a, b));
|
|
1058
|
+
const dirs = readSubdirectories(archivePath, true);
|
|
316
1059
|
|
|
317
1060
|
for (const dir of dirs) {
|
|
318
1061
|
results.push({
|
|
@@ -323,21 +1066,135 @@ function getArchivedPhaseDirs(cwd) {
|
|
|
323
1066
|
});
|
|
324
1067
|
}
|
|
325
1068
|
}
|
|
326
|
-
} catch {}
|
|
1069
|
+
} catch { /* intentionally empty */ }
|
|
327
1070
|
|
|
328
1071
|
return results;
|
|
329
1072
|
}
|
|
330
1073
|
|
|
1074
|
+
// ─── Roadmap milestone scoping ───────────────────────────────────────────────
|
|
1075
|
+
|
|
1076
|
+
/**
|
|
1077
|
+
* Strip shipped milestone content wrapped in <details> blocks.
|
|
1078
|
+
* Used to isolate current milestone phases when searching ROADMAP.md
|
|
1079
|
+
* for phase headings or checkboxes — prevents matching archived milestone
|
|
1080
|
+
* phases that share the same numbers as current milestone phases.
|
|
1081
|
+
*/
|
|
1082
|
+
function stripShippedMilestones(content) {
|
|
1083
|
+
return content.replace(/<details>[\s\S]*?<\/details>/gi, '');
|
|
1084
|
+
}
|
|
1085
|
+
|
|
1086
|
+
/**
|
|
1087
|
+
* Extract the current milestone section from ROADMAP.md by positive lookup.
|
|
1088
|
+
*
|
|
1089
|
+
* Instead of stripping <details> blocks (negative heuristic that breaks if
|
|
1090
|
+
* agents wrap the current milestone in <details>), this finds the section
|
|
1091
|
+
* matching the current milestone version and returns only that content.
|
|
1092
|
+
*
|
|
1093
|
+
* Falls back to stripShippedMilestones() if:
|
|
1094
|
+
* - cwd is not provided
|
|
1095
|
+
* - STATE.md doesn't exist or has no milestone field
|
|
1096
|
+
* - Version can't be found in ROADMAP.md
|
|
1097
|
+
*
|
|
1098
|
+
* @param {string} content - Full ROADMAP.md content
|
|
1099
|
+
* @param {string} [cwd] - Working directory for reading STATE.md
|
|
1100
|
+
* @returns {string} Content scoped to current milestone
|
|
1101
|
+
*/
|
|
1102
|
+
function extractCurrentMilestone(content, cwd) {
|
|
1103
|
+
if (!cwd) return stripShippedMilestones(content);
|
|
1104
|
+
|
|
1105
|
+
// 1. Get current milestone version from STATE.md frontmatter
|
|
1106
|
+
let version = null;
|
|
1107
|
+
try {
|
|
1108
|
+
const statePath = path.join(planningDir(cwd), 'STATE.md');
|
|
1109
|
+
if (fs.existsSync(statePath)) {
|
|
1110
|
+
const stateRaw = fs.readFileSync(statePath, 'utf-8');
|
|
1111
|
+
const milestoneMatch = stateRaw.match(/^milestone:\s*(.+)/m);
|
|
1112
|
+
if (milestoneMatch) {
|
|
1113
|
+
version = milestoneMatch[1].trim();
|
|
1114
|
+
}
|
|
1115
|
+
}
|
|
1116
|
+
} catch {}
|
|
1117
|
+
|
|
1118
|
+
// 2. Fallback: derive version from getMilestoneInfo pattern in ROADMAP.md itself
|
|
1119
|
+
if (!version) {
|
|
1120
|
+
// Check for 🚧 in-progress marker
|
|
1121
|
+
const inProgressMatch = content.match(/🚧\s*\*\*v(\d+\.\d+)\s/);
|
|
1122
|
+
if (inProgressMatch) {
|
|
1123
|
+
version = 'v' + inProgressMatch[1];
|
|
1124
|
+
}
|
|
1125
|
+
}
|
|
1126
|
+
|
|
1127
|
+
if (!version) return stripShippedMilestones(content);
|
|
1128
|
+
|
|
1129
|
+
// 3. Find the section matching this version
|
|
1130
|
+
// Match headings like: ## Roadmap v3.0: Name, ## v3.0 Name, etc.
|
|
1131
|
+
const escapedVersion = escapeRegex(version);
|
|
1132
|
+
const sectionPattern = new RegExp(
|
|
1133
|
+
`(^#{1,3}\\s+.*${escapedVersion}[^\\n]*)`,
|
|
1134
|
+
'mi'
|
|
1135
|
+
);
|
|
1136
|
+
const sectionMatch = content.match(sectionPattern);
|
|
1137
|
+
|
|
1138
|
+
if (!sectionMatch) return stripShippedMilestones(content);
|
|
1139
|
+
|
|
1140
|
+
const sectionStart = sectionMatch.index;
|
|
1141
|
+
|
|
1142
|
+
// Find the end: next milestone heading at same or higher level, or EOF
|
|
1143
|
+
// Milestone headings look like: ## v2.0, ## Roadmap v2.0, ## ✅ v1.0, etc.
|
|
1144
|
+
const headingLevel = sectionMatch[1].match(/^(#{1,3})\s/)[1].length;
|
|
1145
|
+
const restContent = content.slice(sectionStart + sectionMatch[0].length);
|
|
1146
|
+
const nextMilestonePattern = new RegExp(
|
|
1147
|
+
`^#{1,${headingLevel}}\\s+(?:.*v\\d+\\.\\d+|✅|📋|🚧)`,
|
|
1148
|
+
'mi'
|
|
1149
|
+
);
|
|
1150
|
+
const nextMatch = restContent.match(nextMilestonePattern);
|
|
1151
|
+
|
|
1152
|
+
let sectionEnd;
|
|
1153
|
+
if (nextMatch) {
|
|
1154
|
+
sectionEnd = sectionStart + sectionMatch[0].length + nextMatch.index;
|
|
1155
|
+
} else {
|
|
1156
|
+
sectionEnd = content.length;
|
|
1157
|
+
}
|
|
1158
|
+
|
|
1159
|
+
// Return everything before the current milestone section (non-milestone content
|
|
1160
|
+
// like title, overview) plus the current milestone section
|
|
1161
|
+
const beforeMilestones = content.slice(0, sectionStart);
|
|
1162
|
+
const currentSection = content.slice(sectionStart, sectionEnd);
|
|
1163
|
+
|
|
1164
|
+
// Also include any content before the first milestone heading (title, overview, etc.)
|
|
1165
|
+
// but strip any <details> blocks in it (these are definitely shipped)
|
|
1166
|
+
const preamble = beforeMilestones.replace(/<details>[\s\S]*?<\/details>/gi, '');
|
|
1167
|
+
|
|
1168
|
+
return preamble + currentSection;
|
|
1169
|
+
}
|
|
1170
|
+
|
|
1171
|
+
/**
 * Apply a string/regex replacement only to the "current milestone" portion of
 * ROADMAP.md — i.e. everything after the final `</details>` close tag.
 * Archived milestones live inside <details> blocks, so limiting the scope
 * keeps write operations from accidentally modifying archived milestone
 * checkboxes/tables.
 *
 * @param {string} content - Full ROADMAP.md text.
 * @param {RegExp|string} pattern - Pattern forwarded to String.prototype.replace.
 * @param {string|Function} replacement - Replacement forwarded unchanged.
 * @returns {string} Content with the replacement applied to the active section only.
 */
function replaceInCurrentMilestone(content, pattern, replacement) {
  const closeTag = '</details>';
  const tagIndex = content.lastIndexOf(closeTag);

  // No archive markers at all — the whole document is the current milestone.
  if (tagIndex === -1) return content.replace(pattern, replacement);

  const splitAt = tagIndex + closeTag.length;
  const archived = content.slice(0, splitAt);
  const active = content.slice(splitAt);
  return archived + active.replace(pattern, replacement);
}
|
|
1186
|
+
|
|
331
1187
|
// ─── Roadmap & model utilities ────────────────────────────────────────────────
|
|
332
1188
|
|
|
333
1189
|
function getRoadmapPhaseInternal(cwd, phaseNum) {
|
|
334
1190
|
if (!phaseNum) return null;
|
|
335
|
-
const roadmapPath = path.join(cwd, '
|
|
1191
|
+
const roadmapPath = path.join(planningDir(cwd), 'ROADMAP.md');
|
|
336
1192
|
if (!fs.existsSync(roadmapPath)) return null;
|
|
337
1193
|
|
|
338
1194
|
try {
|
|
339
|
-
const content = fs.readFileSync(roadmapPath, 'utf-8');
|
|
1195
|
+
const content = extractCurrentMilestone(fs.readFileSync(roadmapPath, 'utf-8'), cwd);
|
|
340
1196
|
const escapedPhase = escapeRegex(phaseNum.toString());
|
|
1197
|
+
// Match both numeric (Phase 1:) and custom (Phase PROJ-42:) headers
|
|
341
1198
|
const phasePattern = new RegExp(`#{2,4}\\s*Phase\\s+${escapedPhase}:\\s*([^\\n]+)`, 'i');
|
|
342
1199
|
const headerMatch = content.match(phasePattern);
|
|
343
1200
|
if (!headerMatch) return null;
|
|
@@ -345,11 +1202,11 @@ function getRoadmapPhaseInternal(cwd, phaseNum) {
|
|
|
345
1202
|
const phaseName = headerMatch[1].trim();
|
|
346
1203
|
const headerIndex = headerMatch.index;
|
|
347
1204
|
const restOfContent = content.slice(headerIndex);
|
|
348
|
-
const nextHeaderMatch = restOfContent.match(/\n#{2,4}\s+Phase\s
|
|
1205
|
+
const nextHeaderMatch = restOfContent.match(/\n#{2,4}\s+Phase\s+[\w]/i);
|
|
349
1206
|
const sectionEnd = nextHeaderMatch ? headerIndex + nextHeaderMatch.index : content.length;
|
|
350
1207
|
const section = content.slice(headerIndex, sectionEnd).trim();
|
|
351
1208
|
|
|
352
|
-
const goalMatch = section.match(/\*\*Goal
|
|
1209
|
+
const goalMatch = section.match(/\*\*Goal(?:\*\*:|\*?\*?:\*\*)\s*([^\n]+)/i);
|
|
353
1210
|
const goal = goalMatch ? goalMatch[1].trim() : null;
|
|
354
1211
|
|
|
355
1212
|
return {
|
|
@@ -364,21 +1221,130 @@ function getRoadmapPhaseInternal(cwd, phaseNum) {
|
|
|
364
1221
|
}
|
|
365
1222
|
}
|
|
366
1223
|
|
|
1224
|
+
// ─── Agent installation validation (#1371) ───────────────────────────────────
|
|
1225
|
+
|
|
1226
|
+
/**
 * Resolve the agents directory from the GSD install location.
 * gsd-tools.cjs lives at <configDir>/get-shit-done/bin/gsd-tools.cjs,
 * so agents/ is at <configDir>/agents/.
 *
 * The GSD_AGENTS_DIR env var takes precedence over the default path — used
 * in tests and for installs where the agents directory is not co-located
 * with gsd-tools.cjs.
 *
 * @returns {string} Absolute path to the agents directory
 */
function getAgentsDir() {
  const envOverride = process.env.GSD_AGENTS_DIR;
  if (envOverride) return envOverride;
  // __dirname is get-shit-done/bin/lib/ → climb 3 levels to configDir.
  return path.join(__dirname, '..', '..', '..', 'agents');
}
|
|
1243
|
+
|
|
1244
|
+
/**
 * Check which GSD agents are installed on disk.
 * Returns an object with installation status and details.
 *
 * Recognises both the standard file name (gsd-planner.md) and the Copilot
 * variant (gsd-planner.agent.md) — Copilot renames agent files during
 * install (#1512).
 *
 * @returns {{ agents_installed: boolean, missing_agents: string[], installed_agents: string[], agents_dir: string }}
 */
function checkAgentsInstalled() {
  const agentsDir = getAgentsDir();
  const expectedAgents = Object.keys(MODEL_PROFILES);

  // No agents directory at all — every expected agent is missing.
  if (!fs.existsSync(agentsDir)) {
    return {
      agents_installed: false,
      missing_agents: expectedAgents,
      installed_agents: [],
      agents_dir: agentsDir,
    };
  }

  // An agent counts as installed when either file format is present on disk.
  const isOnDisk = (agent) =>
    fs.existsSync(path.join(agentsDir, `${agent}.md`)) ||
    fs.existsSync(path.join(agentsDir, `${agent}.agent.md`));

  const installed = expectedAgents.filter(isOnDisk);
  const missing = expectedAgents.filter((agent) => !isOnDisk(agent));

  return {
    agents_installed: installed.length > 0 && missing.length === 0,
    missing_agents: missing,
    installed_agents: installed,
    agents_dir: agentsDir,
  };
}
|
|
1286
|
+
|
|
1287
|
+
// ─── Model alias resolution ───────────────────────────────────────────────────
|
|
1288
|
+
|
|
1289
|
+
/**
 * Map short model aliases (opus/sonnet/haiku) to full model IDs.
 * Updated each release to match current model versions; users can override
 * per-agent via model_overrides in config.json for custom/latest models.
 */
const MODEL_ALIAS_MAP = {
  opus: 'OpenCode-opus-4-6',
  sonnet: 'OpenCode-sonnet-4-6',
  haiku: 'OpenCode-haiku-4-5',
};
|
|
1299
|
+
|
|
367
1300
|
/**
 * Resolve the model identifier for a given agent type.
 *
 * Resolution order:
 *   1. config.model_overrides[agentType] — always respected, returned verbatim,
 *      regardless of resolve_model_ids. Users who set fully-qualified model IDs
 *      (e.g., "openai/gpt-5.4") get exactly that.
 *   2. resolve_model_ids === 'omit' — return '' so the runtime uses its own
 *      configured default model. For non-OpenCode runtimes (Codex, etc.) that
 *      don't recognize OpenCode aliases. Set automatically during install. See #1156.
 *   3. Profile lookup in MODEL_PROFILES ('inherit' profile short-circuits for
 *      known agents).
 *   4. When resolve_model_ids is truthy, the alias is expanded to a full model
 *      ID via MODEL_ALIAS_MAP to prevent 404s when the task tool passes
 *      aliases directly to the API.
 *
 * @param {string} cwd - Project directory used to locate config.
 * @param {string} agentType - Agent key, e.g. 'gsd-planner'.
 * @returns {string} Model alias, full model ID, 'inherit', or '' (omit mode).
 */
function resolveModelInternal(cwd, agentType) {
  const config = loadConfig(cwd);

  // Per-agent override wins unconditionally.
  const override = config.model_overrides?.[agentType];
  if (override) {
    return override;
  }

  // 'omit' mode: hand model selection back to the runtime entirely.
  // Checked before the generic truthy resolve_model_ids branch below,
  // since 'omit' is itself truthy.
  if (config.resolve_model_ids === 'omit') {
    return '';
  }

  // Fall back to profile lookup.
  const profile = String(config.model_profile || 'balanced').toLowerCase();
  const agentModels = MODEL_PROFILES[agentType];

  let alias;
  if (!agentModels) {
    // Unknown agent: fall back to the default alias. Fix: previously this
    // returned 'sonnet' directly, bypassing the alias→ID resolution below and
    // reintroducing the 404 risk that resolve_model_ids exists to prevent.
    alias = 'sonnet';
  } else if (profile === 'inherit') {
    return 'inherit';
  } else {
    alias = agentModels[profile] || agentModels['balanced'] || 'sonnet';
  }

  // resolve_model_ids: true — map alias to full OpenCode model ID.
  if (config.resolve_model_ids) {
    return MODEL_ALIAS_MAP[alias] || alias;
  }

  return alias;
}
|
|
1332
|
+
|
|
1333
|
+
// ─── Summary body helpers ─────────────────────────────────────────────────
|
|
1334
|
+
|
|
1335
|
+
/**
 * Extract a one-liner from the summary body when it's not in frontmatter.
 * The summary template defines one-liner as a bold markdown line after the heading:
 *   # Phase X: Name Summary
 *   **[substantive one-liner text]**
 *
 * Tolerates both LF and CRLF line endings (fix: the previous regexes only
 * matched '\n' and silently failed on CRLF files).
 *
 * @param {string} content - Full summary file contents (may include frontmatter).
 * @returns {string|null} The one-liner text, or null when absent.
 */
function extractOneLinerFromBody(content) {
  if (!content) return null;
  // Strip YAML frontmatter first.
  const body = content.replace(/^---\r?\n[\s\S]*?\r?\n---(?:\r?\n)*/, '');
  // Find the first **...** line after a # heading.
  const match = body.match(/^#[^\n]*\r?\n(?:\r?\n)*\*\*([^*]+)\*\*/m);
  return match ? match[1].trim() : null;
}
|
|
383
1349
|
|
|
384
1350
|
// ─── Misc utilities ───────────────────────────────────────────────────────────
|
|
@@ -395,16 +1361,17 @@ function pathExistsInternal(cwd, targetPath) {
|
|
|
395
1361
|
|
|
396
1362
|
/**
 * Generate a filesystem-friendly slug from arbitrary text.
 * Lowercases, collapses non-alphanumeric runs into single hyphens, trims
 * leading/trailing hyphens, and caps the result at 60 characters.
 *
 * @param {string} text - Source text (phase name, milestone title, etc.).
 * @returns {string|null} The slug, or null for empty/missing input.
 */
function generateSlugInternal(text) {
  if (!text) return null;
  return text
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-+|-+$/g, '')
    .substring(0, 60)
    // Fix: truncation at 60 chars can land right after a hyphen — strip any
    // trailing hyphens so slugs never end with '-'.
    .replace(/-+$/, '');
}
|
|
400
1366
|
|
|
401
1367
|
function getMilestoneInfo(cwd) {
|
|
402
1368
|
try {
|
|
403
|
-
const roadmap = fs.readFileSync(path.join(cwd, '
|
|
1369
|
+
const roadmap = fs.readFileSync(path.join(planningDir(cwd), 'ROADMAP.md'), 'utf-8');
|
|
404
1370
|
|
|
405
1371
|
// First: check for list-format roadmaps using 🚧 (in-progress) marker
|
|
406
1372
|
// e.g. "- 🚧 **v2.1 Belgium** — Phases 24-28 (in progress)"
|
|
407
|
-
|
|
1373
|
+
// e.g. "- 🚧 **v1.2.1 Tech Debt** — Phases 1-8 (in progress)"
|
|
1374
|
+
const inProgressMatch = roadmap.match(/🚧\s*\*\*v(\d+(?:\.\d+)+)\s+([^*]+)\*\*/);
|
|
408
1375
|
if (inProgressMatch) {
|
|
409
1376
|
return {
|
|
410
1377
|
version: 'v' + inProgressMatch[1],
|
|
@@ -413,17 +1380,18 @@ function getMilestoneInfo(cwd) {
|
|
|
413
1380
|
}
|
|
414
1381
|
|
|
415
1382
|
// Second: heading-format roadmaps — strip shipped milestones in <details> blocks
|
|
416
|
-
const cleaned = roadmap
|
|
1383
|
+
const cleaned = stripShippedMilestones(roadmap);
|
|
417
1384
|
// Extract version and name from the same ## heading for consistency
|
|
418
|
-
|
|
1385
|
+
// Supports 2+ segment versions: v1.2, v1.2.1, v2.0.1, etc.
|
|
1386
|
+
const headingMatch = cleaned.match(/## .*v(\d+(?:\.\d+)+)[:\s]+([^\n(]+)/);
|
|
419
1387
|
if (headingMatch) {
|
|
420
1388
|
return {
|
|
421
1389
|
version: 'v' + headingMatch[1],
|
|
422
1390
|
name: headingMatch[2].trim(),
|
|
423
1391
|
};
|
|
424
1392
|
}
|
|
425
|
-
// Fallback: try bare version match
|
|
426
|
-
const versionMatch = cleaned.match(/v(\d
|
|
1393
|
+
// Fallback: try bare version match (greedy — capture longest version string)
|
|
1394
|
+
const versionMatch = cleaned.match(/v(\d+(?:\.\d+)+)/);
|
|
427
1395
|
return {
|
|
428
1396
|
version: versionMatch ? versionMatch[0] : 'v1.0',
|
|
429
1397
|
name: 'milestone',
|
|
@@ -441,13 +1409,14 @@ function getMilestoneInfo(cwd) {
|
|
|
441
1409
|
function getMilestonePhaseFilter(cwd) {
|
|
442
1410
|
const milestonePhaseNums = new Set();
|
|
443
1411
|
try {
|
|
444
|
-
const roadmap = fs.readFileSync(path.join(cwd, '
|
|
445
|
-
|
|
1412
|
+
const roadmap = extractCurrentMilestone(fs.readFileSync(path.join(planningDir(cwd), 'ROADMAP.md'), 'utf-8'), cwd);
|
|
1413
|
+
// Match both numeric phases (Phase 1:) and custom IDs (Phase PROJ-42:)
|
|
1414
|
+
const phasePattern = /#{2,4}\s*Phase\s+([\w][\w.-]*)\s*:/gi;
|
|
446
1415
|
let m;
|
|
447
1416
|
while ((m = phasePattern.exec(roadmap)) !== null) {
|
|
448
1417
|
milestonePhaseNums.add(m[1]);
|
|
449
1418
|
}
|
|
450
|
-
} catch {}
|
|
1419
|
+
} catch { /* intentionally empty */ }
|
|
451
1420
|
|
|
452
1421
|
if (milestonePhaseNums.size === 0) {
|
|
453
1422
|
const passAll = () => true;
|
|
@@ -460,26 +1429,76 @@ function getMilestonePhaseFilter(cwd) {
|
|
|
460
1429
|
);
|
|
461
1430
|
|
|
462
1431
|
function isDirInMilestone(dirName) {
|
|
1432
|
+
// Try numeric match first
|
|
463
1433
|
const m = dirName.match(/^0*(\d+[A-Za-z]?(?:\.\d+)*)/);
|
|
464
|
-
if (
|
|
465
|
-
|
|
1434
|
+
if (m && normalized.has(m[1].toLowerCase())) return true;
|
|
1435
|
+
// Try custom ID match (e.g. PROJ-42-description → PROJ-42)
|
|
1436
|
+
const customMatch = dirName.match(/^([A-Za-z][A-Za-z0-9]*(?:-[A-Za-z0-9]+)*)/);
|
|
1437
|
+
if (customMatch && normalized.has(customMatch[1].toLowerCase())) return true;
|
|
1438
|
+
return false;
|
|
466
1439
|
}
|
|
467
1440
|
isDirInMilestone.phaseCount = milestonePhaseNums.size;
|
|
468
1441
|
return isDirInMilestone;
|
|
469
1442
|
}
|
|
470
1443
|
|
|
1444
|
+
// ─── Phase file helpers ──────────────────────────────────────────────────────
|
|
1445
|
+
|
|
1446
|
+
/** Filter a file list to just PLAN.md / *-PLAN.md entries. */
function filterPlanFiles(files) {
  return files.filter((name) => name === 'PLAN.md' || name.endsWith('-PLAN.md'));
}
|
|
1450
|
+
|
|
1451
|
+
/** Filter a file list to just SUMMARY.md / *-SUMMARY.md entries. */
function filterSummaryFiles(files) {
  return files.filter((name) => name === 'SUMMARY.md' || name.endsWith('-SUMMARY.md'));
}
|
|
1455
|
+
|
|
1456
|
+
/**
 * Read a phase directory and return counts/flags for common file types.
 * Returns an object with plans[], summaries[], and boolean flags for
 * research/context/verification/reviews files.
 *
 * @param {string} phaseDir - Path to the phase directory (must exist).
 * @returns {{ plans: string[], summaries: string[], hasResearch: boolean, hasContext: boolean, hasVerification: boolean, hasReviews: boolean }}
 */
function getPhaseFileStats(phaseDir) {
  const files = fs.readdirSync(phaseDir);
  // A kind is present when either the bare file (KIND.md) or a prefixed
  // variant (*-KIND.md) exists.
  const hasKind = (suffix, bare) =>
    files.some((name) => name === bare || name.endsWith(suffix));
  return {
    plans: filterPlanFiles(files),
    summaries: filterSummaryFiles(files),
    hasResearch: hasKind('-RESEARCH.md', 'RESEARCH.md'),
    hasContext: hasKind('-CONTEXT.md', 'CONTEXT.md'),
    hasVerification: hasKind('-VERIFICATION.md', 'VERIFICATION.md'),
    hasReviews: hasKind('-REVIEWS.md', 'REVIEWS.md'),
  };
}
|
|
1472
|
+
|
|
1473
|
+
/**
 * Read immediate child directories from a path.
 * Returns [] if the path doesn't exist or can't be read.
 * Pass sort=true to apply comparePhaseNum ordering.
 *
 * @param {string} dirPath - Directory to scan.
 * @param {boolean} [sort=false] - Sort results with comparePhaseNum when true.
 * @returns {string[]} Child directory names (empty on any read error).
 */
function readSubdirectories(dirPath, sort = false) {
  try {
    const names = fs
      .readdirSync(dirPath, { withFileTypes: true })
      .filter((entry) => entry.isDirectory())
      .map((entry) => entry.name);
    return sort ? names.sort((a, b) => comparePhaseNum(a, b)) : names;
  } catch {
    // Missing/unreadable directory is an expected case, not an error.
    return [];
  }
}
|
|
1487
|
+
|
|
471
1488
|
module.exports = {
|
|
472
|
-
MODEL_PROFILES,
|
|
473
1489
|
output,
|
|
474
1490
|
error,
|
|
475
1491
|
safeReadFile,
|
|
476
1492
|
loadConfig,
|
|
477
1493
|
isGitIgnored,
|
|
478
1494
|
execGit,
|
|
1495
|
+
normalizeMd,
|
|
479
1496
|
escapeRegex,
|
|
480
1497
|
normalizePhaseName,
|
|
481
1498
|
comparePhaseNum,
|
|
482
1499
|
searchPhaseInDir,
|
|
1500
|
+
extractPhaseToken,
|
|
1501
|
+
phaseTokenMatches,
|
|
483
1502
|
findPhaseInternal,
|
|
484
1503
|
getArchivedPhaseDirs,
|
|
485
1504
|
getRoadmapPhaseInternal,
|
|
@@ -488,5 +1507,27 @@ module.exports = {
|
|
|
488
1507
|
generateSlugInternal,
|
|
489
1508
|
getMilestoneInfo,
|
|
490
1509
|
getMilestonePhaseFilter,
|
|
1510
|
+
stripShippedMilestones,
|
|
1511
|
+
extractCurrentMilestone,
|
|
1512
|
+
replaceInCurrentMilestone,
|
|
491
1513
|
toPosixPath,
|
|
1514
|
+
extractOneLinerFromBody,
|
|
1515
|
+
resolveWorktreeRoot,
|
|
1516
|
+
withPlanningLock,
|
|
1517
|
+
findProjectRoot,
|
|
1518
|
+
detectSubRepos,
|
|
1519
|
+
reapStaleTempFiles,
|
|
1520
|
+
MODEL_ALIAS_MAP,
|
|
1521
|
+
CONFIG_DEFAULTS,
|
|
1522
|
+
planningDir,
|
|
1523
|
+
planningRoot,
|
|
1524
|
+
planningPaths,
|
|
1525
|
+
getActiveWorkstream,
|
|
1526
|
+
setActiveWorkstream,
|
|
1527
|
+
filterPlanFiles,
|
|
1528
|
+
filterSummaryFiles,
|
|
1529
|
+
getPhaseFileStats,
|
|
1530
|
+
readSubdirectories,
|
|
1531
|
+
getAgentsDir,
|
|
1532
|
+
checkAgentsInstalled,
|
|
492
1533
|
};
|