code-as-plan 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.ja-JP.md +834 -0
- package/README.ko-KR.md +823 -0
- package/README.md +1006 -0
- package/README.pt-BR.md +452 -0
- package/README.zh-CN.md +800 -0
- package/agents/cap-brainstormer.md +154 -0
- package/agents/cap-debugger.md +221 -0
- package/agents/cap-prototyper.md +170 -0
- package/agents/cap-reviewer.md +230 -0
- package/agents/cap-tester.md +193 -0
- package/bin/install.js +5002 -0
- package/cap/bin/gsd-tools.cjs +1141 -0
- package/cap/bin/lib/arc-scanner.cjs +341 -0
- package/cap/bin/lib/cap-feature-map.cjs +506 -0
- package/cap/bin/lib/cap-session.cjs +191 -0
- package/cap/bin/lib/cap-stack-docs.cjs +598 -0
- package/cap/bin/lib/cap-tag-scanner.cjs +458 -0
- package/cap/bin/lib/commands.cjs +959 -0
- package/cap/bin/lib/config.cjs +466 -0
- package/cap/bin/lib/convention-reader.cjs +180 -0
- package/cap/bin/lib/core.cjs +1230 -0
- package/cap/bin/lib/feature-aggregator.cjs +422 -0
- package/cap/bin/lib/frontmatter.cjs +336 -0
- package/cap/bin/lib/init.cjs +1442 -0
- package/cap/bin/lib/manifest-generator.cjs +381 -0
- package/cap/bin/lib/milestone.cjs +252 -0
- package/cap/bin/lib/model-profiles.cjs +68 -0
- package/cap/bin/lib/monorepo-context.cjs +224 -0
- package/cap/bin/lib/monorepo-migrator.cjs +507 -0
- package/cap/bin/lib/phase.cjs +888 -0
- package/cap/bin/lib/profile-output.cjs +952 -0
- package/cap/bin/lib/profile-pipeline.cjs +539 -0
- package/cap/bin/lib/roadmap.cjs +329 -0
- package/cap/bin/lib/security.cjs +382 -0
- package/cap/bin/lib/session-manager.cjs +290 -0
- package/cap/bin/lib/skeleton-generator.cjs +177 -0
- package/cap/bin/lib/state.cjs +1031 -0
- package/cap/bin/lib/template.cjs +222 -0
- package/cap/bin/lib/test-detector.cjs +61 -0
- package/cap/bin/lib/uat.cjs +282 -0
- package/cap/bin/lib/verify.cjs +888 -0
- package/cap/bin/lib/workspace-detector.cjs +369 -0
- package/cap/bin/lib/workstream.cjs +491 -0
- package/cap/commands/gsd/workstreams.md +63 -0
- package/cap/references/arc-standard.md +315 -0
- package/cap/references/cap-agent-architecture.md +102 -0
- package/cap/references/cap-gitignore-template +9 -0
- package/cap/references/cap-zero-deps.md +158 -0
- package/cap/references/checkpoints.md +778 -0
- package/cap/references/continuation-format.md +249 -0
- package/cap/references/decimal-phase-calculation.md +64 -0
- package/cap/references/feature-map-template.md +25 -0
- package/cap/references/git-integration.md +295 -0
- package/cap/references/git-planning-commit.md +38 -0
- package/cap/references/model-profile-resolution.md +36 -0
- package/cap/references/model-profiles.md +139 -0
- package/cap/references/phase-argument-parsing.md +61 -0
- package/cap/references/planning-config.md +202 -0
- package/cap/references/questioning.md +162 -0
- package/cap/references/session-template.json +8 -0
- package/cap/references/tdd.md +263 -0
- package/cap/references/ui-brand.md +160 -0
- package/cap/references/user-profiling.md +681 -0
- package/cap/references/verification-patterns.md +612 -0
- package/cap/references/workstream-flag.md +58 -0
- package/cap/templates/DEBUG.md +164 -0
- package/cap/templates/UAT.md +265 -0
- package/cap/templates/UI-SPEC.md +100 -0
- package/cap/templates/VALIDATION.md +76 -0
- package/cap/templates/claude-md.md +122 -0
- package/cap/templates/codebase/architecture.md +255 -0
- package/cap/templates/codebase/concerns.md +310 -0
- package/cap/templates/codebase/conventions.md +307 -0
- package/cap/templates/codebase/integrations.md +280 -0
- package/cap/templates/codebase/stack.md +186 -0
- package/cap/templates/codebase/structure.md +285 -0
- package/cap/templates/codebase/testing.md +480 -0
- package/cap/templates/config.json +44 -0
- package/cap/templates/context.md +352 -0
- package/cap/templates/continue-here.md +78 -0
- package/cap/templates/copilot-instructions.md +7 -0
- package/cap/templates/debug-subagent-prompt.md +91 -0
- package/cap/templates/dev-preferences.md +21 -0
- package/cap/templates/discovery.md +146 -0
- package/cap/templates/discussion-log.md +63 -0
- package/cap/templates/milestone-archive.md +123 -0
- package/cap/templates/milestone.md +115 -0
- package/cap/templates/phase-prompt.md +610 -0
- package/cap/templates/planner-subagent-prompt.md +117 -0
- package/cap/templates/project.md +186 -0
- package/cap/templates/requirements.md +231 -0
- package/cap/templates/research-project/ARCHITECTURE.md +204 -0
- package/cap/templates/research-project/FEATURES.md +147 -0
- package/cap/templates/research-project/PITFALLS.md +200 -0
- package/cap/templates/research-project/STACK.md +120 -0
- package/cap/templates/research-project/SUMMARY.md +170 -0
- package/cap/templates/research.md +552 -0
- package/cap/templates/retrospective.md +54 -0
- package/cap/templates/roadmap.md +202 -0
- package/cap/templates/state.md +176 -0
- package/cap/templates/summary-complex.md +59 -0
- package/cap/templates/summary-minimal.md +41 -0
- package/cap/templates/summary-standard.md +48 -0
- package/cap/templates/summary.md +248 -0
- package/cap/templates/user-profile.md +146 -0
- package/cap/templates/user-setup.md +311 -0
- package/cap/templates/verification-report.md +322 -0
- package/cap/workflows/add-phase.md +112 -0
- package/cap/workflows/add-tests.md +351 -0
- package/cap/workflows/add-todo.md +158 -0
- package/cap/workflows/audit-milestone.md +340 -0
- package/cap/workflows/audit-uat.md +109 -0
- package/cap/workflows/autonomous.md +891 -0
- package/cap/workflows/check-todos.md +177 -0
- package/cap/workflows/cleanup.md +152 -0
- package/cap/workflows/complete-milestone.md +767 -0
- package/cap/workflows/diagnose-issues.md +231 -0
- package/cap/workflows/discovery-phase.md +289 -0
- package/cap/workflows/discuss-phase-assumptions.md +653 -0
- package/cap/workflows/discuss-phase.md +1049 -0
- package/cap/workflows/do.md +104 -0
- package/cap/workflows/execute-phase.md +846 -0
- package/cap/workflows/execute-plan.md +514 -0
- package/cap/workflows/fast.md +105 -0
- package/cap/workflows/forensics.md +265 -0
- package/cap/workflows/health.md +181 -0
- package/cap/workflows/help.md +660 -0
- package/cap/workflows/insert-phase.md +130 -0
- package/cap/workflows/list-phase-assumptions.md +178 -0
- package/cap/workflows/list-workspaces.md +56 -0
- package/cap/workflows/manager.md +362 -0
- package/cap/workflows/map-codebase.md +377 -0
- package/cap/workflows/milestone-summary.md +223 -0
- package/cap/workflows/new-milestone.md +486 -0
- package/cap/workflows/new-project.md +1250 -0
- package/cap/workflows/new-workspace.md +237 -0
- package/cap/workflows/next.md +97 -0
- package/cap/workflows/node-repair.md +92 -0
- package/cap/workflows/note.md +156 -0
- package/cap/workflows/pause-work.md +176 -0
- package/cap/workflows/plan-milestone-gaps.md +273 -0
- package/cap/workflows/plan-phase.md +859 -0
- package/cap/workflows/plant-seed.md +169 -0
- package/cap/workflows/pr-branch.md +129 -0
- package/cap/workflows/profile-user.md +450 -0
- package/cap/workflows/progress.md +507 -0
- package/cap/workflows/quick.md +757 -0
- package/cap/workflows/remove-phase.md +155 -0
- package/cap/workflows/remove-workspace.md +90 -0
- package/cap/workflows/research-phase.md +82 -0
- package/cap/workflows/resume-project.md +326 -0
- package/cap/workflows/review.md +228 -0
- package/cap/workflows/session-report.md +146 -0
- package/cap/workflows/settings.md +283 -0
- package/cap/workflows/ship.md +228 -0
- package/cap/workflows/stats.md +60 -0
- package/cap/workflows/transition.md +671 -0
- package/cap/workflows/ui-phase.md +302 -0
- package/cap/workflows/ui-review.md +165 -0
- package/cap/workflows/update.md +323 -0
- package/cap/workflows/validate-phase.md +174 -0
- package/cap/workflows/verify-phase.md +254 -0
- package/cap/workflows/verify-work.md +637 -0
- package/commands/cap/annotate.md +165 -0
- package/commands/cap/brainstorm.md +238 -0
- package/commands/cap/debug.md +297 -0
- package/commands/cap/init.md +262 -0
- package/commands/cap/iterate.md +234 -0
- package/commands/cap/prototype.md +281 -0
- package/commands/cap/refresh-docs.md +37 -0
- package/commands/cap/review.md +272 -0
- package/commands/cap/scan.md +249 -0
- package/commands/cap/start.md +234 -0
- package/commands/cap/status.md +189 -0
- package/commands/cap/test.md +250 -0
- package/hooks/dist/gsd-check-update.js +114 -0
- package/hooks/dist/gsd-context-monitor.js +156 -0
- package/hooks/dist/gsd-prompt-guard.js +96 -0
- package/hooks/dist/gsd-statusline.js +119 -0
- package/hooks/dist/gsd-workflow-guard.js +94 -0
- package/package.json +51 -0
- package/scripts/base64-scan.sh +262 -0
- package/scripts/build-hooks.js +82 -0
- package/scripts/cap-removal-checklist.md +202 -0
- package/scripts/prompt-injection-scan.sh +198 -0
- package/scripts/run-tests.cjs +29 -0
- package/scripts/secret-scan.sh +227 -0
|
@@ -0,0 +1,381 @@
|
|
|
1
|
+
// @gsd-context Manifest generator for monorepo shared packages -- extracts public API surface and produces markdown summaries
|
|
2
|
+
// @gsd-decision Scans index/barrel files and TypeScript .d.ts files rather than full AST parsing -- regex is sufficient for export extraction
|
|
3
|
+
// @gsd-constraint Zero external dependencies -- uses only Node.js built-ins (fs, path)
|
|
4
|
+
// @gsd-ref(ref:AC-5) Shared packages get auto-generated API manifests stored in root .planning/manifests/
|
|
5
|
+
// @gsd-pattern Manifest output is markdown so it can be injected directly into agent context as lightweight reference
|
|
6
|
+
|
|
7
|
+
'use strict';
|
|
8
|
+
|
|
9
|
+
const fs = require('node:fs');
|
|
10
|
+
const path = require('node:path');
|
|
11
|
+
|
|
12
|
+
// @gsd-api generateManifest(packagePath, options) -- returns ManifestData object with exports, types, and description
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* @typedef {Object} ExportEntry
|
|
16
|
+
* @property {string} name - Exported symbol name
|
|
17
|
+
* @property {'function'|'class'|'const'|'type'|'interface'|'enum'|'default'|'unknown'} kind - Export kind
|
|
18
|
+
* @property {string|null} description - One-line description if available from JSDoc/comment
|
|
19
|
+
*/
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* @typedef {Object} ManifestData
|
|
23
|
+
* @property {string} packageName - Package name from package.json
|
|
24
|
+
* @property {string} packagePath - Relative path in monorepo
|
|
25
|
+
* @property {string|null} description - Package description from package.json
|
|
26
|
+
* @property {string|null} version - Package version
|
|
27
|
+
* @property {ExportEntry[]} exports - Public API exports
|
|
28
|
+
* @property {string[]} dependencies - Internal monorepo dependencies (workspace:*)
|
|
29
|
+
* @property {string} generatedAt - ISO timestamp
|
|
30
|
+
*/
|
|
31
|
+
|
|
32
|
+
/**
 * Build a ManifestData record for one shared package.
 *
 * Reads package.json for metadata, collects workspace:* dependencies,
 * then scans the resolved entry/barrel file for its exports. When no
 * entry file can be resolved, falls back to scanning .d.ts files for
 * TypeScript type exports.
 *
 * @param {string} packageAbsPath - Absolute path to the package directory
 * @param {Object} [options]
 * @param {string} [options.rootPath] - Monorepo root for computing relative paths
 * @returns {ManifestData}
 */
function generateManifest(packageAbsPath, options) {
  const opts = options || {};
  // Default root assumes a <root>/packages/<pkg> layout: two levels up.
  const rootPath = opts.rootPath || path.dirname(path.dirname(packageAbsPath));

  const pkg = safeReadJson(path.join(packageAbsPath, 'package.json')) || {};

  // @gsd-context Find and scan the main entry point / barrel file for exports;
  // fall back to .d.ts scanning when no barrel file is found.
  const entryFile = resolveEntryFile(packageAbsPath, pkg);
  const apiExports = entryFile ? scanExports(entryFile) : scanDtsFiles(packageAbsPath);

  return {
    packageName: pkg.name || path.basename(packageAbsPath),
    packagePath: path.relative(rootPath, packageAbsPath),
    description: pkg.description || null,
    version: pkg.version || null,
    exports: apiExports,
    // @gsd-decision Extract workspace:* dependencies to identify internal monorepo links
    dependencies: extractWorkspaceDeps(pkg),
    generatedAt: new Date().toISOString(),
  };
}
|
|
72
|
+
|
|
73
|
+
/**
 * Resolve the main entry/barrel file for a package.
 *
 * Candidate order: package.json "exports" (string form, or the "." entry's
 * import/require/default conditions), then "main", then "module", then
 * conventional index locations. The first candidate that exists on disk wins.
 *
 * @param {string} packageAbsPath - Absolute path to the package directory
 * @param {Object} pkg - Parsed package.json
 * @returns {string|null} Absolute path to entry file, or null
 */
function resolveEntryFile(packageAbsPath, pkg) {
  // @gsd-decision Check package.json exports/main/module fields, then fall back to index.ts/index.js convention
  const candidates = [];

  // Only string values are usable as relative paths. Modern packages often
  // nest further condition objects (e.g. `"import": { "types": ..., "default": ... }`);
  // the previous code pushed such objects as-is and crashed path.join with
  // a TypeError, so non-strings are now skipped.
  const pushIfString = (value) => {
    if (typeof value === 'string') candidates.push(value);
  };

  // From package.json fields
  if (typeof pkg.exports === 'string') {
    candidates.push(pkg.exports);
  } else if (pkg.exports && pkg.exports['.']) {
    const dotExport = pkg.exports['.'];
    if (typeof dotExport === 'string') {
      candidates.push(dotExport);
    } else {
      // First string-valued condition wins, preserving the original
      // import > require > default priority.
      for (const condition of [dotExport.import, dotExport.require, dotExport.default]) {
        if (typeof condition === 'string') {
          candidates.push(condition);
          break;
        }
      }
    }
  }
  pushIfString(pkg.main);
  pushIfString(pkg.module);

  // Convention-based fallbacks
  candidates.push('src/index.ts', 'src/index.tsx', 'src/index.js', 'index.ts', 'index.js', 'lib/index.ts', 'lib/index.js');

  for (const candidate of candidates) {
    const absCandidate = path.join(packageAbsPath, candidate);
    if (fs.existsSync(absCandidate)) {
      return absCandidate;
    }
  }

  return null;
}
|
|
109
|
+
|
|
110
|
+
/**
 * Scan a source file for export statements and extract public API entries.
 *
 * Handles named exports (function/class/const/let/var/type/interface/enum,
 * including `async function` and `abstract class`), default exports
 * (including `export default async function`), and named re-exports
 * (`export { Foo as Bar } from './module'`).
 *
 * @param {string} filePath - Absolute path to the file
 * @returns {ExportEntry[]} Empty array when the file cannot be read
 */
function scanExports(filePath) {
  // @gsd-decision Use regex to extract exports rather than AST parsing -- language-agnostic and zero-dep
  // @gsd-risk Regex export extraction may miss complex re-export patterns like `export * from './module'` chains
  let content;
  try {
    content = fs.readFileSync(filePath, 'utf-8');
  } catch {
    return [];
  }

  const exports = [];

  // Named exports: export function foo, export async function foo,
  // export class Bar, export const baz, export type Qux.
  // (`async\s+function` was previously unmatched, silently dropping async exports.)
  const namedExportRe = /^[ \t]*export\s+(async\s+function|function|class|const|let|var|type|interface|enum|abstract\s+class)\s+(\w+)/gm;
  for (const match of content.matchAll(namedExportRe)) {
    let kind = match[1].trim();
    if (kind === 'let' || kind === 'var') kind = 'const';
    if (kind.startsWith('abstract')) kind = 'class';
    if (kind.startsWith('async')) kind = 'function'; // normalize `async function` -> 'function'

    const name = match[2];
    const description = extractPrecedingComment(content, match.index);

    exports.push({ name, kind, description });
  }

  // Default export: export default [async] function/class
  const defaultExportRe = /^[ \t]*export\s+default\s+(?:async\s+)?(function|class)\s+(\w+)?/gm;
  for (const match of content.matchAll(defaultExportRe)) {
    exports.push({
      name: match[2] || 'default',
      kind: 'default',
      description: extractPrecedingComment(content, match.index),
    });
  }

  // Re-exports: export { Foo, Bar as Baz } from './module' -- record the
  // exported (post-`as`) name; the kind is unknowable without the source file.
  const reExportRe = /^[ \t]*export\s*\{([^}]+)\}\s*from\s*['"][^'"]+['"]/gm;
  for (const match of content.matchAll(reExportRe)) {
    const names = match[1].split(',').map(n => n.trim().split(/\s+as\s+/).pop().trim()).filter(Boolean);
    for (const name of names) {
      exports.push({ name, kind: 'unknown', description: null });
    }
  }

  return exports;
}
|
|
162
|
+
|
|
163
|
+
/**
 * Extract the first line of a comment immediately preceding a given position.
 *
 * Only the single line directly above the export is inspected. Supports
 * `// line comments`, one-line block comments (`/* text *\/`), the closing
 * line of a multi-line block comment, and JSDoc continuation lines
 * (`* text`, with any leading `@tag` stripped).
 *
 * @param {string} content - Full file content
 * @param {number} position - Character position of the export statement
 * @returns {string|null} Comment text, or null when none is found
 */
function extractPrecedingComment(content, position) {
  // Split everything before the export; the final array element is the
  // partial line containing the export itself, so the line above it is at
  // index length - 2.
  const lines = content.slice(0, position).split('\n');
  if (lines.length < 2) return null;
  const prevLine = lines[lines.length - 2].trim();

  // Single-line comment
  if (prevLine.startsWith('//')) {
    return prevLine.replace(/^\/\/\s*/, '').trim() || null;
  }
  // Block comment end. The previous regex never stripped a leading `/*`,
  // so one-line block comments came back as "/* text" -- fixed here.
  if (prevLine.endsWith('*/')) {
    const commentText = prevLine
      .replace(/^\/\*+\s*/, '')  // strip leading /* or /**
      .replace(/\s*\*+\/$/, '')  // strip trailing */
      .replace(/^\*\s*/, '')     // strip leading * on continuation lines
      .trim();
    return commentText || null;
  }
  // JSDoc continuation line: strip leading `*` and any `@tag`
  if (prevLine.startsWith('*')) {
    return prevLine.replace(/^\*\s*/, '').replace(/@\w+\s*/, '').trim() || null;
  }

  return null;
}
|
|
196
|
+
|
|
197
|
+
/**
 * Extract workspace:* dependencies from package.json.
 *
 * Merges dependencies, devDependencies, and peerDependencies (later maps
 * overwrite earlier duplicates) and keeps only entries whose version
 * specifier starts with "workspace:".
 *
 * @param {Object} pkg - Parsed package.json
 * @returns {string[]} List of internal package names
 */
function extractWorkspaceDeps(pkg) {
  const merged = Object.assign(
    {},
    pkg.dependencies,
    pkg.devDependencies,
    pkg.peerDependencies
  );

  return Object.entries(merged)
    .filter(([, version]) => typeof version === 'string' && version.startsWith('workspace:'))
    .map(([name]) => name);
}
|
|
215
|
+
|
|
216
|
+
/**
 * Render a ManifestData record as a markdown document.
 *
 * Layout: H1 title, metadata header (path/version/timestamp), optional
 * blockquoted description, an "Exports" table (or a "No exports detected."
 * placeholder), and an optional "Internal Dependencies" bullet list.
 *
 * @param {ManifestData} manifest
 * @returns {string}
 */
function formatManifestMarkdown(manifest) {
  const out = [];

  out.push(`# ${manifest.packageName}`);
  out.push('');
  out.push(`**Path:** ${manifest.packagePath}`);
  out.push(`**Version:** ${manifest.version || 'n/a'}`);
  out.push(`**Generated:** ${manifest.generatedAt}`);
  out.push('');

  if (manifest.description) {
    out.push(`> ${manifest.description}`, '');
  }

  // Exports section: table when anything was detected, placeholder otherwise.
  out.push('## Exports', '');
  if (manifest.exports.length === 0) {
    out.push('No exports detected.', '');
  } else {
    out.push('| Name | Kind | Description |');
    out.push('|------|------|-------------|');
    for (const entry of manifest.exports) {
      out.push(`| ${entry.name} | ${entry.kind} | ${entry.description || '--'} |`);
    }
    out.push('');
  }

  // Internal (workspace) dependencies, omitted entirely when none exist.
  if (manifest.dependencies.length > 0) {
    out.push('## Internal Dependencies', '');
    manifest.dependencies.forEach((dep) => out.push(`- ${dep}`));
    out.push('');
  }

  return out.join('\n');
}
|
|
260
|
+
|
|
261
|
+
/**
 * Generate manifests for every workspace package and write each one as a
 * markdown file under .planning/manifests/ (or options.outputDir).
 *
 * Scoped package names are made filesystem-safe: the leading "@" is
 * dropped and "/" becomes "__" (e.g. "@acme/ui" -> "acme__ui.md").
 *
 * @param {string} rootPath - Monorepo root
 * @param {Array<{name: string, path: string, absolutePath: string}>} packages - Workspace packages
 * @param {Object} [options]
 * @param {string} [options.outputDir] - Override manifest output directory
 * @returns {string[]} Paths to generated manifest files
 */
// @gsd-api generateAllManifests(rootPath, packages, options) -- writes markdown manifests to .planning/manifests/ and returns file paths
function generateAllManifests(rootPath, packages, options) {
  const opts = options || {};
  const outputDir = opts.outputDir || path.join(rootPath, '.planning', 'manifests');

  fs.mkdirSync(outputDir, { recursive: true });

  return packages.map((pkg) => {
    const manifest = generateManifest(pkg.absolutePath, { rootPath });
    const fileName = `${manifest.packageName.replace(/^@/, '').replace(/\//g, '__')}.md`;
    const outFile = path.join(outputDir, fileName);
    fs.writeFileSync(outFile, formatManifestMarkdown(manifest), 'utf-8');
    return outFile;
  });
}
|
|
291
|
+
|
|
292
|
+
/**
 * CLI entry point for the generate-manifest subcommand.
 *
 * Resolves `packagePath` against `cwd`, generates its manifest, and prints
 * it to stdout -- pretty-printed JSON when `raw` is set, markdown otherwise.
 * A missing package path is a usage error (message on stderr, exit code 1
 * via process.exitCode).
 *
 * @param {string} cwd - Current working directory
 * @param {string} packagePath - Relative path to the package
 * @param {boolean} raw - Whether to output raw JSON
 */
function cmdGenerateManifest(cwd, packagePath, raw) {
  if (!packagePath) {
    process.stderr.write('Usage: generate-manifest <package-path>\n');
    process.exitCode = 1;
    return;
  }

  const manifest = generateManifest(path.resolve(cwd, packagePath), { rootPath: cwd });
  const rendered = raw
    ? JSON.stringify(manifest, null, 2)
    : formatManifestMarkdown(manifest);
  process.stdout.write(rendered + '\n');
}
|
|
315
|
+
|
|
316
|
+
/**
 * Fallback export scanner: collect .d.ts files from the package root and
 * src/ directory and merge their type exports.
 *
 * Limited to the first 5 .d.ts files to bound scan time. Duplicate export
 * names across files are dropped (first occurrence wins).
 *
 * @param {string} packageAbsPath - Absolute path to package directory
 * @returns {ExportEntry[]}
 */
function scanDtsFiles(packageAbsPath) {
  const MAX_FILES = 5;
  const dtsFiles = [];

  // Gather candidate .d.ts files, stopping once the cap is reached.
  for (const dir of [packageAbsPath, path.join(packageAbsPath, 'src')]) {
    if (dtsFiles.length >= MAX_FILES) break;
    let entries;
    try {
      entries = fs.readdirSync(dir);
    } catch {
      // Directory does not exist or not readable
      continue;
    }
    for (const entry of entries) {
      if (entry.endsWith('.d.ts') && entry !== 'node_modules') {
        dtsFiles.push(path.join(dir, entry));
      }
      if (dtsFiles.length >= MAX_FILES) break;
    }
  }

  // Scan each file and merge exports, deduplicating by symbol name.
  const seen = new Set();
  const merged = [];
  for (const file of dtsFiles) {
    for (const exp of scanExports(file)) {
      if (seen.has(exp.name)) continue;
      seen.add(exp.name);
      merged.push(exp);
    }
  }

  return merged;
}
|
|
358
|
+
|
|
359
|
+
/**
 * Read a file and parse it as JSON, returning null on any failure.
 *
 * @param {string} filePath
 * @returns {Object|null} Parsed value, or null when the file is missing,
 *   unreadable, or contains malformed JSON
 */
function safeReadJson(filePath) {
  try {
    const raw = fs.readFileSync(filePath, 'utf-8');
    return JSON.parse(raw);
  } catch {
    // Missing file, permission error, and parse error all collapse to null.
    return null;
  }
}
|
|
371
|
+
|
|
372
|
+
// Public API of this module. cmdGenerateManifest is the CLI-facing entry
// point; the remaining functions are library helpers also exercised
// directly by other lib modules and tests.
module.exports = {
  generateManifest,
  generateAllManifests,
  formatManifestMarkdown,
  scanExports,
  scanDtsFiles,
  resolveEntryFile,
  extractWorkspaceDeps,
  cmdGenerateManifest,
};
|
|
@@ -0,0 +1,252 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Milestone — Milestone and requirements lifecycle operations
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
const fs = require('fs');
|
|
6
|
+
const path = require('path');
|
|
7
|
+
const { escapeRegex, getMilestonePhaseFilter, extractOneLinerFromBody, normalizeMd, planningPaths, output, error } = require('./core.cjs');
|
|
8
|
+
const { extractFrontmatter } = require('./frontmatter.cjs');
|
|
9
|
+
const { writeStateMd, stateReplaceFieldWithFallback } = require('./state.cjs');
|
|
10
|
+
|
|
11
|
+
/**
 * Mark one or more requirement IDs as complete in REQUIREMENTS.md.
 *
 * Each ID is updated in both places it can appear:
 *  - checkbox list:      `- [ ] **REQ-ID**`            -> `- [x] **REQ-ID**`
 *  - traceability table: `| REQ-ID | ... | Pending |`  -> `| REQ-ID | ... | Complete |`
 *
 * IDs not matched in either form are classified as already_complete (when a
 * completed marker exists) or not_found. The file is rewritten only when at
 * least one ID was newly marked.
 *
 * @param {string} cwd - Project root containing .planning/
 * @param {string[]} reqIdsRaw - Raw ID arguments: comma/space separated, optionally bracket-wrapped
 * @param {boolean} raw - Raw-output flag passed through to output()
 */
function cmdRequirementsMarkComplete(cwd, reqIdsRaw, raw) {
  if (!reqIdsRaw || reqIdsRaw.length === 0) {
    // NOTE(review): assumes error() does not return (exits or throws) -- confirm in core.cjs
    error('requirement IDs required. Usage: requirements mark-complete REQ-01,REQ-02 or REQ-01 REQ-02');
  }

  // Accept comma-separated, space-separated, or bracket-wrapped: [REQ-01, REQ-02]
  const reqIds = reqIdsRaw
    .join(' ')
    .replace(/[\[\]]/g, '')
    .split(/[,\s]+/)
    .map(r => r.trim())
    .filter(Boolean);

  if (reqIds.length === 0) {
    error('no valid requirement IDs found');
  }

  const reqPath = planningPaths(cwd).requirements;
  if (!fs.existsSync(reqPath)) {
    // Missing REQUIREMENTS.md is reported, not fatal -- nothing to update.
    output({ updated: false, reason: 'REQUIREMENTS.md not found', ids: reqIds }, raw, 'no requirements file');
    return;
  }

  let reqContent = fs.readFileSync(reqPath, 'utf-8');
  const updated = [];          // IDs newly flipped to complete in this run
  const alreadyComplete = [];  // IDs already marked complete in the file
  const notFound = [];         // IDs with no matching checkbox or table row

  for (const reqId of reqIds) {
    let found = false;
    // IDs come from CLI input; escape so they are safe inside a dynamic RegExp.
    const reqEscaped = escapeRegex(reqId);

    // Update checkbox: - [ ] **REQ-ID** → - [x] **REQ-ID**
    // (replace() with a /g regex resets lastIndex itself, so the test()
    // before it is safe here.)
    const checkboxPattern = new RegExp(`(-\\s*\\[)[ ](\\]\\s*\\*\\*${reqEscaped}\\*\\*)`, 'gi');
    if (checkboxPattern.test(reqContent)) {
      reqContent = reqContent.replace(checkboxPattern, '$1x$2');
      found = true;
    }

    // Update traceability table: | REQ-ID | Phase N | Pending | → | REQ-ID | Phase N | Complete |
    const tablePattern = new RegExp(`(\\|\\s*${reqEscaped}\\s*\\|[^|]+\\|)\\s*Pending\\s*(\\|)`, 'gi');
    if (tablePattern.test(reqContent)) {
      // Re-read since test() advances lastIndex for global regex
      reqContent = reqContent.replace(
        new RegExp(`(\\|\\s*${reqEscaped}\\s*\\|[^|]+\\|)\\s*Pending\\s*(\\|)`, 'gi'),
        '$1 Complete $2'
      );
      found = true;
    }

    if (found) {
      updated.push(reqId);
    } else {
      // Check if already complete before declaring not_found
      const doneCheckbox = new RegExp(`-\\s*\\[x\\]\\s*\\*\\*${reqEscaped}\\*\\*`, 'gi');
      const doneTable = new RegExp(`\\|\\s*${reqEscaped}\\s*\\|[^|]+\\|\\s*Complete\\s*\\|`, 'gi');
      if (doneCheckbox.test(reqContent) || doneTable.test(reqContent)) {
        alreadyComplete.push(reqId);
      } else {
        notFound.push(reqId);
      }
    }
  }

  // Only touch the file when something actually changed.
  if (updated.length > 0) {
    fs.writeFileSync(reqPath, reqContent, 'utf-8');
  }

  output({
    updated: updated.length > 0,
    marked_complete: updated,
    already_complete: alreadyComplete,
    not_found: notFound,
    total: reqIds.length,
  }, raw, `${updated.length}/${reqIds.length} requirements marked complete`);
}
|
|
87
|
+
|
|
88
|
+
function cmdMilestoneComplete(cwd, version, options, raw) {
|
|
89
|
+
if (!version) {
|
|
90
|
+
error('version required for milestone complete (e.g., v1.0)');
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
const roadmapPath = planningPaths(cwd).roadmap;
|
|
94
|
+
const reqPath = planningPaths(cwd).requirements;
|
|
95
|
+
const statePath = planningPaths(cwd).state;
|
|
96
|
+
const milestonesPath = path.join(cwd, '.planning', 'MILESTONES.md');
|
|
97
|
+
const archiveDir = path.join(cwd, '.planning', 'milestones');
|
|
98
|
+
const phasesDir = planningPaths(cwd).phases;
|
|
99
|
+
const today = new Date().toISOString().split('T')[0];
|
|
100
|
+
const milestoneName = options.name || version;
|
|
101
|
+
|
|
102
|
+
// Ensure archive directory exists
|
|
103
|
+
fs.mkdirSync(archiveDir, { recursive: true });
|
|
104
|
+
|
|
105
|
+
// Scope stats and accomplishments to only the phases belonging to the
|
|
106
|
+
// current milestone's ROADMAP. Uses the shared filter from core.cjs
|
|
107
|
+
// (same logic used by cmdPhasesList and other callers).
|
|
108
|
+
const isDirInMilestone = getMilestonePhaseFilter(cwd);
|
|
109
|
+
|
|
110
|
+
// Gather stats from phases (scoped to current milestone only)
|
|
111
|
+
let phaseCount = 0;
|
|
112
|
+
let totalPlans = 0;
|
|
113
|
+
let totalTasks = 0;
|
|
114
|
+
const accomplishments = [];
|
|
115
|
+
|
|
116
|
+
try {
|
|
117
|
+
const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
|
|
118
|
+
const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).sort();
|
|
119
|
+
|
|
120
|
+
for (const dir of dirs) {
|
|
121
|
+
if (!isDirInMilestone(dir)) continue;
|
|
122
|
+
|
|
123
|
+
phaseCount++;
|
|
124
|
+
const phaseFiles = fs.readdirSync(path.join(phasesDir, dir));
|
|
125
|
+
const plans = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md');
|
|
126
|
+
const summaries = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md');
|
|
127
|
+
totalPlans += plans.length;
|
|
128
|
+
|
|
129
|
+
// Extract one-liners from summaries
|
|
130
|
+
for (const s of summaries) {
|
|
131
|
+
try {
|
|
132
|
+
const content = fs.readFileSync(path.join(phasesDir, dir, s), 'utf-8');
|
|
133
|
+
const fm = extractFrontmatter(content);
|
|
134
|
+
const oneLiner = fm['one-liner'] || extractOneLinerFromBody(content);
|
|
135
|
+
if (oneLiner) {
|
|
136
|
+
accomplishments.push(oneLiner);
|
|
137
|
+
}
|
|
138
|
+
// Count tasks: prefer **Tasks:** N from Performance section,
|
|
139
|
+
// then <task XML tags, then ## Task N markdown headers
|
|
140
|
+
const tasksFieldMatch = content.match(/\*\*Tasks:\*\*\s*(\d+)/);
|
|
141
|
+
if (tasksFieldMatch) {
|
|
142
|
+
totalTasks += parseInt(tasksFieldMatch[1], 10);
|
|
143
|
+
} else {
|
|
144
|
+
const xmlTaskMatches = content.match(/<task[\s>]/gi) || [];
|
|
145
|
+
const mdTaskMatches = content.match(/##\s*Task\s*\d+/gi) || [];
|
|
146
|
+
totalTasks += xmlTaskMatches.length || mdTaskMatches.length;
|
|
147
|
+
}
|
|
148
|
+
} catch { /* intentionally empty */ }
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
} catch { /* intentionally empty */ }
|
|
152
|
+
|
|
153
|
+
// Archive ROADMAP.md
|
|
154
|
+
if (fs.existsSync(roadmapPath)) {
|
|
155
|
+
const roadmapContent = fs.readFileSync(roadmapPath, 'utf-8');
|
|
156
|
+
fs.writeFileSync(path.join(archiveDir, `${version}-ROADMAP.md`), roadmapContent, 'utf-8');
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
// Archive REQUIREMENTS.md
|
|
160
|
+
if (fs.existsSync(reqPath)) {
|
|
161
|
+
const reqContent = fs.readFileSync(reqPath, 'utf-8');
|
|
162
|
+
const archiveHeader = `# Requirements Archive: ${version} ${milestoneName}\n\n**Archived:** ${today}\n**Status:** SHIPPED\n\nFor current requirements, see \`.planning/REQUIREMENTS.md\`.\n\n---\n\n`;
|
|
163
|
+
fs.writeFileSync(path.join(archiveDir, `${version}-REQUIREMENTS.md`), archiveHeader + reqContent, 'utf-8');
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
// Archive audit file if exists
|
|
167
|
+
const auditFile = path.join(cwd, '.planning', `${version}-MILESTONE-AUDIT.md`);
|
|
168
|
+
if (fs.existsSync(auditFile)) {
|
|
169
|
+
fs.renameSync(auditFile, path.join(archiveDir, `${version}-MILESTONE-AUDIT.md`));
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
// Create/append MILESTONES.md entry
|
|
173
|
+
const accomplishmentsList = accomplishments.map(a => `- ${a}`).join('\n');
|
|
174
|
+
const milestoneEntry = `## ${version} ${milestoneName} (Shipped: ${today})\n\n**Phases completed:** ${phaseCount} phases, ${totalPlans} plans, ${totalTasks} tasks\n\n**Key accomplishments:**\n${accomplishmentsList || '- (none recorded)'}\n\n---\n\n`;
|
|
175
|
+
|
|
176
|
+
if (fs.existsSync(milestonesPath)) {
|
|
177
|
+
const existing = fs.readFileSync(milestonesPath, 'utf-8');
|
|
178
|
+
if (!existing.trim()) {
|
|
179
|
+
// Empty file — treat like new
|
|
180
|
+
fs.writeFileSync(milestonesPath, normalizeMd(`# Milestones\n\n${milestoneEntry}`), 'utf-8');
|
|
181
|
+
} else {
|
|
182
|
+
// Insert after the header line(s) for reverse chronological order (newest first)
|
|
183
|
+
const headerMatch = existing.match(/^(#{1,3}\s+[^\n]*\n\n?)/);
|
|
184
|
+
if (headerMatch) {
|
|
185
|
+
const header = headerMatch[1];
|
|
186
|
+
const rest = existing.slice(header.length);
|
|
187
|
+
fs.writeFileSync(milestonesPath, normalizeMd(header + milestoneEntry + rest), 'utf-8');
|
|
188
|
+
} else {
|
|
189
|
+
// No recognizable header — prepend the entry
|
|
190
|
+
fs.writeFileSync(milestonesPath, normalizeMd(milestoneEntry + existing), 'utf-8');
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
} else {
|
|
194
|
+
fs.writeFileSync(milestonesPath, normalizeMd(`# Milestones\n\n${milestoneEntry}`), 'utf-8');
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
// Update STATE.md — use shared helpers that handle both **bold:** and plain Field: formats
|
|
198
|
+
if (fs.existsSync(statePath)) {
|
|
199
|
+
let stateContent = fs.readFileSync(statePath, 'utf-8');
|
|
200
|
+
|
|
201
|
+
stateContent = stateReplaceFieldWithFallback(stateContent, 'Status', null, `${version} milestone complete`);
|
|
202
|
+
stateContent = stateReplaceFieldWithFallback(stateContent, 'Last Activity', 'Last activity', today);
|
|
203
|
+
stateContent = stateReplaceFieldWithFallback(stateContent, 'Last Activity Description', null,
|
|
204
|
+
`${version} milestone completed and archived`);
|
|
205
|
+
|
|
206
|
+
writeStateMd(statePath, stateContent, cwd);
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
// Archive phase directories if requested
|
|
210
|
+
let phasesArchived = false;
|
|
211
|
+
if (options.archivePhases) {
|
|
212
|
+
try {
|
|
213
|
+
const phaseArchiveDir = path.join(archiveDir, `${version}-phases`);
|
|
214
|
+
fs.mkdirSync(phaseArchiveDir, { recursive: true });
|
|
215
|
+
|
|
216
|
+
const phaseEntries = fs.readdirSync(phasesDir, { withFileTypes: true });
|
|
217
|
+
const phaseDirNames = phaseEntries.filter(e => e.isDirectory()).map(e => e.name);
|
|
218
|
+
let archivedCount = 0;
|
|
219
|
+
for (const dir of phaseDirNames) {
|
|
220
|
+
if (!isDirInMilestone(dir)) continue;
|
|
221
|
+
fs.renameSync(path.join(phasesDir, dir), path.join(phaseArchiveDir, dir));
|
|
222
|
+
archivedCount++;
|
|
223
|
+
}
|
|
224
|
+
phasesArchived = archivedCount > 0;
|
|
225
|
+
} catch { /* intentionally empty */ }
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
const result = {
|
|
229
|
+
version,
|
|
230
|
+
name: milestoneName,
|
|
231
|
+
date: today,
|
|
232
|
+
phases: phaseCount,
|
|
233
|
+
plans: totalPlans,
|
|
234
|
+
tasks: totalTasks,
|
|
235
|
+
accomplishments,
|
|
236
|
+
archived: {
|
|
237
|
+
roadmap: fs.existsSync(path.join(archiveDir, `${version}-ROADMAP.md`)),
|
|
238
|
+
requirements: fs.existsSync(path.join(archiveDir, `${version}-REQUIREMENTS.md`)),
|
|
239
|
+
audit: fs.existsSync(path.join(archiveDir, `${version}-MILESTONE-AUDIT.md`)),
|
|
240
|
+
phases: phasesArchived,
|
|
241
|
+
},
|
|
242
|
+
milestones_updated: true,
|
|
243
|
+
state_updated: fs.existsSync(statePath),
|
|
244
|
+
};
|
|
245
|
+
|
|
246
|
+
output(result, raw);
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
// Command handlers exposed by this module.
module.exports = { cmdRequirementsMarkComplete, cmdMilestoneComplete };
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
// Builds one profile record ({ quality, balanced, budget }) from positional model names,
// keeping the per-agent entries below compact and uniform.
const profileRow = (quality, balanced, budget) => ({ quality, balanced, budget });

/**
 * Mapping of GSD agent to model for each profile.
 *
 * Should be in sync with the profiles table in `cap/references/model-profiles.md`. But
 * possibly worth making this the single source of truth at some point, and removing the markdown
 * reference table in favor of programmatically determining the model to use for an agent (which
 * would be faster, use fewer tokens, and be less error-prone).
 */
const MODEL_PROFILES = {
  'gsd-planner': profileRow('opus', 'opus', 'sonnet'),
  'gsd-roadmapper': profileRow('opus', 'sonnet', 'sonnet'),
  'gsd-executor': profileRow('opus', 'sonnet', 'sonnet'),
  'gsd-phase-researcher': profileRow('opus', 'sonnet', 'haiku'),
  'gsd-project-researcher': profileRow('opus', 'sonnet', 'haiku'),
  'gsd-research-synthesizer': profileRow('sonnet', 'sonnet', 'haiku'),
  'gsd-debugger': profileRow('opus', 'sonnet', 'sonnet'),
  'gsd-codebase-mapper': profileRow('sonnet', 'haiku', 'haiku'),
  'gsd-verifier': profileRow('sonnet', 'sonnet', 'haiku'),
  'gsd-plan-checker': profileRow('sonnet', 'sonnet', 'haiku'),
  'gsd-integration-checker': profileRow('sonnet', 'sonnet', 'haiku'),
  'gsd-nyquist-auditor': profileRow('sonnet', 'sonnet', 'haiku'),
  'gsd-ui-researcher': profileRow('opus', 'sonnet', 'haiku'),
  'gsd-ui-checker': profileRow('sonnet', 'sonnet', 'haiku'),
  'gsd-ui-auditor': profileRow('sonnet', 'sonnet', 'haiku'),
};

// Every agent entry carries the same profile keys, so any single entry yields the full set.
const VALID_PROFILES = Object.keys(MODEL_PROFILES['gsd-planner']);
|
|
27
|
+
|
|
28
|
+
/**
 * Formats the agent-to-model mapping as a human-readable table (in string format).
 *
 * @param {Object<string, string>} agentToModelMap - A mapping from agent to model
 * @returns {string} A formatted table string
 */
function formatAgentToModelMapAsTable(agentToModelMap) {
  const agents = Object.keys(agentToModelMap);
  // Column widths: wide enough for the header label and the longest cell value.
  const agentWidth = Math.max('Agent'.length, ...agents.map((a) => a.length));
  const modelWidth = Math.max(
    'Model'.length,
    ...agents.map((a) => agentToModelMap[a].length)
  );
  // Renders one " left │ right" line; trailing pad keeps the right column edge straight.
  const renderRow = (left, right) =>
    ' ' + left.padEnd(agentWidth) + ' │ ' + right.padEnd(modelWidth);
  const rows = [
    renderRow('Agent', 'Model'),
    '─'.repeat(agentWidth + 2) + '┼' + '─'.repeat(modelWidth + 2),
    ...agents.map((agent) => renderRow(agent, agentToModelMap[agent])),
  ];
  // Trailing '\n' matches the original accumulator, which appended one per row.
  return rows.join('\n') + '\n';
}
|
|
48
|
+
|
|
49
|
+
/**
 * Returns a mapping from agent to model for the given model profile.
 *
 * @param {string} normalizedProfile - The normalized (lowercase and trimmed) profile name
 * @returns {Object<string, string>} A mapping from agent to model for the given profile
 */
function getAgentToModelMapForProfile(normalizedProfile) {
  // Project each agent's { profile: model } record down to the single model
  // selected by this profile, preserving the agent ordering of MODEL_PROFILES.
  return Object.fromEntries(
    Object.entries(MODEL_PROFILES).map(([agent, profileToModelMap]) => [
      agent,
      profileToModelMap[normalizedProfile],
    ])
  );
}
|
|
62
|
+
|
|
63
|
+
module.exports = {
|
|
64
|
+
MODEL_PROFILES,
|
|
65
|
+
VALID_PROFILES,
|
|
66
|
+
formatAgentToModelMapAsTable,
|
|
67
|
+
getAgentToModelMapForProfile,
|
|
68
|
+
};
|