agile-context-engineering 0.1.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +30 -0
- package/README.md +272 -78
- package/agents/ace-code-discovery-analyst.md +245 -0
- package/agents/ace-code-integration-analyst.md +248 -0
- package/agents/ace-code-reviewer.md +375 -0
- package/agents/ace-product-owner.md +361 -0
- package/agents/ace-project-researcher.md +606 -0
- package/agents/ace-research-synthesizer.md +228 -0
- package/agents/ace-technical-application-architect.md +287 -0
- package/agents/ace-wiki-mapper.md +334 -0
- package/agile-context-engineering/src/ace-tools.js +2881 -0
- package/agile-context-engineering/src/ace-tools.test.js +1089 -0
- package/agile-context-engineering/templates/_command.md +54 -0
- package/agile-context-engineering/templates/_workflow.xml +17 -0
- package/agile-context-engineering/templates/config.json +0 -0
- package/agile-context-engineering/templates/product/external-solution.xml +832 -0
- package/agile-context-engineering/templates/product/feature.xml +361 -0
- package/agile-context-engineering/templates/product/integration-solution.xml +0 -0
- package/agile-context-engineering/templates/product/product-backlog.xml +231 -0
- package/agile-context-engineering/templates/product/product-vision.xml +227 -0
- package/agile-context-engineering/templates/product/story-integration-solution.xml +1014 -0
- package/agile-context-engineering/templates/product/story-technical-solution.xml +1025 -0
- package/agile-context-engineering/templates/product/story-wiki.xml +190 -0
- package/agile-context-engineering/templates/product/story.xml +451 -0
- package/agile-context-engineering/templates/wiki/coding-standards.xml +493 -0
- package/agile-context-engineering/templates/wiki/decizions.xml +115 -0
- package/agile-context-engineering/templates/wiki/guide.xml +137 -0
- package/agile-context-engineering/templates/wiki/module-discovery.xml +174 -0
- package/agile-context-engineering/templates/wiki/pattern.xml +159 -0
- package/agile-context-engineering/templates/wiki/subsystem-architecture.xml +343 -0
- package/agile-context-engineering/templates/wiki/subsystem-structure.xml +235 -0
- package/agile-context-engineering/templates/wiki/system-architecture.xml +254 -0
- package/agile-context-engineering/templates/wiki/system-cross-cutting.xml +197 -0
- package/agile-context-engineering/templates/wiki/system-structure.xml +178 -0
- package/agile-context-engineering/templates/wiki/system.xml +381 -0
- package/agile-context-engineering/templates/wiki/tech-debt-index.xml +125 -0
- package/agile-context-engineering/templates/wiki/testing-framework.xml +283 -0
- package/agile-context-engineering/templates/wiki/wiki-readme.xml +276 -0
- package/agile-context-engineering/utils/questioning.xml +111 -0
- package/agile-context-engineering/utils/ui-formatting.md +300 -0
- package/agile-context-engineering/workflows/execute-story.xml +1145 -0
- package/agile-context-engineering/workflows/help.xml +540 -0
- package/agile-context-engineering/workflows/init-coding-standards.xml +386 -0
- package/agile-context-engineering/workflows/map-story.xml +797 -0
- package/agile-context-engineering/workflows/map-subsystem.xml +1177 -0
- package/agile-context-engineering/workflows/map-system.xml +672 -0
- package/agile-context-engineering/workflows/plan-backlog.xml +1356 -0
- package/agile-context-engineering/workflows/plan-feature.xml +1495 -0
- package/agile-context-engineering/workflows/plan-product-vision.xml +342 -0
- package/agile-context-engineering/workflows/plan-story.xml +909 -0
- package/agile-context-engineering/workflows/research-external-solution.xml +659 -0
- package/agile-context-engineering/workflows/research-integration-solution.xml +712 -0
- package/agile-context-engineering/workflows/research-story-wiki.xml +474 -0
- package/agile-context-engineering/workflows/research-technical-solution.xml +762 -0
- package/agile-context-engineering/workflows/review-story.xml +281 -0
- package/bin/install.js +102 -166
- package/commands/ace/execute-story.md +137 -0
- package/commands/ace/help.md +93 -0
- package/commands/ace/init-coding-standards.md +83 -0
- package/commands/ace/map-story.md +156 -0
- package/commands/ace/map-subsystem.md +138 -0
- package/commands/ace/map-system.md +92 -0
- package/commands/ace/plan-backlog.md +83 -0
- package/commands/ace/plan-feature.md +89 -0
- package/commands/ace/plan-product-vision.md +81 -0
- package/commands/ace/plan-story.md +145 -0
- package/commands/ace/research-external-solution.md +138 -0
- package/commands/ace/research-integration-solution.md +135 -0
- package/commands/ace/research-story-wiki.md +116 -0
- package/commands/ace/research-technical-solution.md +147 -0
- package/commands/ace/review-story.md +109 -0
- package/package.json +5 -8
- package/agents/executor.md +0 -88
- package/agents/planner.md +0 -78
- package/agents/researcher.md +0 -77
- package/agents/verifier.md +0 -116
- package/commands/ace-execute-story.md +0 -114
- package/commands/ace-init.md +0 -254
- package/commands/ace-plan-epic.md +0 -79
- package/commands/ace-plan-feature.md +0 -78
- package/commands/ace-plan-project.md +0 -205
- package/commands/ace-plan-story.md +0 -97
- package/commands/ace-refine-story.md +0 -90
- package/commands/ace-verify-story.md +0 -127
|
@@ -0,0 +1,2881 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* ACE Tools — CLI utility for ACE workflow operations
|
|
5
|
+
*
|
|
6
|
+
* Inspired by GSD's gsd-tools.js (https://github.com/gsd-build/get-shit-done).
|
|
7
|
+
* Centralizes: config loading, model resolution, path checks, environment detection, slug/timestamp generation.
|
|
8
|
+
*
|
|
9
|
+
* Usage: node ace-tools.js <command> [args] [--raw]
|
|
10
|
+
*
|
|
11
|
+
* Atomic Commands:
|
|
12
|
+
* load-config Load ACE config with defaults
|
|
13
|
+
* resolve-model <agent-type> Get model for agent based on profile
|
|
14
|
+
* verify-path-exists <path> Check file/directory existence
|
|
15
|
+
* generate-slug <text> Convert text to URL-safe slug
|
|
16
|
+
* current-timestamp [format] Get timestamp (full|date|filename)
|
|
17
|
+
*
|
|
18
|
+
* Compound Commands:
|
|
19
|
+
* init new-project Environment detection for project init (status dashboard)
|
|
20
|
+
* init product-vision Environment detection for product vision upsert
|
|
21
|
+
* init coding-standards Environment detection for coding standards init
|
|
22
|
+
* init map-system Environment detection for map-system workflow
|
|
23
|
+
* init plan-backlog Environment detection for plan-backlog workflow
|
|
24
|
+
* init plan-story <story> Environment detection for plan-story workflow (deep questioning)
|
|
25
|
+
* init research-story <story> Validate story, extract metadata/requirements/wiki refs, compute paths
|
|
26
|
+
* init execute-story <story> Environment detection for execute-story workflow (execution context)
|
|
27
|
+
* init setup-github Detect gh CLI, repo, and list GitHub Projects
|
|
28
|
+
*
|
|
29
|
+
* ensure-settings Create .ace/settings.json with defaults if missing
|
|
30
|
+
* write-github-settings Write GitHub Project settings (key=value args)
|
|
31
|
+
* write-agent-teams <true|false> Enable/disable agent teams in ACE + Claude Code settings
|
|
32
|
+
* sync-agent-teams Sync agent_teams from runtime settings.json (source of truth) to .ace/settings.json
|
|
33
|
+
*
|
|
34
|
+
* Story Commands:
|
|
35
|
+
* story update-state Update story status across story file, feature file, and product backlog
|
|
36
|
+
*
|
|
37
|
+
* GitHub Commands:
|
|
38
|
+
* github resolve-fields Resolve native issue type IDs and project field IDs
|
|
39
|
+
* github create-issue Create issue, set type, add to project, set fields
|
|
40
|
+
* github update-issue Update an existing issue's title, body, and optionally project fields
|
|
41
|
+
* github sync-story Sync story/feature body and project status to GitHub
|
|
42
|
+
* github fetch-issues Fetch all Epics/Features from GitHub Project with full fields
|
|
43
|
+
*/
|
|
44
|
+
|
|
45
|
+
const fs = require('fs');
|
|
46
|
+
const path = require('path');
|
|
47
|
+
|
|
48
|
+
// ─── Runtime Detection ───────────────────────────────────────────────────────
|
|
49
|
+
|
|
50
|
+
/**
 * Detect the runtime config directory name from where this script is installed.
 * Script location: <base>/<config-dir>/agile-context-engineering/src/ace-tools.js
 * Returns '.claude' or '.opencode' depending on which runtime installed ACE.
 */
function getRuntimeConfigDirName() {
  // Walk two levels up from src/: src -> agile-context-engineering -> <config-dir>
  const candidate = path.basename(path.dirname(path.dirname(__dirname)));
  if (candidate === '.claude' || candidate === '.opencode') {
    return candidate;
  }
  // Fallback for development/testing (running from repo source)
  return '.claude';
}
|
|
65
|
+
|
|
66
|
+
// Resolved once at module load: '.claude' or '.opencode' (see getRuntimeConfigDirName).
const RUNTIME_CONFIG_DIR = getRuntimeConfigDirName();
|
|
67
|
+
|
|
68
|
+
// ─── Model Profile Table ─────────────────────────────────────────────────────
|
|
69
|
+
|
|
70
|
+
// Per-agent model selection table. Each ACE agent type maps the active cost
// profile ('quality' | 'balanced' | 'budget') to a Claude model alias.
// Consumed by resolveModelInternal(); agent types missing from this table
// fall back to 'sonnet' there.
const MODEL_PROFILES = {
  'ace-product-owner': { quality: 'opus', balanced: 'sonnet', budget: 'sonnet' },
  'ace-project-researcher': { quality: 'opus', balanced: 'sonnet', budget: 'haiku' },
  'ace-research-synthesizer': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
  'ace-wiki-mapper': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
  // Code-analysis agents keep 'opus' even on the balanced profile.
  'ace-code-integration-analyst': { quality: 'opus', balanced: 'opus', budget: 'sonnet' },
  'ace-code-discovery-analyst': { quality: 'opus', balanced: 'opus', budget: 'sonnet' },
  'ace-executor': { quality: 'opus', balanced: 'sonnet', budget: 'sonnet' },
  'ace-code-reviewer': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
};
|
|
80
|
+
|
|
81
|
+
// ─── Helpers ──────────────────────────────────────────────────────────────────
|
|
82
|
+
|
|
83
|
+
/**
 * Print a command result to stdout and terminate with exit code 0.
 * With raw=true and a defined rawValue, prints the bare value; otherwise
 * pretty-prints the full result object as JSON.
 */
function output(result, raw, rawValue) {
  const useRaw = raw && rawValue !== undefined;
  const text = useRaw ? String(rawValue) : JSON.stringify(result, null, 2);
  process.stdout.write(text);
  process.exit(0);
}
|
|
91
|
+
|
|
92
|
+
/**
 * Print an error message to stderr and terminate with exit code 1.
 */
function error(message) {
  process.stderr.write(`Error: ${message}\n`);
  process.exit(1);
}
|
|
96
|
+
|
|
97
|
+
/**
 * Load .ace/config.json, filling every missing key (including nested github
 * fields) from built-in defaults. A missing or unparseable file yields the
 * defaults unchanged.
 */
function loadConfig(cwd) {
  const defaults = {
    version: '0.1.0',
    projectName: '',
    description: '',
    storage: 'local',
    model_profile: 'quality',
    commit_docs: true,
    github: {
      enabled: false,
      repo: null,
      labels: {
        epic: 'ace:epic',
        feature: 'ace:feature',
        story: 'ace:story',
        task: 'ace:task',
      },
    },
    createdAt: '',
  };

  try {
    const configPath = path.join(cwd, '.ace', 'config.json');
    const parsed = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
    const gh = parsed.github ?? {};
    const labels = gh.labels ?? {};
    return {
      version: parsed.version ?? defaults.version,
      projectName: parsed.projectName ?? defaults.projectName,
      description: parsed.description ?? defaults.description,
      storage: parsed.storage ?? defaults.storage,
      model_profile: parsed.model_profile ?? defaults.model_profile,
      commit_docs: parsed.commit_docs ?? defaults.commit_docs,
      github: {
        enabled: gh.enabled ?? defaults.github.enabled,
        repo: gh.repo ?? defaults.github.repo,
        labels: {
          epic: labels.epic ?? defaults.github.labels.epic,
          feature: labels.feature ?? defaults.github.labels.feature,
          story: labels.story ?? defaults.github.labels.story,
          task: labels.task ?? defaults.github.labels.task,
        },
      },
      createdAt: parsed.createdAt ?? defaults.createdAt,
    };
  } catch {
    // Missing file or invalid JSON — fall back to defaults wholesale.
    return defaults;
  }
}
|
|
145
|
+
|
|
146
|
+
/**
 * Check whether a file or directory exists. Relative paths are resolved
 * against cwd; absolute paths are used as-is.
 */
function pathExistsInternal(cwd, targetPath) {
  const resolved = path.isAbsolute(targetPath) ? targetPath : path.join(cwd, targetPath);
  return fs.existsSync(resolved);
}
|
|
155
|
+
|
|
156
|
+
/**
 * Convert arbitrary text to a lowercase URL-safe slug: runs of non-alphanumeric
 * characters become single hyphens; leading/trailing hyphens are stripped.
 * Returns null for falsy input.
 */
function generateSlugInternal(text) {
  if (!text) return null;
  const collapsed = text.toLowerCase().replace(/[^a-z0-9]+/g, '-');
  return collapsed.replace(/^-+/, '').replace(/-+$/, '');
}
|
|
160
|
+
|
|
161
|
+
/**
 * Resolve the model alias for an agent type under the configured profile.
 * Unknown agent types resolve to 'sonnet'; an unknown profile falls back to
 * the agent's 'balanced' entry, then 'sonnet'.
 */
function resolveModelInternal(cwd, agentType) {
  const table = MODEL_PROFILES[agentType];
  if (!table) return 'sonnet';
  const profile = loadConfig(cwd).model_profile || 'balanced';
  return table[profile] || table['balanced'] || 'sonnet';
}
|
|
168
|
+
|
|
169
|
+
/**
 * Detect existing code files by walking up to maxDepth levels.
 * Cross-platform alternative to `find` shell command.
 * Stops after 5 matches; skips well-known generated/vendored directories.
 */
function detectCodeFiles(cwd, maxDepth) {
  const codeExtensions = new Set(['.cs', '.ts', '.js', '.py', '.go', '.rs', '.swift', '.java', '.tsx', '.jsx']);
  const ignoreDirs = new Set(['node_modules', '.git', '.ace', '.gsd', 'dist', 'build', '__pycache__']);
  const matches = [];

  const visit = (dir, depth) => {
    if (depth > maxDepth || matches.length >= 5) return;
    let entries;
    try {
      entries = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      return; // unreadable directory — skip silently
    }
    for (const entry of entries) {
      if (matches.length >= 5) return;
      if (entry.isDirectory()) {
        if (!ignoreDirs.has(entry.name)) visit(path.join(dir, entry.name), depth + 1);
      } else if (entry.isFile() && codeExtensions.has(path.extname(entry.name))) {
        matches.push(path.join(dir, entry.name));
      }
    }
  };

  visit(cwd, 0);
  return matches;
}
|
|
204
|
+
|
|
205
|
+
// ─── Story Parsing Helpers ────────────────────────────────────────────────────
|
|
206
|
+
|
|
207
|
+
/**
 * Read a file as UTF-8 text, returning null on any failure instead of throwing.
 */
function safeReadFile(filePath) {
  try {
    return fs.readFileSync(filePath, 'utf-8');
  } catch {
    return null;
  }
}
|
|
214
|
+
|
|
215
|
+
/**
 * Classify a story parameter as file path, GitHub URL, or issue number.
 * Returns { type, filePath?, repo?, issueNumber?, reason? } where type is one
 * of 'github-url' | 'issue-number' | 'file' | 'invalid' | null.
 */
function classifyStoryParam(param) {
  if (!param) return { type: null, reason: 'No story parameter provided' };
  const trimmed = param.trim();
  if (/^https?:\/\/github\.com\//.test(trimmed)) {
    const match = trimmed.match(/github\.com\/([^/]+\/[^/]+)\/issues\/(\d+)/);
    // Explicit radix 10: never rely on parseInt's default-radix inference.
    if (match) return { type: 'github-url', repo: match[1], issueNumber: parseInt(match[2], 10) };
    return { type: 'invalid', reason: 'Unrecognized GitHub URL format. Expected: https://github.com/owner/repo/issues/123' };
  }
  if (/^\d+$/.test(trimmed)) {
    return { type: 'issue-number', issueNumber: parseInt(trimmed, 10) };
  }
  // Anything else is treated as a (possibly relative) file path.
  return { type: 'file', filePath: trimmed };
}
|
|
232
|
+
|
|
233
|
+
/**
 * Extract a markdown section between a heading and the next heading of equal
 * or higher level. Returns the section content (without the heading itself),
 * or null when the heading is absent or the section is empty.
 * headingLevel: number of '#' chars (2 for ##, 3 for ###).
 */
function extractMarkdownSection(content, sectionName, headingLevel) {
  const hashes = '#'.repeat(headingLevel);
  // Escape regex metacharacters in the section name before embedding it.
  const escaped = sectionName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

  const heading = new RegExp(`^${hashes}\\s+${escaped}\\s*$`, 'm').exec(content);
  if (!heading) return null;

  // Everything after the heading line, up to the next heading of level <= headingLevel.
  const body = content.slice(heading.index + heading[0].length);
  const terminator = new RegExp(`^#{1,${headingLevel}}\\s`, 'm').exec(body);
  const section = (terminator ? body.slice(0, terminator.index) : body).trim();
  return section || null;
}
|
|
257
|
+
|
|
258
|
+
/**
 * Parse the story markdown header to extract metadata and parent context.
 *
 * Expected format:
 *   # S3: Display OAuth Provider Buttons
 *   **Feature**: F3 OAuth2 Login Flow | **Epic**: #45 User Authentication
 *   **Status**: Refined | **Size**: 3 | **Sprint**: Sprint 2 | **Link**: [#95](url)
 *
 * Missing pieces are left null; the returned shape is always complete.
 */
function extractStoryMetadata(content) {
  const result = {
    id: null, title: null, status: null, size: null, sprint: null, link: null,
    feature: { id: null, title: null },
    epic: { id: null, title: null },
  };
  if (!content) return result;

  // Header: "# ID: Title"
  const header = content.match(/^#\s+([^:\n]+?):\s+(.+)$/m);
  if (header) {
    result.id = header[1].trim();
    result.title = header[2].trim();
  }

  // Parent line: "**Feature**: F3 OAuth2 Login Flow | **Epic**: #45 User Authentication"
  const parentLine = content.match(/\*\*Feature\*\*:\s*(.+?)\s*\|\s*\*\*Epic\*\*:\s*(.+)$/m);
  if (parentLine) {
    // "F3 OAuth2 Login Flow" → { id: 'F3', title: 'OAuth2 Login Flow' };
    // when there is no separate id token, the whole string becomes the title.
    const splitIdTitle = (str) => {
      const parts = str.match(/^(\S+)\s+(.+)$/);
      return parts ? { id: parts[1], title: parts[2] } : { id: null, title: str };
    };
    result.feature = splitIdTitle(parentLine[1].trim());
    result.epic = splitIdTitle(parentLine[2].trim());
  }

  // Pipe-separated fields on the status line, each captured up to '|' or '*'.
  const captureField = (key, pattern) => {
    const m = content.match(pattern);
    if (m) result[key] = m[1].trim();
  };
  captureField('status', /\*\*Status\*\*:\s*([^|*]+)/);
  captureField('size', /\*\*Size\*\*:\s*([^|*]+)/);
  captureField('sprint', /\*\*Sprint\*\*:\s*([^|*]+)/);
  captureField('link', /\*\*Link\*\*:\s*([^|*\n]+)/);

  return result;
}
|
|
318
|
+
|
|
319
|
+
/**
 * Extract a GitHub issue number from a Link field value.
 * Handles formats: "[#187](url)", "#187", "187"
 * Returns the issue number as an integer, or null if not parseable.
 */
function extractIssueNumber(linkStr) {
  if (!linkStr) return null;
  const hashMatch = linkStr.match(/#(\d+)/);
  if (hashMatch) return parseInt(hashMatch[1], 10);
  // Bare number form ("187"): documented above, but the '#' regex alone
  // could never match it — handle it explicitly.
  const bare = linkStr.trim();
  return /^\d+$/.test(bare) ? parseInt(bare, 10) : null;
}
|
|
329
|
+
|
|
330
|
+
/**
 * Extract a GitHub issue number from a file's **Link** header field.
 * Reads the file (relative paths resolved against cwd), finds
 * "**Link**: [#N](url)", and returns the issue number or null.
 */
function extractIssueNumberFromFile(cwd, filePath) {
  if (!filePath) return null;
  const resolved = path.isAbsolute(filePath) ? filePath : path.join(cwd, filePath);
  const content = safeReadFile(resolved);
  const linkMatch = content?.match(/\*\*Link\*\*:\s*([^|*\n]+)/);
  return linkMatch ? extractIssueNumber(linkMatch[1].trim()) : null;
}
|
|
343
|
+
|
|
344
|
+
/**
 * Extract story requirements: user story statement, description, and the
 * number of acceptance-criteria scenarios ("### Scenario:" headings).
 */
function extractStoryRequirements(content) {
  const result = { user_story: null, description: null, acceptance_criteria_count: 0 };
  if (!content) return result;

  const userStory = extractMarkdownSection(content, 'User Story', 2);
  if (userStory) {
    // Strip blockquote prefixes ("> ") line by line.
    result.user_story = userStory.replace(/^>\s?/gm, '').trim();
  }

  const description = extractMarkdownSection(content, 'Description', 2);
  if (description) result.description = description.trim();

  const scenarios = content.match(/^###\s+Scenario:/gm) ?? [];
  result.acceptance_criteria_count = scenarios.length;

  return result;
}
|
|
370
|
+
|
|
371
|
+
/**
 * Parse the "## Relevant Wiki" section to extract structured wiki file references.
 * Returns { system_wide: string[], subsystem_docs: [{path, category, reason}], total_count }.
 */
function extractWikiReferences(content) {
  const result = { system_wide: [], subsystem_docs: [], total_count: 0 };
  if (!content) return result;

  const wikiSection = extractMarkdownSection(content, 'Relevant Wiki', 2);
  if (!wikiSection) return result;

  // Path-substring → category, checked in order (first match wins).
  const categoryMarkers = [
    ['/systems/', 'systems'],
    ['/patterns/', 'patterns'],
    ['/cross-cutting/', 'cross-cutting'],
    ['/guides/', 'guides'],
    ['/decisions/', 'decisions'],
    ['/architecture', 'architecture'],
  ];

  // Each reference line: "- `path` — reason" (em/en dash or hyphen separator).
  for (const m of wikiSection.matchAll(/^-\s+`([^`]+)`\s*[—–-]\s*(.+)$/gm)) {
    const docPath = m[1].trim();
    const reason = m[2].trim();
    if (docPath.includes('/system-wide/')) {
      result.system_wide.push(docPath);
      continue;
    }
    const marker = categoryMarkers.find(([needle]) => docPath.includes(needle));
    result.subsystem_docs.push({ path: docPath, category: marker ? marker[1] : 'other', reason });
  }

  result.total_count = result.system_wide.length + result.subsystem_docs.length;
  return result;
}
|
|
408
|
+
|
|
409
|
+
/**
 * Compute all story-related artifact paths and slugs from parent context.
 * Falls back to 'unknown-*' slugs when a slug cannot be generated.
 */
function computeStoryPaths(epicId, epicTitle, featureId, featureTitle, storyId, storyTitle) {
  const slugOf = (id, title, fallback) => generateSlugInternal(`${id}-${title}`) || fallback;
  const epicSlug = slugOf(epicId, epicTitle, 'unknown-epic');
  const featureSlug = slugOf(featureId, featureTitle, 'unknown-feature');
  const storySlug = slugOf(storyId, storyTitle, 'unknown-story');

  const featureDir = `.ace/artifacts/product/${epicSlug}/${featureSlug}`;
  const storyDir = `${featureDir}/${storySlug}`;

  return {
    epic_slug: epicSlug,
    feature_slug: featureSlug,
    story_slug: storySlug,
    story_dir: storyDir,
    story_file: `${storyDir}/${storySlug}.md`,
    external_analysis_file: `${storyDir}/external-analysis.md`,
    integration_analysis_file: `${storyDir}/integration-analysis.md`,
    feature_dir: featureDir,
    feature_file: `${featureDir}/${featureSlug}.md`,
  };
}
|
|
432
|
+
|
|
433
|
+
// ─── Commands ─────────────────────────────────────────────────────────────────
|
|
434
|
+
|
|
435
|
+
/**
 * `load-config` — print the merged ACE config (defaults filled in).
 */
function cmdLoadConfig(cwd, raw) {
  output(loadConfig(cwd), raw);
}
|
|
439
|
+
|
|
440
|
+
/**
 * `resolve-model <agent-type>` — print the model for an agent under the
 * configured profile; errors out (listing known agents) when the type is missing.
 */
function cmdResolveModel(cwd, agentType, raw) {
  if (!agentType) {
    error('agent-type required for resolve-model. Available: ' + Object.keys(MODEL_PROFILES).join(', '));
  }
  const model = resolveModelInternal(cwd, agentType);
  const { model_profile } = loadConfig(cwd);
  output({ model, agent: agentType, profile: model_profile }, raw, model);
}
|
|
448
|
+
|
|
449
|
+
/**
 * `verify-path-exists <path>` — print whether a file/directory exists.
 * Raw mode prints 'true'/'false'.
 */
function cmdVerifyPathExists(cwd, targetPath, raw) {
  if (!targetPath) {
    error('path required for verify-path-exists');
  }
  const exists = pathExistsInternal(cwd, targetPath);
  output({ exists, path: targetPath }, raw, String(exists));
}
|
|
456
|
+
|
|
457
|
+
/**
 * `generate-slug <text>` — print the URL-safe slug for the given text.
 */
function cmdGenerateSlug(text, raw) {
  if (!text) {
    error('text required for slug generation');
  }
  const slug = generateSlugInternal(text);
  output({ slug }, raw, slug);
}
|
|
464
|
+
|
|
465
|
+
/**
 * `current-timestamp [format]` — print the current UTC time.
 * Formats: 'date' (YYYY-MM-DD), 'filename' (ISO with ':'/'.' → '-', 'T' → '_',
 * trailing 'Z' dropped), anything else/'full' → full ISO-8601 string.
 */
function cmdCurrentTimestamp(format, raw) {
  const iso = new Date().toISOString();
  let value;
  if (format === 'date') {
    value = iso.split('T')[0];
  } else if (format === 'filename') {
    value = iso.replace(/[:.]/g, '-').replace('T', '_').split('Z')[0];
  } else {
    value = iso;
  }
  output({ timestamp: value, format }, raw, value);
}
|
|
482
|
+
|
|
483
|
+
// ─── Brownfield Detection (shared) ───────────────────────────────────────────
|
|
484
|
+
|
|
485
|
+
/**
 * Detect whether the project is brownfield (has existing code/manifests) or greenfield.
 * Returns { has_existing_code, has_package_file, is_brownfield, is_greenfield }.
 */
function detectBrownfieldStatus(cwd) {
  // Any code file within 3 directory levels counts as existing code.
  const hasExistingCode = detectCodeFiles(cwd, 3).length > 0;

  // Well-known dependency manifests, one per ecosystem.
  const packageFiles = [
    'package.json',      // Node.js
    'requirements.txt',  // Python (pip)
    'pyproject.toml',    // Python (modern)
    'Cargo.toml',        // Rust
    'go.mod',            // Go
    'Package.swift',     // Swift
    'pom.xml',           // Java (Maven)
    'build.gradle',      // Java/Kotlin (Gradle)
  ];

  // C# / .NET has no fixed manifest name — look for *.sln / *.csproj in the root.
  let hasDotnetProject = false;
  try {
    hasDotnetProject = fs.readdirSync(cwd).some(f => f.endsWith('.sln') || f.endsWith('.csproj'));
  } catch {
    // Unreadable project root — treat as no .NET project.
  }

  const hasPackageFile = hasDotnetProject || packageFiles.some(f => pathExistsInternal(cwd, f));
  const isBrownfield = hasExistingCode || hasPackageFile;

  return {
    has_existing_code: hasExistingCode,
    has_package_file: hasPackageFile,
    is_brownfield: isBrownfield,
    is_greenfield: !isBrownfield,
  };
}
|
|
524
|
+
|
|
525
|
+
// ─── Settings ─────────────────────────────────────────────────────────────────
|
|
526
|
+
|
|
527
|
+
// Defaults for .ace/settings.json. loadSettings() falls back to these per key;
// cmdEnsureSettings() writes a deep copy when the file does not exist yet.
const SETTINGS_DEFAULTS = {
  model_profile: 'balanced',
  commit_docs: true,
  agent_teams: false,
  // GitHub Project integration state (see `init setup-github`).
  github_project: {
    enabled: false,
    gh_installed: false, // whether the `gh` CLI was detected
    repo: '',            // e.g. 'owner/name'
    project_number: null,
    owner: '',
  },
};
|
|
539
|
+
|
|
540
|
+
/**
 * Load .ace/settings.json, filling missing keys (including nested
 * github_project fields) from SETTINGS_DEFAULTS. A missing or unparseable
 * file yields a fresh copy of the defaults.
 */
function loadSettings(cwd) {
  const settingsPath = path.join(cwd, '.ace', 'settings.json');
  const d = SETTINGS_DEFAULTS;
  try {
    const parsed = JSON.parse(fs.readFileSync(settingsPath, 'utf-8'));
    const gp = parsed.github_project ?? {};
    return {
      model_profile: parsed.model_profile ?? d.model_profile,
      commit_docs: parsed.commit_docs ?? d.commit_docs,
      agent_teams: parsed.agent_teams ?? d.agent_teams,
      github_project: {
        enabled: gp.enabled ?? d.github_project.enabled,
        gh_installed: gp.gh_installed ?? d.github_project.gh_installed,
        repo: gp.repo ?? d.github_project.repo,
        project_number: gp.project_number ?? d.github_project.project_number,
        owner: gp.owner ?? d.github_project.owner,
      },
    };
  } catch {
    // Deep copy so callers can mutate the result without touching the defaults.
    return JSON.parse(JSON.stringify(SETTINGS_DEFAULTS));
  }
}
|
|
561
|
+
|
|
562
|
+
/**
 * Persist a settings object to .ace/settings.json (pretty-printed, trailing
 * newline), creating the .ace directory if needed.
 */
function writeSettings(cwd, settings) {
  const aceDir = path.join(cwd, '.ace');
  fs.mkdirSync(aceDir, { recursive: true }); // no-op when it already exists
  const payload = JSON.stringify(settings, null, 2) + '\n';
  fs.writeFileSync(path.join(aceDir, 'settings.json'), payload, 'utf-8');
}
|
|
570
|
+
|
|
571
|
+
/**
 * `ensure-settings` — create .ace/settings.json with defaults if missing;
 * report { created, path, settings } either way.
 */
function cmdEnsureSettings(cwd, raw) {
  const settingsPath = path.join(cwd, '.ace', 'settings.json');
  if (pathExistsInternal(cwd, '.ace/settings.json')) {
    output({ created: false, path: settingsPath, settings: loadSettings(cwd) }, raw);
  } else {
    const defaults = JSON.parse(JSON.stringify(SETTINGS_DEFAULTS));
    writeSettings(cwd, defaults);
    output({ created: true, path: settingsPath, settings: defaults }, raw);
  }
}
|
|
584
|
+
|
|
585
|
+
/**
 * `setup github-project` — probe the local GitHub environment.
 *
 * Detects whether the `gh` CLI is installed, which repo the cwd belongs to,
 * and which GitHub Projects the repo owner has, then emits all of that
 * together with the currently persisted `github_project` settings so the
 * workflow can offer setup choices.
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 */
function cmdSetupGithubProject(cwd, raw) {
  const { execSync } = require('child_process');
  const settings = loadSettings(cwd);

  // Detect gh CLI — any throw (ENOENT, non-zero exit) means "not installed".
  let ghInstalled = false;
  try {
    execSync('gh --version', { stdio: 'pipe' });
    ghInstalled = true;
  } catch {}

  // Detect repo (owner/name) for the current working directory.
  let repo = '';
  let owner = '';
  if (ghInstalled) {
    try {
      repo = execSync('gh repo view --json nameWithOwner -q .nameWithOwner', {
        cwd,
        stdio: ['pipe', 'pipe', 'pipe'],
        encoding: 'utf-8',
      }).trim();
      owner = repo.split('/')[0] || '';
    } catch {}
  }

  // List projects for the owner. `owner` came from external command output
  // and is interpolated into a shell command below, so restrict it to
  // GitHub's login character set to rule out shell injection.
  const SAFE_OWNER = /^[A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?$/;
  let projects = [];
  if (ghInstalled && owner && SAFE_OWNER.test(owner)) {
    try {
      const projectsJson = execSync(`gh project list --owner ${owner} --limit 10 --format json`, {
        cwd,
        stdio: ['pipe', 'pipe', 'pipe'],
        encoding: 'utf-8',
      }).trim();
      const parsed = JSON.parse(projectsJson);
      // gh may return either `{ projects: [...] }` or a bare array depending
      // on version; anything else yields an empty list instead of throwing.
      const list = Array.isArray(parsed)
        ? parsed
        : (Array.isArray(parsed?.projects) ? parsed.projects : []);
      projects = list.map(p => ({
        number: p.number,
        title: p.title,
      }));
    } catch {}
  }

  output({
    gh_installed: ghInstalled,
    repo,
    owner,
    projects,
    current_settings: settings.github_project,
  }, raw);
}
|
|
635
|
+
|
|
636
|
+
/**
 * `write github-settings key=value ...` — update the persisted
 * `github_project` section of `.ace/settings.json`.
 *
 * Recognized keys:
 *   enabled, gh_installed  — booleans; only the literal string "true" enables
 *   repo, owner            — stored verbatim
 *   project_number         — integer, or the literal "null" to clear
 * Args without "=" and unknown keys are silently ignored.
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 * @param {string[]} extraArgs - CLI args in `key=value` form.
 */
function cmdWriteGithubSettings(cwd, raw, extraArgs) {
  const settings = loadSettings(cwd);

  // Parse key=value pairs from extra args
  for (const arg of extraArgs) {
    const eqIndex = arg.indexOf('=');
    if (eqIndex === -1) continue;
    const key = arg.substring(0, eqIndex);
    const value = arg.substring(eqIndex + 1);

    switch (key) {
      case 'enabled':
        settings.github_project.enabled = value === 'true';
        break;
      case 'gh_installed':
        settings.github_project.gh_installed = value === 'true';
        break;
      case 'repo':
        settings.github_project.repo = value;
        break;
      case 'project_number': {
        if (value === 'null') {
          settings.github_project.project_number = null;
        } else {
          // Guard non-numeric input: previously `parseInt` could store NaN,
          // which surprises any in-process reader doing arithmetic on it.
          const n = Number.parseInt(value, 10);
          settings.github_project.project_number = Number.isNaN(n) ? null : n;
        }
        break;
      }
      case 'owner':
        settings.github_project.owner = value;
        break;
    }
  }

  writeSettings(cwd, settings);
  output({ written: true, settings }, raw);
}
|
|
668
|
+
|
|
669
|
+
/**
 * `sync agent-teams` — mirror the runtime agent-teams flag into ACE settings.
 *
 * The runtime settings file (e.g. .claude/settings.json or
 * .opencode/settings.json) is the source of truth: its
 * `env.CLAUDE_CODE_EXPERIMENTAL_AGENT_TEAMS` value of '1' or 'true' means
 * enabled; a missing or unreadable file counts as disabled. ACE settings are
 * rewritten only when they actually differ.
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 */
function cmdSyncAgentTeams(cwd, raw) {
  const runtimeSettingsPath = path.join(cwd, RUNTIME_CONFIG_DIR, 'settings.json');

  let teamsEnabled = false;
  try {
    const runtimeSettings = JSON.parse(fs.readFileSync(runtimeSettingsPath, 'utf-8'));
    const flag = runtimeSettings?.env?.CLAUDE_CODE_EXPERIMENTAL_AGENT_TEAMS;
    teamsEnabled = flag === '1' || flag === 'true';
  } catch {
    // File doesn't exist or is invalid — treat as disabled.
  }

  // Rewrite ACE settings only on an actual change.
  const aceSettings = loadSettings(cwd);
  const needsSync = aceSettings.agent_teams !== teamsEnabled;
  if (needsSync) {
    aceSettings.agent_teams = teamsEnabled;
    writeSettings(cwd, aceSettings);
  }

  output({ agent_teams: teamsEnabled, synced: needsSync }, raw);
}
|
|
692
|
+
|
|
693
|
+
/**
 * `write agent-teams <true|false>` — persist the agent-teams flag in both
 * ACE settings and the project's runtime settings (e.g. .claude/ or
 * .opencode/).
 *
 * Enabling sets `env.CLAUDE_CODE_EXPERIMENTAL_AGENT_TEAMS = '1'` in the
 * runtime settings file; disabling removes the variable and drops an `env`
 * object left empty by that removal. The runtime file is created if absent.
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 * @param {string[]} extraArgs - First element 'true' enables; anything else disables.
 */
function cmdWriteAgentTeamsSetting(cwd, raw, extraArgs) {
  const enabled = extraArgs[0] === 'true';

  // ACE's own settings file first.
  const aceSettings = loadSettings(cwd);
  aceSettings.agent_teams = enabled;
  writeSettings(cwd, aceSettings);

  // Then the project's runtime settings file.
  const runtimeDir = path.join(cwd, RUNTIME_CONFIG_DIR);
  const runtimeSettingsPath = path.join(runtimeDir, 'settings.json');

  let runtimeSettings = {};
  try {
    runtimeSettings = JSON.parse(fs.readFileSync(runtimeSettingsPath, 'utf-8'));
  } catch {
    // File doesn't exist or is invalid — start fresh.
  }

  if (enabled) {
    if (!runtimeSettings.env) {
      runtimeSettings.env = {};
    }
    runtimeSettings.env.CLAUDE_CODE_EXPERIMENTAL_AGENT_TEAMS = '1';
  } else if (runtimeSettings.env) {
    delete runtimeSettings.env.CLAUDE_CODE_EXPERIMENTAL_AGENT_TEAMS;
    // Don't leave an empty env object behind.
    if (Object.keys(runtimeSettings.env).length === 0) {
      delete runtimeSettings.env;
    }
  }

  if (!fs.existsSync(runtimeDir)) {
    fs.mkdirSync(runtimeDir, { recursive: true });
  }
  fs.writeFileSync(runtimeSettingsPath, JSON.stringify(runtimeSettings, null, 2) + '\n', 'utf-8');

  output({ written: true, agent_teams: enabled, settings: aceSettings, claude_settings: runtimeSettings }, raw);
}
|
|
732
|
+
|
|
733
|
+
// ─── Compound Commands ────────────────────────────────────────────────────────
|
|
734
|
+
|
|
735
|
+
/**
 * `init new-project` — environment probe for the new-project workflow.
 *
 * Emits resolved agent models, config flags, existing doc/planning artifacts,
 * brownfield status, and local tooling availability (git, gh CLI).
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 */
function cmdInitNewProject(cwd, raw) {
  const config = loadConfig(cwd);
  const brownfield = detectBrownfieldStatus(cwd);
  const exists = (rel) => pathExistsInternal(cwd, rel);

  // gh CLI availability — any throw means "not installed".
  let hasGhCli = false;
  try {
    const { execSync } = require('child_process');
    execSync('gh --version', { stdio: 'pipe' });
    hasGhCli = true;
  } catch {
    hasGhCli = false;
  }

  const hasProductVision = exists('.docs/product/product-vision.md');
  const hasCodebaseMap = exists('.ace/codebase');

  output({
    // Models (pre-resolved so workflows know which model to spawn each agent with)
    product_owner_model: resolveModelInternal(cwd, 'ace-product-owner'),
    researcher_model: resolveModelInternal(cwd, 'ace-project-researcher'),
    synthesizer_model: resolveModelInternal(cwd, 'ace-research-synthesizer'),

    // Config
    commit_docs: config.commit_docs,

    // Existing state
    has_product_vision: hasProductVision,
    has_system_architecture: exists('.docs/wiki/system-wide/system-architecture.md'),
    has_system_structure: exists('.docs/wiki/system-wide/system-structure.md'),
    has_coding_standards: exists('.docs/wiki/system-wide/coding-standards.md'),
    has_testing_framework: exists('.docs/wiki/system-wide/testing-framework.md'),
    // project_exists deliberately mirrors has_product_vision (same path check).
    project_exists: hasProductVision,
    has_codebase_map: hasCodebaseMap,
    planning_exists: exists('.ace'),

    // Brownfield detection
    ...brownfield,
    needs_codebase_map: brownfield.is_brownfield && !hasCodebaseMap,

    // Git state
    has_git: exists('.git'),

    // GitHub CLI
    has_gh_cli: hasGhCli,
  }, raw);
}
|
|
780
|
+
|
|
781
|
+
/**
 * `init coding-standards` — environment probe for the coding-standards
 * workflow: config flags, brownfield status, and which wiki documents
 * already exist for cross-referencing.
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 */
function cmdInitCodingStandards(cwd, raw) {
  const config = loadConfig(cwd);
  const brownfield = detectBrownfieldStatus(cwd);
  const exists = (rel) => pathExistsInternal(cwd, rel);

  output({
    // Config
    commit_docs: config.commit_docs,

    // Brownfield detection
    ...brownfield,

    // Existing coding standards
    has_coding_standards: exists('.docs/wiki/system-wide/coding-standards.md'),
    wiki_dir_exists: exists('.docs/wiki/system-wide'),

    // Existing wiki context (useful for cross-referencing)
    has_system_architecture: exists('.docs/wiki/system-wide/system-architecture.md'),
    has_system_structure: exists('.docs/wiki/system-wide/system-structure.md'),

    // Git state
    has_git: exists('.git'),
  }, raw);
}
|
|
806
|
+
|
|
807
|
+
/**
 * `init map-system` — environment probe for the system-wide wiki mapping
 * workflow: mapper model, config, brownfield status, which system-wide wiki
 * documents already exist, and a listing of all markdown files there.
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 */
function cmdInitMapSystem(cwd, raw) {
  const config = loadConfig(cwd);
  const brownfield = detectBrownfieldStatus(cwd);
  const exists = (rel) => pathExistsInternal(cwd, rel);

  const wikiDir = '.docs/wiki/system-wide';
  const wikiDirExists = exists(wikiDir);

  // Enumerate existing wiki markdown files if the directory exists;
  // a read failure simply yields an empty list.
  let existingWikiFiles = [];
  if (wikiDirExists) {
    try {
      existingWikiFiles = fs
        .readdirSync(path.join(cwd, wikiDir))
        .filter(name => name.endsWith('.md'));
    } catch {}
  }

  output({
    // Models
    mapper_model: resolveModelInternal(cwd, 'ace-wiki-mapper'),

    // Config
    commit_docs: config.commit_docs,

    // Brownfield detection
    ...brownfield,

    // Wiki directory state
    wiki_dir_exists: wikiDirExists,
    existing_wiki_files: existingWikiFiles,

    // Per-document existence
    has_system_structure: exists(path.join(wikiDir, 'system-structure.md')),
    has_system_architecture: exists(path.join(wikiDir, 'system-architecture.md')),
    has_testing_framework: exists(path.join(wikiDir, 'testing-framework.md')),
    has_coding_standards: exists(path.join(wikiDir, 'coding-standards.md')),

    // Git state
    has_git: exists('.git'),
  }, raw);
}
|
|
854
|
+
|
|
855
|
+
/**
 * `init map-subsystem` — environment probe for the subsystem wiki mapping
 * workflow: mapper model, config, brownfield status, and whether the
 * subsystems wiki directory exists yet.
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 */
function cmdInitMapSubsystem(cwd, raw) {
  const config = loadConfig(cwd);
  const brownfield = detectBrownfieldStatus(cwd);

  output({
    // Models
    mapper_model: resolveModelInternal(cwd, 'ace-wiki-mapper'),

    // Config
    commit_docs: config.commit_docs,

    // Brownfield detection
    ...brownfield,

    // Wiki directory state
    wiki_dir_exists: pathExistsInternal(cwd, '.docs/wiki/subsystems'),

    // Git state
    has_git: pathExistsInternal(cwd, '.git'),
  }, raw);
}
|
|
881
|
+
|
|
882
|
+
/**
 * `init product-vision` — environment probe for the product-vision workflow:
 * product-owner model, config, existing vision document, brownfield status,
 * architecture context, and git state.
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 */
function cmdInitProductVision(cwd, raw) {
  const config = loadConfig(cwd);
  const brownfield = detectBrownfieldStatus(cwd);
  const exists = (rel) => pathExistsInternal(cwd, rel);

  output({
    // Models
    product_owner_model: resolveModelInternal(cwd, 'ace-product-owner'),

    // Config
    commit_docs: config.commit_docs,

    // Existing state
    has_product_vision: exists('.docs/product/product-vision.md'),

    // Brownfield detection
    ...brownfield,

    // Architecture context
    has_system_architecture: exists('.docs/wiki/system-wide/system-architecture.md'),

    // Git state
    has_git: exists('.git'),
  }, raw);
}
|
|
908
|
+
|
|
909
|
+
/**
 * `init plan-backlog` — environment probe for the backlog planning workflow.
 *
 * Emits resolved models, config flags, product/research artifact existence,
 * wiki coverage (system-wide and per-subsystem), brownfield status, git
 * state, gh CLI availability, and the persisted GitHub Project settings.
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 */
function cmdInitPlanBacklog(cwd, raw) {
  const config = loadConfig(cwd);
  const brownfield = detectBrownfieldStatus(cwd);
  const exists = (rel) => pathExistsInternal(cwd, rel);

  // Wiki detection — system-wide
  const systemWikiDir = '.docs/wiki/system-wide';
  const hasSystemWiki = exists(systemWikiDir);

  // Wiki detection — subsystems (each subdirectory is one subsystem)
  const subsystemsWikiDir = '.docs/wiki/subsystems';
  const hasSubsystemsWiki = exists(subsystemsWikiDir);

  let subsystemNames = [];
  if (hasSubsystemsWiki) {
    try {
      subsystemNames = fs
        .readdirSync(path.join(cwd, subsystemsWikiDir), { withFileTypes: true })
        .filter(entry => entry.isDirectory())
        .map(entry => entry.name);
    } catch {}
  }

  // gh CLI availability — any throw means "not installed".
  let hasGhCli = false;
  try {
    const { execSync } = require('child_process');
    execSync('gh --version', { stdio: 'pipe' });
    hasGhCli = true;
  } catch {
    hasGhCli = false;
  }

  output({
    // Models
    product_owner_model: resolveModelInternal(cwd, 'ace-product-owner'),
    researcher_model: resolveModelInternal(cwd, 'ace-project-researcher'),

    // Config
    commit_docs: config.commit_docs,

    // Product artifacts
    has_product_vision: exists('.docs/product/product-vision.md'),
    has_product_backlog: exists('.ace/artifacts/product/product-backlog.md'),

    // Research artifacts (from previous runs)
    has_features_research: exists('.ace/research/FEATURES.md'),
    has_architecture_research: exists('.ace/research/ARCHITECTURE.md'),

    // Wiki analysis cache (from previous runs)
    has_wiki_analysis: exists('.ace/artifacts/wiki/wiki-analysis.md'),

    // Brownfield detection
    ...brownfield,

    // Wiki state — system-wide
    has_wiki: hasSystemWiki || hasSubsystemsWiki,
    has_wiki_system_wide: hasSystemWiki,
    has_system_architecture: exists(path.join(systemWikiDir, 'system-architecture.md')),
    has_system_structure: exists(path.join(systemWikiDir, 'system-structure.md')),
    has_testing_framework: exists(path.join(systemWikiDir, 'testing-framework.md')),

    // Wiki state — subsystems
    has_wiki_subsystems: hasSubsystemsWiki,
    wiki_subsystem_names: subsystemNames,

    // Git state
    has_git: exists('.git'),

    // GitHub CLI
    has_gh_cli: hasGhCli,

    // GitHub Project settings (from settings.json)
    github_project: loadSettings(cwd).github_project,
  }, raw);
}
|
|
992
|
+
|
|
993
|
+
// ─── Init: Plan Feature ──────────────────────────────────────────────────────
|
|
994
|
+
|
|
995
|
+
/**
 * `init plan-feature` — environment probe for the feature planning workflow.
 *
 * Same shape of probe as plan-backlog minus the research artifacts: resolved
 * models, config, product artifacts, wiki coverage, brownfield status, git,
 * gh CLI, and GitHub Project settings.
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 */
function cmdInitPlanFeature(cwd, raw) {
  const config = loadConfig(cwd);
  const brownfield = detectBrownfieldStatus(cwd);
  const exists = (rel) => pathExistsInternal(cwd, rel);

  // Wiki detection — system-wide
  const systemWikiDir = '.docs/wiki/system-wide';
  const hasSystemWiki = exists(systemWikiDir);

  // Wiki detection — subsystems (each subdirectory is one subsystem)
  const subsystemsWikiDir = '.docs/wiki/subsystems';
  const hasSubsystemsWiki = exists(subsystemsWikiDir);

  let subsystemNames = [];
  if (hasSubsystemsWiki) {
    try {
      subsystemNames = fs
        .readdirSync(path.join(cwd, subsystemsWikiDir), { withFileTypes: true })
        .filter(entry => entry.isDirectory())
        .map(entry => entry.name);
    } catch {}
  }

  // gh CLI availability — any throw means "not installed".
  let hasGhCli = false;
  try {
    const { execSync } = require('child_process');
    execSync('gh --version', { stdio: 'pipe' });
    hasGhCli = true;
  } catch {
    hasGhCli = false;
  }

  output({
    // Models
    product_owner_model: resolveModelInternal(cwd, 'ace-product-owner'),
    researcher_model: resolveModelInternal(cwd, 'ace-project-researcher'),

    // Config
    commit_docs: config.commit_docs,

    // Product artifacts
    has_product_vision: exists('.docs/product/product-vision.md'),
    has_product_backlog: exists('.ace/artifacts/product/product-backlog.md'),

    // Wiki analysis cache (from previous runs)
    has_wiki_analysis: exists('.ace/artifacts/wiki/wiki-analysis.md'),

    // Brownfield detection
    ...brownfield,

    // Wiki state — system-wide
    has_wiki: hasSystemWiki || hasSubsystemsWiki,
    has_wiki_system_wide: hasSystemWiki,
    has_system_architecture: exists(path.join(systemWikiDir, 'system-architecture.md')),
    has_system_structure: exists(path.join(systemWikiDir, 'system-structure.md')),
    has_testing_framework: exists(path.join(systemWikiDir, 'testing-framework.md')),

    // Wiki state — subsystems
    has_wiki_subsystems: hasSubsystemsWiki,
    wiki_subsystem_names: subsystemNames,

    // Git state
    has_git: exists('.git'),

    // GitHub CLI
    has_gh_cli: hasGhCli,

    // GitHub Project settings (from settings.json)
    github_project: loadSettings(cwd).github_project,
  }, raw);
}
|
|
1074
|
+
|
|
1075
|
+
// ─── Init: Plan Story ─────────────────────────────────────────────────────────
|
|
1076
|
+
|
|
1077
|
+
/**
 * init plan-story <story-param>
 *
 * Environment detection for the plan-story workflow.
 * Validates the story source (file path, GitHub URL, or issue number),
 * loads feature context, detects existing story state, and returns
 * everything the workflow needs to run deep questioning.
 *
 * story-param can be:
 * - File path: .ace/artifacts/product/.../story.md or any markdown file
 * - GitHub URL: https://github.com/owner/repo/issues/123
 * - Issue number: 123
 *
 * @param {string} cwd - Project root directory.
 * @param {boolean} raw - Output mode flag forwarded to `output()`.
 * @param {string} storyParam - Story locator (see list above).
 */
function cmdInitPlanStory(cwd, raw, storyParam) {
  const config = loadConfig(cwd);
  const brownfield = detectBrownfieldStatus(cwd);

  // ── Environment detection ──
  const has_git = pathExistsInternal(cwd, '.git');
  // gh CLI availability — any throw (ENOENT, non-zero exit) means not installed.
  const has_gh_cli = (() => {
    try {
      const { execSync } = require('child_process');
      execSync('gh --version', { stdio: 'pipe' });
      return true;
    } catch { return false; }
  })();
  const github_project = (() => {
    const settings = loadSettings(cwd);
    return settings.github_project;
  })();

  // Wiki detection
  const wikiSystemDir = '.docs/wiki/system-wide';
  const has_wiki_system_wide = pathExistsInternal(cwd, wikiSystemDir);
  const wikiSubsystemsDir = '.docs/wiki/subsystems';
  const has_wiki_subsystems = pathExistsInternal(cwd, wikiSubsystemsDir);
  // Each subdirectory of the subsystems wiki dir counts as one subsystem;
  // a read failure simply yields an empty list.
  let wiki_subsystem_names = [];
  if (has_wiki_subsystems) {
    try {
      const entries = fs.readdirSync(path.join(cwd, wikiSubsystemsDir), { withFileTypes: true });
      wiki_subsystem_names = entries.filter(e => e.isDirectory()).map(e => e.name);
    } catch {}
  }
  const has_wiki = has_wiki_system_wide || has_wiki_subsystems;

  // ── Classify the story parameter ──
  // classifyStoryParam (defined elsewhere) presumably returns a tagged object
  // with `type` ('file' | 'invalid' | a GitHub variant | null) plus fields
  // like filePath / repo / issueNumber / reason — confirm against its definition.
  const classified = classifyStoryParam(storyParam);

  // Early exit if invalid: emit the full result shape with story fields
  // nulled out so the workflow can rely on a stable schema either way.
  if (classified.type === null || classified.type === 'invalid') {
    output({
      product_owner_model: resolveModelInternal(cwd, 'ace-product-owner'),
      commit_docs: config.commit_docs,
      has_git, has_gh_cli, github_project,
      ...brownfield,
      has_wiki, has_wiki_system_wide, has_wiki_subsystems, wiki_subsystem_names,
      has_product_vision: pathExistsInternal(cwd, '.docs/product/product-vision.md'),
      has_product_backlog: pathExistsInternal(cwd, '.ace/artifacts/product/product-backlog.md'),
      story_source: null,
      story_valid: false,
      story_error: classified.reason || 'No story parameter provided',
      story_content: null,
      story: { id: null, title: null, status: null, size: null },
      feature: { id: null, title: null },
      epic: { id: null, title: null },
      user_story: null, description: null, acceptance_criteria_count: 0,
      paths: null,
      has_external_analysis: false, has_integration_analysis: false,
      has_feature_file: false, has_story_file: false,
    }, raw);
    return;
  }

  // ── Load story content ──
  let storyContent = null;
  let storySource = classified.type === 'file' ? 'file' : 'github';
  let storyError = null;
  let storyFilePath = null;

  if (classified.type === 'file') {
    const resolvedPath = path.isAbsolute(classified.filePath)
      ? classified.filePath
      : path.join(cwd, classified.filePath);
    if (!pathExistsInternal(cwd, classified.filePath)) {
      storyError = `Story file not found: ${classified.filePath}`;
    } else {
      // safeReadFile (defined elsewhere) appears to return null/falsy on
      // read failure rather than throwing — hence the falsy check below.
      storyContent = safeReadFile(resolvedPath);
      storyFilePath = classified.filePath;
      if (!storyContent) storyError = `Could not read story file: ${classified.filePath}`;
    }
  } else {
    // github-url or issue-number
    if (!has_gh_cli) {
      storyError = 'GitHub CLI (gh) not installed. Cannot fetch GitHub issues.';
    } else {
      // Prefer the repo embedded in the URL; fall back to configured settings.
      const repo = classified.repo || (github_project.repo || null);
      if (!repo) {
        storyError = 'No repository configured. Provide a full GitHub URL or configure github_project.repo in settings.';
      } else {
        const ghResult = execCommand(
          `gh issue view ${classified.issueNumber} --repo ${repo} --json title,body,labels,state`,
          cwd
        );
        if (!ghResult) {
          storyError = `Could not fetch GitHub issue #${classified.issueNumber} from ${repo}.`;
        } else {
          try {
            const issue = JSON.parse(ghResult);
            storyContent = issue.body || '';
            // If the issue body has no markdown H1, synthesize one from the
            // issue title so downstream extractors see a titled document.
            if (storyContent && !storyContent.match(/^#\s+/m)) {
              storyContent = `# ${issue.title}\n\n${storyContent}`;
            }
          } catch {
            storyError = `Failed to parse GitHub issue response for #${classified.issueNumber}.`;
          }
        }
      }
    }
  }

  // ── Extract metadata & requirements (may be empty for seed stories) ──
  const metadata = extractStoryMetadata(storyContent);
  const requirements = extractStoryRequirements(storyContent);

  // ── Compute paths ──
  // For file-based stories, derive slugs/dirs from the file's location
  // (story dir's parent is assumed to be the feature dir). For GitHub-based
  // stories, fall back to computing canonical paths from epic/feature/story
  // ids when all three are present in the metadata.
  let paths = null;
  let has_story_file = false;

  if (storyFilePath) {
    const resolvedPath = path.isAbsolute(storyFilePath)
      ? storyFilePath
      : path.join(cwd, storyFilePath);
    const storyDir = path.dirname(resolvedPath);
    // Normalize Windows separators so emitted paths are always '/'-delimited.
    const relStoryDir = path.relative(cwd, storyDir).replace(/\\/g, '/');
    const storySlug = path.basename(storyDir);
    const featureDir = path.dirname(storyDir);
    const relFeatureDir = path.relative(cwd, featureDir).replace(/\\/g, '/');
    const featureSlug = path.basename(featureDir);

    paths = {
      epic_slug: null,
      feature_slug: featureSlug,
      story_slug: storySlug,
      story_dir: relStoryDir,
      story_file: storyFilePath.replace(/\\/g, '/'),
      external_analysis_file: `${relStoryDir}/external-analysis.md`,
      integration_analysis_file: `${relStoryDir}/integration-analysis.md`,
      feature_dir: relFeatureDir,
      feature_file: `${relFeatureDir}/${featureSlug}.md`,
    };
    has_story_file = true;
  } else if (metadata.epic.id && metadata.feature.id && metadata.id) {
    paths = computeStoryPaths(
      metadata.epic.id, metadata.epic.title || '',
      metadata.feature.id, metadata.feature.title || '',
      metadata.id, metadata.title || ''
    );
    has_story_file = paths ? pathExistsInternal(cwd, paths.story_file) : false;
  }

  // ── Check artifact existence ──
  const has_external_analysis = paths ? pathExistsInternal(cwd, paths.external_analysis_file) : false;
  const has_integration_analysis = paths ? pathExistsInternal(cwd, paths.integration_analysis_file) : false;
  const has_feature_file = paths ? pathExistsInternal(cwd, paths.feature_file) : false;

  // ── Build result ──
  const result = {
    // Models
    product_owner_model: resolveModelInternal(cwd, 'ace-product-owner'),

    // Config
    commit_docs: config.commit_docs,

    // Environment
    has_git, has_gh_cli, github_project,

    // Brownfield detection
    ...brownfield,

    // Wiki state
    has_wiki, has_wiki_system_wide, has_wiki_subsystems, wiki_subsystem_names,

    // Product artifacts
    has_product_vision: pathExistsInternal(cwd, '.docs/product/product-vision.md'),
    has_product_backlog: pathExistsInternal(cwd, '.ace/artifacts/product/product-backlog.md'),

    // Story source — valid only when content loaded AND no error was recorded
    story_source: storySource,
    story_valid: storyContent !== null && storyError === null,
    story_error: storyError,

    // Raw story content (for the workflow to analyze)
    story_content: storyContent,

    // Story metadata (may be partial for seed stories)
    story: {
      id: metadata.id,
      title: metadata.title,
      status: metadata.status,
      size: metadata.size,
      issue_number: extractIssueNumber(metadata.link),
    },
    feature: {
      ...metadata.feature,
      issue_number: paths ? extractIssueNumberFromFile(cwd, paths.feature_file) : null,
    },
    epic: metadata.epic,

    // Requirements (may be empty for seed stories)
    user_story: requirements.user_story,
    description: requirements.description,
    acceptance_criteria_count: requirements.acceptance_criteria_count,

    // Computed paths
    paths,

    // Artifact existence
    has_external_analysis,
    has_integration_analysis,
    has_feature_file,
    has_story_file,
  };

  output(result, raw);
}
|
|
1302
|
+
|
|
1303
|
+
// ─── Init: Execute Story ─────────────────────────────────────────────────────
|
|
1304
|
+
|
|
1305
|
+
/**
|
|
1306
|
+
* init execute-story <story-param>
|
|
1307
|
+
*
|
|
1308
|
+
* Environment detection for the execute-story workflow.
|
|
1309
|
+
* Similar to init plan-story but with additional fields for execution:
|
|
1310
|
+
* - agent_teams status (synced from Claude Code settings)
|
|
1311
|
+
* - has_technical_solution, has_acceptance_criteria checks
|
|
1312
|
+
* - has_coding_standards, has_wiki_refs checks
|
|
1313
|
+
* - executor_model, reviewer_model (resolved from profiles)
|
|
1314
|
+
* - product_backlog path
|
|
1315
|
+
*
|
|
1316
|
+
* story-param can be:
|
|
1317
|
+
* - File path: .ace/artifacts/product/.../story.md
|
|
1318
|
+
* - GitHub URL: https://github.com/owner/repo/issues/123
|
|
1319
|
+
* - Issue number: 123
|
|
1320
|
+
*/
|
|
1321
|
+
function cmdInitExecuteStory(cwd, raw, storyParam) {
  const config = loadConfig(cwd);

  // ── Environment detection ──
  const has_git = pathExistsInternal(cwd, '.git');
  const has_gh_cli = (() => {
    try {
      const { execSync } = require('child_process');
      execSync('gh --version', { stdio: 'pipe' });
      return true;
    } catch { return false; }
  })();
  const settings = loadSettings(cwd);
  const github_project = settings.github_project;

  // ── Agent teams detection (sync from runtime settings) ──
  // The runtime settings.json is the source of truth for the experimental
  // agent-teams flag; if it is missing or unparseable we keep the fallback
  // taken from ACE settings above (deliberate best-effort).
  const claudeSettingsPath = path.join(cwd, RUNTIME_CONFIG_DIR, 'settings.json');
  let agent_teams = settings.agent_teams || false;
  try {
    const claudeRaw = fs.readFileSync(claudeSettingsPath, 'utf-8');
    const claudeSettings = JSON.parse(claudeRaw);
    const val = claudeSettings?.env?.CLAUDE_CODE_EXPERIMENTAL_AGENT_TEAMS;
    agent_teams = val === '1' || val === 'true';
  } catch {} // best-effort: keep fallback when runtime settings unreadable

  // ── Classify the story parameter ──
  const classified = classifyStoryParam(storyParam);

  // Early exit if invalid — emit a fully-shaped result so consumers can rely
  // on every key being present even on failure.
  if (classified.type === null || classified.type === 'invalid') {
    output({
      executor_model: resolveModelInternal(cwd, 'ace-executor'),
      reviewer_model: resolveModelInternal(cwd, 'ace-code-reviewer'),
      commit_docs: config.commit_docs,
      has_git, has_gh_cli, github_project, agent_teams,
      story_source: null,
      story_valid: false,
      story_error: classified.reason || 'No story parameter provided',
      story_content: null,
      story: { id: null, title: null, status: null, size: null },
      feature: { id: null, title: null },
      epic: { id: null, title: null },
      has_acceptance_criteria: false,
      acceptance_criteria_count: 0,
      has_technical_solution: false,
      has_wiki_refs: false,
      has_coding_standards: false,
      paths: null,
    }, raw);
    return;
  }

  // ── Load story content ──
  let storyContent = null;
  let storySource = classified.type === 'file' ? 'file' : 'github';
  let storyError = null;
  let storyFilePath = null;

  if (classified.type === 'file') {
    const resolvedPath = path.isAbsolute(classified.filePath)
      ? classified.filePath
      : path.join(cwd, classified.filePath);
    if (!pathExistsInternal(cwd, classified.filePath)) {
      storyError = `Story file not found: ${classified.filePath}`;
    } else {
      storyContent = safeReadFile(resolvedPath);
      storyFilePath = classified.filePath;
      if (!storyContent) storyError = `Could not read story file: ${classified.filePath}`;
    }
  } else {
    // github-url or issue-number
    if (!has_gh_cli) {
      storyError = 'GitHub CLI (gh) not installed. Cannot fetch GitHub issues.';
    } else {
      // BUGFIX: settings may carry no github_project block, in which case the
      // unconditional `github_project.repo` threw a TypeError instead of
      // producing the "No repository configured" error below.
      const repo = classified.repo || (github_project?.repo || null);
      if (!repo) {
        storyError = 'No repository configured. Provide a full GitHub URL or configure github_project.repo in settings.';
      } else {
        const ghResult = execCommand(
          `gh issue view ${classified.issueNumber} --repo ${repo} --json title,body,labels,state`,
          cwd
        );
        if (!ghResult) {
          storyError = `Could not fetch GitHub issue #${classified.issueNumber} from ${repo}.`;
        } else {
          try {
            const issue = JSON.parse(ghResult);
            // The issue body IS the story markdown; if it lacks a top-level
            // heading, prepend the issue title as one.
            storyContent = issue.body || '';
            if (storyContent && !storyContent.match(/^#\s+/m)) {
              storyContent = `# ${issue.title}\n\n${storyContent}`;
            }
          } catch {
            storyError = `Failed to parse GitHub issue response for #${classified.issueNumber}.`;
          }
        }
      }
    }
  }

  // ── Extract metadata & requirements ──
  const metadata = extractStoryMetadata(storyContent);
  const requirements = extractStoryRequirements(storyContent);

  // ── Detect key sections ──
  const has_acceptance_criteria = requirements.acceptance_criteria_count > 0;
  const has_technical_solution = storyContent
    ? !!extractMarkdownSection(storyContent, 'Technical Solution', 2)
    : false;
  const has_wiki_refs = storyContent
    ? !!extractMarkdownSection(storyContent, 'Relevant Wiki', 2)
    : false;
  const has_coding_standards = pathExistsInternal(cwd, '.docs/wiki/system-wide/coding-standards.md');

  // ── Compute paths ──
  let paths = null;
  let has_story_file = false;

  if (storyFilePath) {
    // Story loaded from a file — derive paths from its actual location.
    const resolvedPath = path.isAbsolute(storyFilePath)
      ? storyFilePath
      : path.join(cwd, storyFilePath);
    const storyDir = path.dirname(resolvedPath);
    const relStoryDir = path.relative(cwd, storyDir).replace(/\\/g, '/');
    const storySlug = path.basename(storyDir);
    const featureDir = path.dirname(storyDir);
    const relFeatureDir = path.relative(cwd, featureDir).replace(/\\/g, '/');
    const featureSlug = path.basename(featureDir);

    paths = {
      epic_slug: null,
      feature_slug: featureSlug,
      story_slug: storySlug,
      story_dir: relStoryDir,
      story_file: storyFilePath.replace(/\\/g, '/'),
      external_analysis_file: `${relStoryDir}/external-analysis.md`,
      integration_analysis_file: `${relStoryDir}/integration-analysis.md`,
      feature_dir: relFeatureDir,
      feature_file: `${relFeatureDir}/${featureSlug}.md`,
      product_backlog: '.ace/artifacts/product/product-backlog.md',
      coding_standards: '.docs/wiki/system-wide/coding-standards.md',
    };
    has_story_file = true;
  } else if (metadata.epic.id && metadata.feature.id && metadata.id) {
    // Story loaded from GitHub — compute paths from metadata when available.
    const computed = computeStoryPaths(
      metadata.epic.id, metadata.epic.title || '',
      metadata.feature.id, metadata.feature.title || '',
      metadata.id, metadata.title || ''
    );
    if (computed) {
      paths = {
        ...computed,
        product_backlog: '.ace/artifacts/product/product-backlog.md',
        coding_standards: '.docs/wiki/system-wide/coding-standards.md',
      };
      has_story_file = pathExistsInternal(cwd, paths.story_file);
    }
  }

  // ── Extract GitHub issue numbers ──
  const storyIssueNumber = extractIssueNumber(metadata.link);
  const featureIssueNumber = paths ? extractIssueNumberFromFile(cwd, paths.feature_file) : null;

  // ── Build result ──
  const result = {
    // Models
    executor_model: resolveModelInternal(cwd, 'ace-executor'),
    reviewer_model: resolveModelInternal(cwd, 'ace-code-reviewer'),

    // Config
    commit_docs: config.commit_docs,

    // Environment
    has_git, has_gh_cli, github_project, agent_teams,

    // Story source
    story_source: storySource,
    story_valid: storyContent !== null && storyError === null,
    story_error: storyError,

    // Raw story content
    story_content: storyContent,

    // Story metadata
    story: {
      id: metadata.id,
      title: metadata.title,
      status: metadata.status,
      size: metadata.size,
      issue_number: storyIssueNumber,
    },
    feature: {
      ...metadata.feature,
      issue_number: featureIssueNumber,
    },
    epic: metadata.epic,

    // Section detection
    has_acceptance_criteria,
    acceptance_criteria_count: requirements.acceptance_criteria_count,
    has_technical_solution,
    has_wiki_refs,
    has_coding_standards,

    // Computed paths
    paths,

    // Artifact existence
    has_story_file,
  };

  output(result, raw);
}
|
|
1533
|
+
|
|
1534
|
+
// ─── Init: Research Story ────────────────────────────────────────────────────
|
|
1535
|
+
|
|
1536
|
+
/**
|
|
1537
|
+
* init research-story <story-param>
|
|
1538
|
+
*
|
|
1539
|
+
* Single compound command that validates a story source, extracts all metadata,
|
|
1540
|
+
* requirements, wiki references, computes paths, and checks artifact existence.
|
|
1541
|
+
* Replaces 5-7 separate ace-tools calls in story-level workflows.
|
|
1542
|
+
*
|
|
1543
|
+
* story-param can be:
|
|
1544
|
+
* - File path: .ace/artifacts/product/.../story.md
|
|
1545
|
+
* - GitHub URL: https://github.com/owner/repo/issues/123
|
|
1546
|
+
* - Issue number: 123
|
|
1547
|
+
*/
|
|
1548
|
+
function cmdInitResearchStory(cwd, raw, storyParam) {
  const config = loadConfig(cwd);

  // ── Environment detection (reused from other init commands) ──
  const has_git = pathExistsInternal(cwd, '.git');
  const has_gh_cli = (() => {
    try {
      const { execSync } = require('child_process');
      execSync('gh --version', { stdio: 'pipe' });
      return true;
    } catch { return false; }
  })();
  const github_project = (() => {
    const settings = loadSettings(cwd);
    return settings.github_project;
  })();

  // ── Classify the story parameter ──
  const classified = classifyStoryParam(storyParam);

  // Early exit if invalid — emit a fully-shaped result so consumers can rely
  // on every key being present even on failure.
  if (classified.type === null || classified.type === 'invalid') {
    output({
      analyst_model: resolveModelInternal(cwd, 'ace-code-integration-analyst'),
      mapper_model: resolveModelInternal(cwd, 'ace-wiki-mapper'),
      commit_docs: config.commit_docs,
      has_git, has_gh_cli, github_project,
      story_source: null,
      story_valid: false,
      story_error: classified.reason || 'No story parameter provided',
      story: { id: null, title: null, status: null, size: null },
      feature: { id: null, title: null },
      epic: { id: null, title: null },
      user_story: null, description: null, acceptance_criteria_count: 0,
      paths: null,
      has_external_analysis: false, has_integration_analysis: false, has_feature_file: false,
      wiki_references: { system_wide: [], subsystem_docs: [], total_count: 0 },
      wiki_docs_exist: { existing: [], missing: [] },
    }, raw);
    return;
  }

  // ── Load story content ──
  let storyContent = null;
  let storySource = classified.type === 'file' ? 'file' : 'github';
  let storyError = null;
  let storyFilePath = null;

  if (classified.type === 'file') {
    const resolvedPath = path.isAbsolute(classified.filePath)
      ? classified.filePath
      : path.join(cwd, classified.filePath);
    if (!pathExistsInternal(cwd, classified.filePath)) {
      storyError = `Story file not found: ${classified.filePath}`;
    } else {
      storyContent = safeReadFile(resolvedPath);
      storyFilePath = classified.filePath;
      if (!storyContent) storyError = `Could not read story file: ${classified.filePath}`;
    }
  } else {
    // github-url or issue-number
    if (!has_gh_cli) {
      storyError = 'GitHub CLI (gh) not installed. Cannot fetch GitHub issues.';
    } else {
      // BUGFIX: settings may carry no github_project block, in which case the
      // unconditional `github_project.repo` threw a TypeError instead of
      // producing the "No repository configured" error below.
      const repo = classified.repo || (github_project?.repo || null);
      if (!repo) {
        storyError = 'No repository configured. Provide a full GitHub URL or configure github_project.repo in settings.';
      } else {
        const ghResult = execCommand(
          `gh issue view ${classified.issueNumber} --repo ${repo} --json title,body,labels,state`,
          cwd
        );
        if (!ghResult) {
          storyError = `Could not fetch GitHub issue #${classified.issueNumber} from ${repo}.`;
        } else {
          try {
            const issue = JSON.parse(ghResult);
            // Reconstruct story content from issue body (the body IS the story markdown)
            storyContent = issue.body || '';
            // If the title isn't in the body, prepend it as a header
            if (storyContent && !storyContent.match(/^#\s+/m)) {
              storyContent = `# ${issue.title}\n\n${storyContent}`;
            }
          } catch {
            storyError = `Failed to parse GitHub issue response for #${classified.issueNumber}.`;
          }
        }
      }
    }
  }

  // ── Extract metadata & requirements ──
  const metadata = extractStoryMetadata(storyContent);
  const requirements = extractStoryRequirements(storyContent);
  const wikiRefs = extractWikiReferences(storyContent);

  // ── Compute paths ──
  let paths = null;
  if (storyFilePath) {
    // Story loaded from file — derive paths from actual file location
    const resolvedPath = path.isAbsolute(storyFilePath)
      ? storyFilePath
      : path.join(cwd, storyFilePath);
    const storyDir = path.dirname(resolvedPath);
    const relStoryDir = path.relative(cwd, storyDir).replace(/\\/g, '/');
    const storySlug = path.basename(storyDir);
    const featureDir = path.dirname(storyDir);
    const relFeatureDir = path.relative(cwd, featureDir).replace(/\\/g, '/');
    const featureSlug = path.basename(featureDir);

    paths = {
      epic_slug: null,
      feature_slug: featureSlug,
      story_slug: storySlug,
      story_dir: relStoryDir,
      story_file: storyFilePath.replace(/\\/g, '/'),
      external_analysis_file: `${relStoryDir}/external-analysis.md`,
      integration_analysis_file: `${relStoryDir}/integration-analysis.md`,
      feature_dir: relFeatureDir,
      feature_file: `${relFeatureDir}/${featureSlug}.md`,
    };
  } else if (metadata.epic.id && metadata.feature.id && metadata.id) {
    // Story loaded from GitHub — compute paths from metadata
    paths = computeStoryPaths(
      metadata.epic.id, metadata.epic.title || '',
      metadata.feature.id, metadata.feature.title || '',
      metadata.id, metadata.title || ''
    );
  }

  // ── Check artifact existence ──
  const has_external_analysis = paths ? pathExistsInternal(cwd, paths.external_analysis_file) : false;
  const has_integration_analysis = paths ? pathExistsInternal(cwd, paths.integration_analysis_file) : false;
  const has_feature_file = paths ? pathExistsInternal(cwd, paths.feature_file) : false;

  // ── Verify wiki doc existence ──
  const allWikiPaths = [...wikiRefs.system_wide, ...wikiRefs.subsystem_docs.map(d => d.path)];
  const wikiExisting = [];
  const wikiMissing = [];
  for (const wikiPath of allWikiPaths) {
    if (pathExistsInternal(cwd, wikiPath)) {
      wikiExisting.push(wikiPath);
    } else {
      wikiMissing.push(wikiPath);
    }
  }

  // ── Build result ──
  const result = {
    // Models
    analyst_model: resolveModelInternal(cwd, 'ace-code-integration-analyst'),
    mapper_model: resolveModelInternal(cwd, 'ace-wiki-mapper'),

    // Config
    commit_docs: config.commit_docs,

    // Environment
    has_git,
    has_gh_cli,
    github_project,

    // Story source
    story_source: storySource,
    story_valid: storyContent !== null && storyError === null,
    story_error: storyError,

    // Story metadata
    story: {
      id: metadata.id,
      title: metadata.title,
      status: metadata.status,
      size: metadata.size,
    },
    feature: metadata.feature,
    epic: metadata.epic,

    // Requirements
    user_story: requirements.user_story,
    description: requirements.description,
    acceptance_criteria_count: requirements.acceptance_criteria_count,

    // Computed paths
    paths,

    // Artifact existence
    has_external_analysis,
    has_integration_analysis,
    has_feature_file,

    // Wiki references (structured)
    wiki_references: wikiRefs,

    // Wiki doc verification
    wiki_docs_exist: {
      existing: wikiExisting,
      missing: wikiMissing,
    },
  };

  output(result, raw);
}
|
|
1749
|
+
|
|
1750
|
+
// ─── Story State Commands ────────────────────────────────────────────────────
|
|
1751
|
+
|
|
1752
|
+
/**
|
|
1753
|
+
 * story update-state <story-param> --status <Done|DevReady|Refined|InProgress>
|
|
1754
|
+
*
|
|
1755
|
+
* Updates the story status across all ACE artifacts:
|
|
1756
|
+
* 1. Story file header (Status field)
|
|
1757
|
+
* 2. Feature file story index table
|
|
1758
|
+
* 3. Product backlog story entry
|
|
1759
|
+
*
|
|
1760
|
+
* Also checks if all stories in the feature are Done — if so, updates
|
|
1761
|
+
* the feature status to Done in both the feature file and product backlog.
|
|
1762
|
+
*
|
|
1763
|
+
* Returns: { story_updated, feature_updated, backlog_updated, feature_status_changed }
|
|
1764
|
+
*/
|
|
1765
|
+
function cmdStoryUpdateState(cwd, raw, extraArgs) {
  const params = parseKeyValueArgs(extraArgs);
  const storyParam = params.story;
  const newStatus = params.status;

  if (!storyParam) {
    error('story update-state requires: story=<path|github-url>');
  }
  if (!newStatus || !['Done', 'DevReady', 'Refined', 'InProgress', 'In Progress'].includes(newStatus)) {
    error('story update-state requires: status=Done|DevReady|Refined|InProgress');
  }

  // Normalize "InProgress" to "In Progress" for display
  const displayStatus = newStatus === 'InProgress' ? 'In Progress' : newStatus;

  // Accumulates which artifacts were actually rewritten; `errors` is
  // deleted at the end when empty so success output stays compact.
  const result = {
    story_updated: false,
    feature_updated: false,
    backlog_updated: false,
    feature_status_changed: false,
    new_status: displayStatus,
    errors: [],
  };

  // ── Resolve story file path ──
  const classified = classifyStoryParam(storyParam);
  if (classified.type !== 'file' || !classified.filePath) {
    result.errors.push('story update-state currently only supports file paths');
    output(result, raw);
    return;
  }

  const storyFilePath = path.isAbsolute(classified.filePath)
    ? classified.filePath
    : path.join(cwd, classified.filePath);

  // ── 1. Update story file header ──
  const storyContent = safeReadFile(storyFilePath);
  if (!storyContent) {
    result.errors.push(`Could not read story file: ${classified.filePath}`);
    output(result, raw);
    return;
  }

  // Replace the first "**Status**: <value>" occurrence in the header.
  const updatedStory = storyContent.replace(
    /(\*\*Status\*\*:\s*)([^|*\n]+)/,
    `$1${displayStatus}`
  );
  if (updatedStory !== storyContent) {
    try {
      fs.writeFileSync(storyFilePath, updatedStory, 'utf-8');
      result.story_updated = true;
    } catch (e) {
      result.errors.push(`Failed to write story file: ${e.message}`);
    }
  }

  // Extract story metadata for lookups (from the pre-update content; the
  // status replacement does not affect the fields used below).
  const metadata = extractStoryMetadata(storyContent);
  const storyId = metadata.id;

  // ── 2. Update feature file story index ──
  // Directory layout convention: <feature-dir>/<story-dir>/story file, with
  // the feature file named after its directory slug.
  const storyDir = path.dirname(storyFilePath);
  const featureDir = path.dirname(storyDir);
  const featureSlug = path.basename(featureDir);
  const featureFilePath = path.join(featureDir, `${featureSlug}.md`);

  const featureContent = safeReadFile(featureFilePath);
  if (featureContent && storyId) {
    // Find the story in the feature's story index table and update its status
    // Table format: | ID | Title | Size | Status | Sprint | Link |
    const storyIdEscaped = storyId.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const tableRowPattern = new RegExp(
      `(\\|\\s*${storyIdEscaped}\\s*\\|[^|]*\\|[^|]*\\|\\s*)([^|]*)(\\s*\\|)`,
      'm'
    );
    const updatedFeature = featureContent.replace(tableRowPattern, `$1${displayStatus}$3`);

    if (updatedFeature !== featureContent) {
      try {
        fs.writeFileSync(featureFilePath, updatedFeature, 'utf-8');
        result.feature_updated = true;
      } catch (e) {
        result.errors.push(`Failed to write feature file: ${e.message}`);
      }
    }

    // ── Check if all stories in the feature are Done ──
    if (displayStatus === 'Done') {
      // Re-read from disk so we see the row update just written; fall back to
      // the in-memory copy if the re-read fails.
      const updatedFeatureContent = safeReadFile(featureFilePath) || updatedFeature;
      // Find all status cells in the story index table
      // Match rows like: | S1 | ... | ... | Status | ... | ... |
      const statusPattern = /\|\s*(?:S\d+|#\d+)\s*\|[^|]*\|[^|]*\|\s*([^|]*)\s*\|/gm;
      let allDone = true;
      let match;
      let storyCount = 0;
      while ((match = statusPattern.exec(updatedFeatureContent)) !== null) {
        storyCount++;
        const status = match[1].trim();
        if (status !== 'Done') {
          allDone = false;
        }
      }

      if (allDone && storyCount > 0) {
        // Update feature status to Done in the feature file header
        const featureWithDoneStatus = updatedFeatureContent.replace(
          /(\*\*Status\*\*:\s*)([^|*\n]+)/,
          '$1Done'
        );
        if (featureWithDoneStatus !== updatedFeatureContent) {
          try {
            fs.writeFileSync(featureFilePath, featureWithDoneStatus, 'utf-8');
            result.feature_status_changed = true;
          } catch (e) {
            result.errors.push(`Failed to update feature status: ${e.message}`);
          }
        }
      }
    }
  }

  // ── 3. Update product backlog ──
  const backlogPath = path.join(cwd, '.ace', 'artifacts', 'product', 'product-backlog.md');
  const backlogContent = safeReadFile(backlogPath);
  if (backlogContent && storyId) {
    let updatedBacklog = backlogContent;

    // Update story status in backlog
    // Table format varies but story ID should be in a table row
    const storyIdEscaped = storyId.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const backlogStoryPattern = new RegExp(
      `(\\|\\s*${storyIdEscaped}\\s*\\|[^|]*\\|[^|]*\\|\\s*)([^|]*)(\\s*\\|)`,
      'm'
    );
    updatedBacklog = updatedBacklog.replace(backlogStoryPattern, `$1${displayStatus}$3`);

    // If feature status changed to Done, also update feature in backlog
    if (result.feature_status_changed && metadata.feature.id) {
      const featureIdEscaped = metadata.feature.id.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
      const backlogFeaturePattern = new RegExp(
        `(\\|\\s*${featureIdEscaped}\\s*\\|[^|]*\\|[^|]*\\|\\s*)([^|]*)(\\s*\\|)`,
        'm'
      );
      updatedBacklog = updatedBacklog.replace(backlogFeaturePattern, `$1Done$3`);
    }

    if (updatedBacklog !== backlogContent) {
      try {
        fs.writeFileSync(backlogPath, updatedBacklog, 'utf-8');
        result.backlog_updated = true;
      } catch (e) {
        result.errors.push(`Failed to write product backlog: ${e.message}`);
      }
    }
  }

  if (result.errors.length === 0) delete result.errors;
  output(result, raw);
}
|
|
1926
|
+
|
|
1927
|
+
// ─── GitHub Integration Commands ──────────────────────────────────────────────
|
|
1928
|
+
|
|
1929
|
+
/**
|
|
1930
|
+
* Parse key=value arguments into an object.
|
|
1931
|
+
* Handles values with spaces when properly quoted in shell.
|
|
1932
|
+
*/
|
|
1933
|
+
function parseKeyValueArgs(args) {
  // Split each "key=value" token at the FIRST '=' so values may themselves
  // contain '='; tokens without '=' are silently skipped.
  const parsed = {};
  for (const token of args) {
    const sep = token.indexOf('=');
    if (sep === -1) continue;
    const key = token.slice(0, sep);
    const value = token.slice(sep + 1);
    parsed[key] = value;
  }
  return parsed;
}
|
|
1942
|
+
|
|
1943
|
+
/**
|
|
1944
|
+
* Run a shell command and return trimmed stdout. Returns null on failure.
|
|
1945
|
+
* Uses bash explicitly to ensure consistent quoting behavior across platforms.
|
|
1946
|
+
*/
|
|
1947
|
+
function execCommand(cmd, cwd) {
  // Run `cmd` synchronously under bash (consistent quoting on all platforms)
  // and return trimmed stdout; any failure — non-zero exit, missing binary,
  // or the 30 s timeout — yields null by design (callers treat null as
  // "command unavailable").
  const { execSync } = require('child_process');
  try {
    const stdout = execSync(cmd, {
      cwd,
      shell: 'bash',
      stdio: ['pipe', 'pipe', 'pipe'],
      encoding: 'utf-8',
      timeout: 30000,
    });
    return stdout.trim();
  } catch (err) {
    return null;
  }
}
|
|
1961
|
+
|
|
1962
|
+
/**
|
|
1963
|
+
* Resolve project ID and field definitions for a GitHub Project.
|
|
1964
|
+
* Returns { project_id, fields } where fields maps field names to { id, type, options? }.
|
|
1965
|
+
* Returns { project_id: null, fields: {} } on failure.
|
|
1966
|
+
*/
|
|
1967
|
+
function resolveProjectContext(owner, project, cwd) {
  // Look up the project's node ID by matching its number in `gh project list`.
  const listJson = execCommand(
    `gh project list --owner ${owner} --format json --limit 20`,
    cwd
  );

  let project_id = null;
  if (listJson) {
    try {
      const data = JSON.parse(listJson);
      const projects = data.projects || data || [];
      const hit = projects.find((p) => String(p.number) === String(project));
      if (hit) project_id = hit.id;
    } catch {}
  }

  // Map each project field name to { id, type, options? }, where options maps
  // option names to option IDs for single-select fields.
  const fieldsJson = execCommand(
    `gh project field-list ${project} --owner ${owner} --format json`,
    cwd
  );

  const fields = {};
  if (fieldsJson) {
    try {
      const data = JSON.parse(fieldsJson);
      const fieldList = data.fields || data || [];
      for (const f of fieldList) {
        const descriptor = { id: f.id, type: f.type };
        if (f.options) {
          descriptor.options = {};
          for (const opt of f.options) {
            descriptor.options[opt.name] = opt.id;
          }
        }
        fields[f.name] = descriptor;
      }
    } catch {}
  }

  // Both lookups degrade gracefully: on any CLI or parse failure the caller
  // receives { project_id: null, fields: {} }.
  return { project_id, fields };
}
|
|
2008
|
+
|
|
2009
|
+
/**
|
|
2010
|
+
* github resolve-fields — Resolve all GitHub field/type IDs needed for issue creation.
|
|
2011
|
+
*
|
|
2012
|
+
* Required args: repo=owner/name owner=org project=number
|
|
2013
|
+
*
|
|
2014
|
+
* Returns JSON with:
|
|
2015
|
+
* - issue_types: { Epic: "IT_...", Feature: "IT_...", Story: "IT_...", ... }
|
|
2016
|
+
* - project_id: "PVT_..."
|
|
2017
|
+
* - fields: { Priority: { id, options: { P0: id, ... } }, Estimate: { id }, Sprint: { id }, Status: { id, options } }
|
|
2018
|
+
*/
|
|
2019
|
+
function cmdGithubResolveFields(cwd, raw, extraArgs) {
  // e.g. repo="Quantarcane/qarc", owner="Quantarcane", project="1"
  const { repo, owner, project } = parseKeyValueArgs(extraArgs);

  if (!repo || !owner || !project) {
    error('github resolve-fields requires: repo=owner/name owner=org project=number');
  }

  // Accept either "owner/name" or a bare repo name.
  const repoName = repo.split('/')[1] || repo;

  // 1. Resolve native issue types via the GraphQL API.
  const issueTypesRaw = execCommand(
    `gh api graphql -f query='query { repository(owner: "${owner}", name: "${repoName}") { issueTypes(first: 10) { nodes { id name } } } }'`,
    cwd
  );

  const issueTypes = {};
  if (issueTypesRaw) {
    try {
      const parsed = JSON.parse(issueTypesRaw);
      const nodes = parsed.data?.repository?.issueTypes?.nodes || [];
      for (const { name, id } of nodes) {
        issueTypes[name] = id;
      }
    } catch {}
  }

  // 2 & 3. Resolve project ID and fields
  const { project_id, fields } = resolveProjectContext(owner, project, cwd);

  output({
    issue_types: issueTypes,
    project_id,
    fields,
  }, raw);
}
|
|
2057
|
+
|
|
2058
|
+
/**
 * github create-issue — Create a GitHub issue, set its native type, add to project,
 * and optionally set project fields (Status, Priority, Estimate) and issue metadata
 * (parent, milestone).
 *
 * Required args: type=Epic|Feature|Story title=... repo=owner/name owner=org
 *                project=number project_id=PVT_... type_id=IT_...
 *
 * Optional args: body=... body_file=path (reads body from file, preferred for large bodies)
 *                status_field_id=... status_option_id=...
 *                priority_field_id=... priority_option_id=...
 *                estimate_field_id=... estimate=number
 *                parent=issue_number milestone=name
 *
 * The title is auto-prefixed: type=Epic + title="My Epic" → "[Epic] My Epic"
 *
 * Returns JSON with: { number, url, item_id, type_set, status_set, priority_set,
 * estimate_set, parent_set, milestone_set } plus an `errors` array when any step failed.
 */
function cmdGithubCreateIssue(cwd, raw, extraArgs) {
  const params = parseKeyValueArgs(extraArgs);

  const type = params.type; // Epic, Feature, or Story
  const title = params.title;
  let body = params.body || '';
  if (!body && params.body_file) {
    const bodyPath = path.isAbsolute(params.body_file)
      ? params.body_file
      : path.join(cwd, params.body_file);
    if (fs.existsSync(bodyPath)) {
      body = fs.readFileSync(bodyPath, 'utf8');
    }
  }
  const repo = params.repo;
  const owner = params.owner;
  const project = params.project;
  const projectId = params.project_id;
  const typeId = params.type_id;

  if (!type || !title || !repo || !owner || !project || !projectId || !typeId) {
    error('github create-issue requires: type, title, repo, owner, project, project_id, type_id');
  }

  const repoName = repo.split('/')[1] || repo;
  const result = {
    number: null,
    url: null,
    item_id: null,
    type_set: false,
    status_set: false,
    priority_set: false,
    estimate_set: false,
    parent_set: false,
    milestone_set: false,
    errors: [],
  };

  // Escape every character that stays special inside bash double quotes.
  // Escaping only `"` (as before) would let `$`, backticks and backslashes
  // be interpreted by the shell — i.e. titles containing `$(...)` or
  // backticks would trigger command substitution.
  const shellQuote = (s) => s.replace(/[\\"`$]/g, (c) => '\\' + c);

  // 1. Create the issue (auto-prefix title with [Type])
  // Use --body-file to avoid shell escaping issues with backticks, $, code blocks, etc.
  const fullTitle = `[${type}] ${title}`;
  const safeTitle = shellQuote(fullTitle);

  let createBodyFile = null;
  if (params.body_file) {
    createBodyFile = path.isAbsolute(params.body_file)
      ? params.body_file
      : path.join(cwd, params.body_file);
  } else {
    const os = require('os');
    createBodyFile = path.join(os.tmpdir(), `ace-gh-body-${Date.now()}.md`);
    fs.writeFileSync(createBodyFile, body, 'utf8');
  }

  // Forward slashes keep the path valid for bash even on Windows.
  const safeBodyPath = createBodyFile.replace(/\\/g, '/');
  const issueUrl = execCommand(
    `gh issue create --repo ${repo} --title "${safeTitle}" --body-file "${safeBodyPath}"`,
    cwd
  );

  // Clean up temp file if we created one
  if (!params.body_file && createBodyFile && fs.existsSync(createBodyFile)) {
    try { fs.unlinkSync(createBodyFile); } catch {}
  }

  if (!issueUrl) {
    result.errors.push('Failed to create issue');
    output(result, raw);
    return;
  }

  result.url = issueUrl;
  // The issue number is the last path segment of the returned URL.
  const urlParts = issueUrl.split('/');
  result.number = parseInt(urlParts[urlParts.length - 1], 10);

  // 2. Set native issue type via GraphQL
  const nodeIdRaw = execCommand(
    `gh api graphql -f query="query { repository(owner: \\"${owner}\\", name: \\"${repoName}\\") { issue(number: ${result.number}) { id } } }" --jq ".data.repository.issue.id"`,
    cwd
  );

  if (nodeIdRaw) {
    const setTypeRaw = execCommand(
      `gh api graphql -f query="mutation { updateIssue(input: { id: \\"${nodeIdRaw}\\", issueTypeId: \\"${typeId}\\" }) { issue { id } } }"`,
      cwd
    );
    result.type_set = !!setTypeRaw;
    if (!setTypeRaw) result.errors.push('Failed to set issue type');
  } else {
    result.errors.push('Failed to get issue node ID');
  }

  // 3. Add to project
  const addRaw = execCommand(
    `gh project item-add ${project} --owner ${owner} --url ${issueUrl} --format json`,
    cwd
  );

  if (addRaw) {
    try {
      const parsed = JSON.parse(addRaw);
      result.item_id = parsed.id;
    } catch {}
  } else {
    result.errors.push('Failed to add to project');
  }

  // 4. Set Status (optional)
  if (result.item_id && params.status_field_id && params.status_option_id) {
    const statusOk = execCommand(
      `gh project item-edit --project-id ${projectId} --id ${result.item_id} --field-id ${params.status_field_id} --single-select-option-id ${params.status_option_id}`,
      cwd
    );
    result.status_set = statusOk !== null;
    if (!result.status_set) result.errors.push('Failed to set status');
  }

  // 5. Set Priority (optional — single-select field)
  if (result.item_id && params.priority_field_id && params.priority_option_id) {
    const priorityOk = execCommand(
      `gh project item-edit --project-id ${projectId} --id ${result.item_id} --field-id ${params.priority_field_id} --single-select-option-id ${params.priority_option_id}`,
      cwd
    );
    result.priority_set = priorityOk !== null;
    if (!result.priority_set) result.errors.push('Failed to set priority');
  }

  // 6. Set Estimate (optional)
  if (result.item_id && params.estimate_field_id && params.estimate) {
    const estimateOk = execCommand(
      `gh project item-edit --project-id ${projectId} --id ${result.item_id} --field-id ${params.estimate_field_id} --number ${params.estimate}`,
      cwd
    );
    result.estimate_set = estimateOk !== null;
    if (!result.estimate_set) result.errors.push('Failed to set estimate');
  }

  // 7. Set parent issue via GraphQL addSubIssue (optional — Features under Epics, Stories under Features)
  if (params.parent) {
    // Get the parent issue's node ID
    const parentNodeId = execCommand(
      `gh api graphql -f query="query { repository(owner: \\"${owner}\\", name: \\"${repoName}\\") { issue(number: ${params.parent}) { id } } }" --jq ".data.repository.issue.id"`,
      cwd
    );
    // Reuse the child node ID from step 2 when available; only re-fetch if that lookup failed.
    const childNodeId = nodeIdRaw || execCommand(
      `gh api graphql -f query="query { repository(owner: \\"${owner}\\", name: \\"${repoName}\\") { issue(number: ${result.number}) { id } } }" --jq ".data.repository.issue.id"`,
      cwd
    );
    if (parentNodeId && childNodeId) {
      const parentOk = execCommand(
        `gh api graphql -f query="mutation { addSubIssue(input: { issueId: \\"${parentNodeId}\\", subIssueId: \\"${childNodeId}\\" }) { issue { id } } }"`,
        cwd
      );
      result.parent_set = parentOk !== null;
      if (!result.parent_set) result.errors.push('Failed to set parent');
    } else {
      result.errors.push('Failed to resolve parent/child node IDs');
    }
  }

  // 8. Set milestone (optional)
  if (params.milestone) {
    const safeMilestone = shellQuote(params.milestone);
    const milestoneOk = execCommand(
      `gh issue edit ${result.number} --repo ${repo} --milestone "${safeMilestone}"`,
      cwd
    );
    result.milestone_set = milestoneOk !== null;
    if (!result.milestone_set) result.errors.push('Failed to set milestone');
  }

  // Omit the errors key entirely on full success to keep output terse.
  if (result.errors.length === 0) delete result.errors;
  output(result, raw);
}
|
|
2250
|
+
|
|
2251
|
+
/**
 * github update-issue — Update an existing GitHub issue's title and body,
 * and optionally update project fields (Status, Priority, Estimate).
 *
 * Required args: number=issue_number repo=owner/name
 *
 * Optional args: title=... body=... body_file=path (reads body from file)
 *                owner=org project=number project_id=PVT_...
 *                status_field_id=... status_option_id=...
 *                priority_field_id=... priority_option_id=...
 *                estimate_field_id=... estimate=number
 *
 * Returns JSON with: { number, updated_title, updated_body, status_set,
 * priority_set, estimate_set } plus an `errors` array when any step failed.
 */
function cmdGithubUpdateIssue(cwd, raw, extraArgs) {
  const params = parseKeyValueArgs(extraArgs);

  const number = params.number;
  const repo = params.repo;

  if (!number || !repo) {
    error('github update-issue requires: number, repo');
  }

  const result = {
    number: parseInt(number, 10),
    updated_title: false,
    updated_body: false,
    status_set: false,
    priority_set: false,
    estimate_set: false,
    errors: [],
  };

  // 1. Update title and/or body via gh issue edit
  const editParts = [`gh issue edit ${number} --repo ${repo}`];

  if (params.title) {
    // Escape every bash-double-quote special character, not just `"` —
    // otherwise `$`, backticks and backslashes in the title would be
    // expanded by the shell (command substitution risk).
    const safeTitle = params.title.replace(/[\\"`$]/g, (c) => '\\' + c);
    editParts.push(`--title "${safeTitle}"`);
    result.updated_title = true;
  }

  // Body can come from body_file= param (preferred — avoids shell escaping)
  // or body= param (for short text).
  // Uses gh's --body-file flag to avoid shell escaping issues with
  // backticks, $, newlines, code blocks, mermaid diagrams, etc.
  let bodyFilePath = null;
  let tempBodyFile = null;

  if (params.body_file) {
    bodyFilePath = path.isAbsolute(params.body_file)
      ? params.body_file
      : path.join(cwd, params.body_file);
    if (!fs.existsSync(bodyFilePath)) {
      result.errors.push(`body_file not found: ${params.body_file}`);
      bodyFilePath = null;
    }
  } else if (params.body) {
    // Write body text to a temp file to use --body-file
    const os = require('os');
    tempBodyFile = path.join(os.tmpdir(), `ace-gh-body-${Date.now()}.md`);
    fs.writeFileSync(tempBodyFile, params.body, 'utf8');
    bodyFilePath = tempBodyFile;
  }

  if (bodyFilePath) {
    // Forward slashes keep the path valid for bash even on Windows.
    const safeBodyPath = bodyFilePath.replace(/\\/g, '/');
    editParts.push(`--body-file "${safeBodyPath}"`);
    result.updated_body = true;
  }

  if (result.updated_title || result.updated_body) {
    const fullCmd = editParts.join(' ');
    const { execSync } = require('child_process');
    try {
      execSync(fullCmd, {
        cwd,
        shell: 'bash',
        stdio: ['pipe', 'pipe', 'pipe'],
        encoding: 'utf-8',
        timeout: 30000,
      });
      // success — gh prints the issue URL, which we don't need here
    } catch (e) {
      result.errors.push('Failed to update issue: ' + (e.stderr || e.message || 'unknown error'));
      result.updated_title = false;
      result.updated_body = false;
    }
  }

  // Clean up temp file if created
  if (tempBodyFile && fs.existsSync(tempBodyFile)) {
    try { fs.unlinkSync(tempBodyFile); } catch {}
  }

  // 2. Update project fields (optional — requires project context)
  if (params.owner && params.project && params.project_id) {
    // Find the project item ID via GraphQL (direct query — no pagination issues)
    const repoParts = repo.split('/');
    const repoOwner = repoParts[0];
    const repoName = repoParts[1] || repoParts[0];
    const itemQuery = `query { repository(owner: \\"${repoOwner}\\", name: \\"${repoName}\\") { issue(number: ${number}) { projectItems(first: 10) { nodes { id project { id } } } } } }`;
    const itemResult = execCommand(
      `gh api graphql -f query="${itemQuery}"`,
      cwd
    );
    let itemId = null;
    if (itemResult) {
      try {
        const parsed = JSON.parse(itemResult);
        const nodes = parsed.data?.repository?.issue?.projectItems?.nodes || [];
        // An issue can belong to several projects — match on the target project ID.
        const match = nodes.find(n => n.project?.id === params.project_id);
        itemId = match?.id || null;
      } catch {}
    }

    if (itemId) {
      // Set Status
      if (params.status_field_id && params.status_option_id) {
        const statusOk = execCommand(
          `gh project item-edit --project-id ${params.project_id} --id ${itemId} --field-id ${params.status_field_id} --single-select-option-id ${params.status_option_id}`,
          cwd
        );
        result.status_set = statusOk !== null;
        if (!result.status_set) result.errors.push('Failed to set status');
      }

      // Set Priority
      if (params.priority_field_id && params.priority_option_id) {
        const priorityOk = execCommand(
          `gh project item-edit --project-id ${params.project_id} --id ${itemId} --field-id ${params.priority_field_id} --single-select-option-id ${params.priority_option_id}`,
          cwd
        );
        result.priority_set = priorityOk !== null;
        if (!result.priority_set) result.errors.push('Failed to set priority');
      }

      // Set Estimate
      if (params.estimate_field_id && params.estimate) {
        const estimateOk = execCommand(
          `gh project item-edit --project-id ${params.project_id} --id ${itemId} --field-id ${params.estimate_field_id} --number ${params.estimate}`,
          cwd
        );
        result.estimate_set = estimateOk !== null;
        if (!result.estimate_set) result.errors.push('Failed to set estimate');
      }
    } else {
      result.errors.push('Issue not found in project — cannot update fields');
    }
  }

  // Omit the errors key entirely on full success to keep output terse.
  if (result.errors.length === 0) delete result.errors;
  output(result, raw);
}
|
|
2408
|
+
|
|
2409
|
+
/**
|
|
2410
|
+
* github fetch-issues — Fetch all Epics and Features from a GitHub Project with full field data.
|
|
2411
|
+
*
|
|
2412
|
+
* Uses a single paginated GraphQL query to retrieve project items with:
|
|
2413
|
+
* - Native issue type (Epic/Feature/Story)
|
|
2414
|
+
* - All project field values (Status, Priority, Estimate, Sprint, Size, etc.)
|
|
2415
|
+
* - Parent issue relationships (sub-issues)
|
|
2416
|
+
* - Milestone
|
|
2417
|
+
*
|
|
2418
|
+
* Required args: repo=owner/name owner=org project=number
|
|
2419
|
+
*
|
|
2420
|
+
* Returns JSON with:
|
|
2421
|
+
* - epics: [{ number, title, status, priority, estimate, sprint, milestone, url, state }]
|
|
2422
|
+
* - features: [{ number, title, status, priority, estimate, sprint, milestone, parent_number, parent_title, url, state }]
|
|
2423
|
+
* - counts: { total, epics, features, skipped }
|
|
2424
|
+
*/
|
|
2425
|
+
|
|
2426
|
+
/**
 * github sync-story — Update a story's GitHub issue AND its parent feature's GitHub issue
 * in a single call. Pushes file content as body AND updates the GitHub Project Status field
 * to match each file's local **Status** value.
 *
 * Prints human-readable status lines to stderr so the console always
 * shows what happened, regardless of whether the calling workflow displays it.
 *
 * Required args: repo=owner/name story_file=path
 * Optional args: feature_file=path owner=org project=number
 *
 * When owner and project are provided, resolves the GitHub Project's Status field
 * and updates each issue's project status to match the local file status.
 *
 * Reads each file's **Link** header to extract the issue number
 * (via extractIssueNumberFromFile).
 * Uses --body-file to push the full file content to GitHub.
 *
 * Returns JSON with: { story: { number, updated, status_synced, error }, feature: { number, updated, status_synced, error } }
 */
function cmdGithubSyncStory(cwd, raw, extraArgs) {
  const params = parseKeyValueArgs(extraArgs);
  const repo = params.repo;
  const storyFile = params.story_file;

  if (!repo || !storyFile) {
    error('github sync-story requires: repo=owner/name story_file=path');
  }

  // Parallel result shape for story and feature; `error` stays null on success.
  const result = {
    story: { number: null, updated: false, status_synced: false, error: null },
    feature: { number: null, updated: false, status_synced: false, error: null },
  };

  const { execSync } = require('child_process');

  // --- Resolve project context for status updates (optional) ---
  // projectCtx stays null when owner/project are absent or resolution fails;
  // syncProjectStatus() below no-ops in that case.
  const owner = params.owner;
  const project = params.project;
  let projectCtx = null;

  if (owner && project) {
    projectCtx = resolveProjectContext(owner, project, cwd);
    if (!projectCtx.project_id) {
      process.stderr.write(` ! Could not resolve GitHub Project #${project}. Status updates skipped.\n`);
      projectCtx = null;
    } else if (!projectCtx.fields.Status) {
      process.stderr.write(' ! GitHub Project has no Status field. Status updates skipped.\n');
      projectCtx = null;
    }
  }

  // --- Helper: update project status for a single issue ---
  // Closes over repo/projectCtx/cwd. Returns true only when the project
  // status was actually changed; every skip path logs a reason to stderr.
  function syncProjectStatus(issueNumber, filePath, label) {
    if (!projectCtx) return false;

    const content = safeReadFile(filePath);
    if (!content) return false;

    // Local file is the source of truth for status.
    const metadata = extractStoryMetadata(content);
    const localStatus = metadata.status;
    if (!localStatus) {
      process.stderr.write(` — ${label} has no Status field. Skipping project status update.\n`);
      return false;
    }

    // Map the local status string to the project's single-select option ID.
    const statusField = projectCtx.fields.Status;
    const statusOptionId = statusField.options?.[localStatus];
    if (!statusOptionId) {
      process.stderr.write(` ! GitHub Project has no status option "${localStatus}". Skipping status update for ${label}.\n`);
      return false;
    }

    // Look up project item ID via GraphQL (direct query — no pagination issues)
    const repoParts = repo.split('/');
    const repoOwner = repoParts[0];
    const repoName = repoParts[1] || repoParts[0];
    const itemQuery = `query { repository(owner: \\"${repoOwner}\\", name: \\"${repoName}\\") { issue(number: ${issueNumber}) { projectItems(first: 10) { nodes { id project { id } } } } } }`;
    const itemResult = execCommand(
      `gh api graphql -f query="${itemQuery}"`,
      cwd
    );
    let itemId = null;
    if (itemResult) {
      try {
        const parsed = JSON.parse(itemResult);
        const nodes = parsed.data?.repository?.issue?.projectItems?.nodes || [];
        // An issue can belong to several projects — match on the target project ID.
        const match = nodes.find(n => n.project?.id === projectCtx.project_id);
        itemId = match?.id || null;
      } catch {}
    }
    if (!itemId) {
      process.stderr.write(` ! ${label} #${issueNumber} not found in GitHub Project. Skipping status update.\n`);
      return false;
    }

    const statusOk = execCommand(
      `gh project item-edit --project-id ${projectCtx.project_id} --id ${itemId} --field-id ${statusField.id} --single-select-option-id ${statusOptionId}`,
      cwd
    );
    if (statusOk !== null) {
      process.stderr.write(` + Updated ${label} #${issueNumber} project status → "${localStatus}".\n`);
      return true;
    } else {
      process.stderr.write(` x FAILED to update ${label} #${issueNumber} project status.\n`);
      return false;
    }
  }

  // --- Sync story issue ---
  const storyPath = path.isAbsolute(storyFile) ? storyFile : path.join(cwd, storyFile);
  const storyIssue = extractIssueNumberFromFile(cwd, storyFile);

  if (!storyIssue) {
    result.story.error = 'No GitHub issue linked';
    process.stderr.write(' — Story has no GitHub issue linked. Skipping.\n');
  } else {
    result.story.number = storyIssue;
    // Forward slashes keep the path valid for bash even on Windows.
    const safePath = storyPath.replace(/\\/g, '/');
    try {
      execSync(`gh issue edit ${storyIssue} --repo ${repo} --body-file "${safePath}"`, {
        cwd, shell: 'bash', stdio: ['pipe', 'pipe', 'pipe'], encoding: 'utf-8', timeout: 30000,
      });
      result.story.updated = true;
      process.stderr.write(` + Updated GitHub story issue #${storyIssue}.\n`);
    } catch (e) {
      result.story.error = (e.stderr || e.message || 'unknown error').trim();
      process.stderr.write(` x FAILED to update GitHub story issue #${storyIssue}.\n`);
      process.stderr.write(`   Error: ${result.story.error}\n`);
    }

    // Only sync project status when the body push succeeded.
    if (result.story.updated) {
      result.story.status_synced = syncProjectStatus(storyIssue, storyPath, 'Story');
    }
  }

  // --- Sync feature issue ---
  const featureFile = params.feature_file;
  if (featureFile) {
    const featurePath = path.isAbsolute(featureFile) ? featureFile : path.join(cwd, featureFile);
    const featureIssue = extractIssueNumberFromFile(cwd, featureFile);

    if (!featureIssue) {
      result.feature.error = 'No GitHub issue linked';
      process.stderr.write(' — Feature has no GitHub issue linked. Skipping.\n');
    } else {
      result.feature.number = featureIssue;
      const safePath = featurePath.replace(/\\/g, '/');
      try {
        execSync(`gh issue edit ${featureIssue} --repo ${repo} --body-file "${safePath}"`, {
          cwd, shell: 'bash', stdio: ['pipe', 'pipe', 'pipe'], encoding: 'utf-8', timeout: 30000,
        });
        result.feature.updated = true;
        process.stderr.write(` + Updated GitHub feature issue #${featureIssue}.\n`);
      } catch (e) {
        result.feature.error = (e.stderr || e.message || 'unknown error').trim();
        process.stderr.write(` x FAILED to update GitHub feature issue #${featureIssue}.\n`);
        process.stderr.write(`   Error: ${result.feature.error}\n`);
      }

      // Only sync project status when the body push succeeded.
      if (result.feature.updated) {
        result.feature.status_synced = syncProjectStatus(featureIssue, featurePath, 'Feature');
      }
    }
  }

  output(result, raw);
}
|
|
2593
|
+
|
|
2594
|
+
/**
 * github fetch-issues — Fetch all Epics and Features from a GitHub Project
 * with full field data, using a single paginated GraphQL query per page
 * (issue details, native type, parent, milestone, and all project field values).
 *
 * Required args: repo=owner/name owner=org project=number
 *
 * Returns JSON with:
 * - epics: [{ number, title, status, priority, estimate, sprint, milestone, url, state }]
 * - features: same shape plus { parent_number, parent_title }
 * - counts: { total, epics, features, skipped }
 */
function cmdGithubFetchIssues(cwd, raw, extraArgs) {
  const params = parseKeyValueArgs(extraArgs);
  const repo = params.repo;
  const owner = params.owner;
  const project = params.project;

  if (!repo || !owner || !project) {
    error('github fetch-issues requires: repo=owner/name owner=org project=number');
  }

  // 1. Get project node ID by matching the project number in `gh project list`.
  const projectListRaw = execCommand(
    `gh project list --owner ${owner} --format json --limit 20`,
    cwd
  );

  let projectId = null;
  if (projectListRaw) {
    try {
      const parsed = JSON.parse(projectListRaw);
      // gh may return either { projects: [...] } or a bare array.
      const projects = parsed.projects || parsed || [];
      const match = projects.find(p => String(p.number) === String(project));
      if (match) projectId = match.id;
    } catch {}
  }

  if (!projectId) {
    error('Could not find project #' + project + ' for owner ' + owner);
  }

  // 2. Fetch all project items via paginated GraphQL query
  // Single query gets: issue details, native type, parent, milestone, and all project field values
  const allItems = [];
  let hasNextPage = true;
  let cursor = null;

  while (hasNextPage) {
    const afterClause = cursor ? `, after: "${cursor}"` : '';
    const query = `query { node(id: "${projectId}") { ... on ProjectV2 { items(first: 100${afterClause}) { nodes { id fieldValues(first: 20) { nodes { ... on ProjectV2ItemFieldSingleSelectValue { name field { ... on ProjectV2SingleSelectField { name } } } ... on ProjectV2ItemFieldNumberValue { number field { ... on ProjectV2Field { name } } } ... on ProjectV2ItemFieldIterationValue { title field { ... on ProjectV2IterationField { name } } } } } content { ... on Issue { number title url state issueType { name } parent { number title } milestone { title } } } } pageInfo { hasNextPage endCursor } } } } }`;

    // Single quotes: the query contains double quotes but never single quotes.
    const result = execCommand(
      `gh api graphql -f query='${query}'`,
      cwd
    );

    if (!result) {
      if (allItems.length === 0) {
        error('Failed to fetch project items via GraphQL');
      }
      break; // partial success — return what we have
    }

    try {
      const parsed = JSON.parse(result);
      const itemsData = parsed.data?.node?.items;
      if (itemsData?.nodes) {
        allItems.push(...itemsData.nodes);
      }
      hasNextPage = itemsData?.pageInfo?.hasNextPage || false;
      cursor = itemsData?.pageInfo?.endCursor || null;
    } catch {
      break;
    }
  }

  // 3. Parse items into structured epics/features
  const epics = [];
  const features = [];
  let skipped = 0;

  for (const item of allItems) {
    const content = item.content;
    if (!content || !content.number) {
      skipped++;
      continue; // DraftIssue or PR — skip
    }

    // Extract project field values into a flat map keyed by field name.
    // Exactly one of name/number/title is present per GraphQL value type.
    const fieldMap = {};
    if (item.fieldValues?.nodes) {
      for (const fv of item.fieldValues.nodes) {
        if (fv.field?.name) {
          if (fv.name !== undefined) {
            fieldMap[fv.field.name] = fv.name; // single-select (Status, Priority, Size)
          } else if (fv.number !== undefined) {
            fieldMap[fv.field.name] = fv.number; // number (Estimate)
          } else if (fv.title !== undefined) {
            fieldMap[fv.field.name] = fv.title; // iteration (Sprint)
          }
        }
      }
    }

    // Determine type: native issueType → title prefix → skip
    let type = null;
    if (content.issueType?.name) {
      type = content.issueType.name;
    } else if (content.title?.startsWith('[Epic]')) {
      type = 'Epic';
    } else if (content.title?.startsWith('[Feature]')) {
      type = 'Feature';
    }

    if (type !== 'Epic' && type !== 'Feature') {
      skipped++;
      continue;
    }

    // Use ?? (not ||) so a numeric Estimate of 0 is preserved rather than
    // collapsed to null; only missing fields fall back.
    const entry = {
      number: content.number,
      title: content.title,
      status: fieldMap.Status ?? null,
      priority: fieldMap.Priority ?? null,
      estimate: fieldMap.Estimate ?? null,
      sprint: fieldMap.Sprint ?? null,
      milestone: content.milestone?.title || null,
      url: content.url,
      state: content.state || null,
    };

    if (type === 'Epic') {
      epics.push(entry);
    } else {
      entry.parent_number = content.parent?.number || null;
      entry.parent_title = content.parent?.title || null;
      features.push(entry);
    }
  }

  output({
    epics,
    features,
    counts: {
      total: allItems.length,
      epics: epics.length,
      features: features.length,
      skipped,
    },
  }, raw);
}
|
|
2734
|
+
|
|
2735
|
+
// ─── CLI Router ───────────────────────────────────────────────────────────────

/**
 * CLI entry point for ace-tools.
 *
 * Parses `process.argv`, strips the global `--raw` flag (accepted at any
 * position), and dispatches the first positional argument to the matching
 * `cmd*` handler. `init`, `story` and `github` take a second positional
 * subcommand; remaining arguments are forwarded to the handler.
 *
 * Side effects only: every handler prints its result and may read/write
 * files under the current working directory. Unknown commands and
 * subcommands are reported via `error()` (assumed to report-and-exit;
 * a defensive `return` guards the case where it does not — TODO confirm).
 */
function main() {
  const args = process.argv.slice(2);

  // `--raw` is a global flag: record its presence, then remove it so it
  // never shadows a positional argument further down.
  const rawIndex = args.indexOf('--raw');
  const raw = rawIndex !== -1;
  if (raw) args.splice(rawIndex, 1);

  const command = args[0];
  const cwd = process.cwd();

  if (!command) {
    // FIX: the usage text previously omitted `story` and `github`, even
    // though the router handles them (see the `default` branch below,
    // which lists all twelve commands).
    error('Usage: ace-tools <command> [args] [--raw]\nCommands: load-config, resolve-model, verify-path-exists, generate-slug, current-timestamp, ensure-settings, write-github-settings, write-agent-teams, sync-agent-teams, init, story, github');
    // FIX (defensive): if error() does not terminate the process, bail out
    // here instead of switching on `undefined` and emitting a second
    // "Unknown command" message.
    return;
  }

  switch (command) {
    case 'load-config':
      cmdLoadConfig(cwd, raw);
      break;

    case 'resolve-model':
      cmdResolveModel(cwd, args[1], raw);
      break;

    case 'verify-path-exists':
      cmdVerifyPathExists(cwd, args[1], raw);
      break;

    case 'generate-slug':
      // All remaining args form the phrase to slugify.
      cmdGenerateSlug(args.slice(1).join(' '), raw);
      break;

    case 'current-timestamp':
      // Optional format argument; defaults to 'full'.
      cmdCurrentTimestamp(args[1] || 'full', raw);
      break;

    case 'ensure-settings':
      cmdEnsureSettings(cwd, raw);
      break;

    case 'write-github-settings':
      cmdWriteGithubSettings(cwd, raw, args.slice(1));
      break;

    case 'write-agent-teams':
      cmdWriteAgentTeamsSetting(cwd, raw, args.slice(1));
      break;

    case 'sync-agent-teams':
      cmdSyncAgentTeams(cwd, raw);
      break;

    case 'init': {
      // `init <workflow>` fans out to workflow-specific scaffolding; the
      // plan/research/execute-story workflows also take a free-text argument.
      const workflow = args[1];
      switch (workflow) {
        case 'new-project':
          cmdInitNewProject(cwd, raw);
          break;
        case 'product-vision':
          cmdInitProductVision(cwd, raw);
          break;
        case 'coding-standards':
          cmdInitCodingStandards(cwd, raw);
          break;
        case 'map-system':
          cmdInitMapSystem(cwd, raw);
          break;
        case 'map-subsystem':
          cmdInitMapSubsystem(cwd, raw);
          break;
        case 'plan-backlog':
          cmdInitPlanBacklog(cwd, raw);
          break;
        case 'plan-feature':
          cmdInitPlanFeature(cwd, raw);
          break;
        case 'plan-story':
          cmdInitPlanStory(cwd, raw, args.slice(2).join(' '));
          break;
        case 'research-story':
          cmdInitResearchStory(cwd, raw, args.slice(2).join(' '));
          break;
        case 'execute-story':
          cmdInitExecuteStory(cwd, raw, args.slice(2).join(' '));
          break;
        case 'setup-github':
          cmdSetupGithubProject(cwd, raw);
          break;
        default:
          error('Unknown init subcommand. Available: new-project, product-vision, coding-standards, map-system, map-subsystem, plan-backlog, plan-feature, plan-story, research-story, execute-story, setup-github');
      }
      break;
    }

    case 'story': {
      const storySubcommand = args[1];
      const storyArgs = args.slice(2);
      switch (storySubcommand) {
        case 'update-state':
          cmdStoryUpdateState(cwd, raw, storyArgs);
          break;
        default:
          error('Unknown story subcommand. Available: update-state');
      }
      break;
    }

    case 'github': {
      const subcommand = args[1];
      const githubArgs = args.slice(2);
      switch (subcommand) {
        case 'resolve-fields':
          cmdGithubResolveFields(cwd, raw, githubArgs);
          break;
        case 'create-issue':
          cmdGithubCreateIssue(cwd, raw, githubArgs);
          break;
        case 'update-issue':
          cmdGithubUpdateIssue(cwd, raw, githubArgs);
          break;
        case 'sync-story':
          cmdGithubSyncStory(cwd, raw, githubArgs);
          break;
        case 'fetch-issues':
          cmdGithubFetchIssues(cwd, raw, githubArgs);
          break;
        default:
          error('Unknown github subcommand. Available: resolve-fields, create-issue, update-issue, sync-story, fetch-issues');
      }
      break;
    }

    default:
      error(`Unknown command: ${command}\nAvailable: load-config, resolve-model, verify-path-exists, generate-slug, current-timestamp, ensure-settings, write-github-settings, write-agent-teams, sync-agent-teams, init, story, github`);
  }
}
|
|
2880
|
+
|
|
2881
|
+
// Executable entry point: dispatch immediately when this file is run.
main();
|