claude-autopm 1.26.0 → 1.27.0
- package/autopm/.claude/agents/frameworks/e2e-test-engineer.md +1 -18
- package/autopm/.claude/agents/frameworks/nats-messaging-expert.md +1 -18
- package/autopm/.claude/agents/frameworks/react-frontend-engineer.md +1 -18
- package/autopm/.claude/agents/frameworks/react-ui-expert.md +1 -18
- package/autopm/.claude/agents/frameworks/tailwindcss-expert.md +1 -18
- package/autopm/.claude/agents/frameworks/ux-design-expert.md +1 -18
- package/autopm/.claude/agents/languages/bash-scripting-expert.md +1 -18
- package/autopm/.claude/agents/languages/javascript-frontend-engineer.md +1 -18
- package/autopm/.claude/agents/languages/nodejs-backend-engineer.md +1 -18
- package/autopm/.claude/agents/languages/python-backend-engineer.md +1 -18
- package/autopm/.claude/agents/languages/python-backend-expert.md +1 -18
- package/autopm/.claude/commands/pm/epic-decompose.md +19 -5
- package/autopm/.claude/commands/pm/prd-new.md +14 -1
- package/autopm/.claude/includes/task-creation-excellence.md +18 -0
- package/autopm/.claude/lib/ai-task-generator.js +84 -0
- package/autopm/.claude/lib/cli-parser.js +148 -0
- package/autopm/.claude/lib/dependency-analyzer.js +157 -0
- package/autopm/.claude/lib/frontmatter.js +224 -0
- package/autopm/.claude/lib/task-utils.js +64 -0
- package/autopm/.claude/scripts/pm-epic-decompose-local.js +158 -0
- package/autopm/.claude/scripts/pm-epic-list-local.js +103 -0
- package/autopm/.claude/scripts/pm-epic-show-local.js +70 -0
- package/autopm/.claude/scripts/pm-epic-update-local.js +56 -0
- package/autopm/.claude/scripts/pm-prd-list-local.js +111 -0
- package/autopm/.claude/scripts/pm-prd-new-local.js +196 -0
- package/autopm/.claude/scripts/pm-prd-parse-local.js +360 -0
- package/autopm/.claude/scripts/pm-prd-show-local.js +101 -0
- package/autopm/.claude/scripts/pm-prd-update-local.js +153 -0
- package/autopm/.claude/scripts/pm-sync-download-local.js +424 -0
- package/autopm/.claude/scripts/pm-sync-upload-local.js +473 -0
- package/autopm/.claude/scripts/pm-task-list-local.js +86 -0
- package/autopm/.claude/scripts/pm-task-show-local.js +92 -0
- package/autopm/.claude/scripts/pm-task-update-local.js +109 -0
- package/autopm/.claude/scripts/setup-local-mode.js +127 -0
- package/package.json +5 -3
- package/scripts/create-task-issues.sh +26 -0
- package/scripts/fix-invalid-command-refs.sh +4 -3
- package/scripts/fix-invalid-refs-simple.sh +8 -3

package/autopm/.claude/lib/dependency-analyzer.js
@@ -0,0 +1,157 @@
/**
 * Dependency Analyzer
 *
 * Analyzes task dependencies and validates dependency graphs.
 * Detects circular dependencies and builds execution order.
 *
 * Usage:
 *   const { analyzeDependencies } = require('./dependency-analyzer');
 *
 *   const result = analyzeDependencies(tasks);
 *   if (result.hasCircularDependencies) {
 *     console.error('Circular dependencies found:', result.cycles);
 *   }
 */

const { generateShortTaskId } = require('./task-utils');

/**
 * Analyze task dependencies
 *
 * @param {Array} tasks - Array of task objects with dependencies
 * @returns {Object} Analysis result with cycles, order, and validation
 */
function analyzeDependencies(tasks) {
  const graph = buildDependencyGraph(tasks);
  const cycles = detectCircularDependencies(graph);
  const order = cycles.length === 0 ? topologicalSort(graph) : [];

  return {
    hasCircularDependencies: cycles.length > 0,
    cycles,
    executionOrder: order,
    isValid: cycles.length === 0
  };
}

/**
 * Build dependency graph from tasks
 *
 * @param {Array} tasks - Array of task objects
 * @returns {Map} Dependency graph (task -> dependencies)
 */
function buildDependencyGraph(tasks) {
  const graph = new Map();

  tasks.forEach((task, index) => {
    const taskId = generateShortTaskId(index + 1);
    const dependencies = task.dependencies || [];

    graph.set(taskId, dependencies);
  });

  return graph;
}

/**
 * Detect circular dependencies using DFS
 *
 * @param {Map} graph - Dependency graph
 * @returns {Array} Array of circular dependency cycles
 */
function detectCircularDependencies(graph) {
  const visited = new Set();
  const recursionStack = new Set();
  const cycles = [];

  function dfs(node, path = []) {
    if (recursionStack.has(node)) {
      // Found a cycle
      const cycleStart = path.indexOf(node);
      cycles.push(path.slice(cycleStart));
      return;
    }

    if (visited.has(node)) {
      return;
    }

    visited.add(node);
    recursionStack.add(node);
    path.push(node);

    const dependencies = graph.get(node) || [];
    for (const dep of dependencies) {
      dfs(dep, path);
    }

    path.pop(); // Cleanup: remove node from path after exploring
    recursionStack.delete(node);
  }

  for (const node of graph.keys()) {
    if (!visited.has(node)) {
      dfs(node);
    }
  }

  return cycles;
}

/**
 * Topological sort for task execution order
 *
 * @param {Map} graph - Dependency graph
 * @returns {Array} Ordered array of task IDs
 */
function topologicalSort(graph) {
  const inDegree = new Map();
  const order = [];

  // Initialize in-degree for all nodes
  for (const node of graph.keys()) {
    inDegree.set(node, 0);
  }

  // Calculate in-degree
  for (const deps of graph.values()) {
    for (const dep of deps) {
      if (graph.has(dep)) {
        inDegree.set(dep, (inDegree.get(dep) || 0) + 1);
      }
    }
  }

  // Queue nodes with in-degree 0
  const queue = [];
  for (const [node, degree] of inDegree) {
    if (degree === 0) {
      queue.push(node);
    }
  }

  // Process queue
  while (queue.length > 0) {
    const node = queue.shift();
    order.push(node);

    const dependencies = graph.get(node) || [];
    for (const dep of dependencies) {
      const newDegree = inDegree.get(dep) - 1;
      inDegree.set(dep, newDegree);

      if (newDegree === 0) {
        queue.push(dep);
      }
    }
  }

  return order;
}

module.exports = {
  analyzeDependencies,
  buildDependencyGraph,
  detectCircularDependencies,
  topologicalSort
};
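
To illustrate the analyzer's behaviour, here is a minimal, hypothetical invocation. The task objects only need a `dependencies` array of short, positional task IDs (`task-001`, `task-002`, ...), since that is how buildDependencyGraph keys the graph via generateShortTaskId; the task shape shown is an assumption based on the docblock.

const { analyzeDependencies } = require('./dependency-analyzer');

// Two tasks that depend on each other (IDs are positional: first task = task-001).
const tasks = [
  { title: 'A', dependencies: ['task-002'] },
  { title: 'B', dependencies: ['task-001'] }
];

const result = analyzeDependencies(tasks);
console.log(result.hasCircularDependencies); // true
console.log(result.cycles);                  // [ [ 'task-001', 'task-002' ] ]
console.log(result.executionOrder);          // [] (not computed when a cycle exists)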

package/autopm/.claude/lib/frontmatter.js
@@ -0,0 +1,224 @@
/**
 * Frontmatter Utilities
 *
 * Provides utilities for parsing, validating, and manipulating YAML frontmatter
 * in markdown files. Used by Local Mode for PRD, Epic, and Task management.
 *
 * Documentation Source: Context7 - /eemeli/yaml
 * Trust Score: 9.4, 100 code snippets
 */

const { parse, stringify } = require('yaml');
const fs = require('fs').promises;

/**
 * Parse YAML frontmatter from markdown content
 *
 * @param {string} content - Markdown content with optional frontmatter
 * @returns {{frontmatter: Object, body: string}} Parsed frontmatter and body
 *
 * @example
 * const { frontmatter, body } = parseFrontmatter(content);
 * console.log(frontmatter.id); // 'task-001'
 */
function parseFrontmatter(content) {
  if (!content || typeof content !== 'string') {
    return { frontmatter: {}, body: content || '' };
  }

  // Check for frontmatter delimiters
  // Handles both empty (---\n---\n) and non-empty frontmatter
  const frontmatterRegex = /^---\r?\n([\s\S]*?)^---\r?\n?([\s\S]*)$/m;
  const match = content.match(frontmatterRegex);

  if (!match) {
    // No frontmatter found, return entire content as body
    return { frontmatter: {}, body: content };
  }

  const [, yamlContent, body] = match;

  // Handle empty frontmatter
  if (!yamlContent.trim()) {
    return { frontmatter: {}, body: body || '' };
  }

  try {
    // Parse YAML using Context7-documented pattern
    const frontmatter = parse(yamlContent);
    return {
      frontmatter: frontmatter || {},
      body: body || ''
    };
  } catch (error) {
    // Invalid YAML syntax
    throw new Error(`Invalid YAML syntax in frontmatter: ${error.message}`);
  }
}

/**
 * Stringify frontmatter and body into markdown format
 *
 * @param {Object} data - Frontmatter data object
 * @param {string} body - Markdown body content
 * @returns {string} Complete markdown with frontmatter
 *
 * @example
 * const markdown = stringifyFrontmatter({ id: 'task-001' }, 'Body content');
 */
function stringifyFrontmatter(data, body = '') {
  // Handle empty frontmatter - don't add empty object literal
  if (!data || Object.keys(data).length === 0) {
    return `---\n---\n${body}`;
  }

  // Use Context7-documented stringify with default options
  const yamlContent = stringify(data);

  // Format: ---\nYAML\n---\nBODY
  return `---\n${yamlContent}---\n${body}`;
}

/**
 * Update frontmatter fields in a file
 *
 * Supports nested field updates using dot notation:
 * - 'status' → updates top-level field
 * - 'providers.github.owner' → updates nested field
 *
 * @param {string} filePath - Path to markdown file
 * @param {Object} updates - Fields to update (supports dot notation for nested)
 * @returns {Promise<void>}
 *
 * @example
 * await updateFrontmatter('task.md', { status: 'done', 'metadata.updated': '2025-10-05' });
 */
async function updateFrontmatter(filePath, updates) {
  // Read existing file
  const content = await fs.readFile(filePath, 'utf8');

  // Parse current frontmatter
  const { frontmatter, body } = parseFrontmatter(content);

  // Apply updates (supports nested fields via dot notation)
  const updatedFrontmatter = { ...frontmatter };

  for (const [key, value] of Object.entries(updates)) {
    if (key.includes('.')) {
      // Nested field update: 'providers.github.owner'
      const keys = key.split('.');
      let current = updatedFrontmatter;

      for (let i = 0; i < keys.length - 1; i++) {
        const k = keys[i];
        if (!current[k] || typeof current[k] !== 'object') {
          current[k] = {};
        }
        current = current[k];
      }

      current[keys[keys.length - 1]] = value;
    } else {
      // Top-level field update
      updatedFrontmatter[key] = value;
    }
  }

  // Write updated content
  const updated = stringifyFrontmatter(updatedFrontmatter, body);
  await fs.writeFile(filePath, updated, 'utf8');
}

/**
 * Validate frontmatter against schema
 *
 * Schema format:
 * {
 *   required: ['id', 'title', 'status'],
 *   fields: {
 *     id: { type: 'string', pattern: /^task-\d+$/ },
 *     status: { type: 'string', enum: ['pending', 'in_progress', 'completed'] },
 *     tasks_total: { type: 'number' }
 *   }
 * }
 *
 * @param {Object} data - Frontmatter data to validate
 * @param {Object} schema - Validation schema
 * @returns {{valid: boolean, errors: string[]}} Validation result
 *
 * @example
 * const result = validateFrontmatter(data, schema);
 * if (!result.valid) console.error(result.errors);
 */
function validateFrontmatter(data, schema) {
  const errors = [];

  // Check required fields
  if (schema.required) {
    for (const field of schema.required) {
      if (!(field in data)) {
        errors.push(`Missing required field: ${field}`);
      }
    }
  }

  // Check field types and constraints
  if (schema.fields) {
    for (const [field, constraints] of Object.entries(schema.fields)) {
      const value = data[field];

      // Skip validation if field is not present and not required
      if (value === undefined) {
        continue;
      }

      // Type validation
      if (constraints.type) {
        const actualType = Array.isArray(value) ? 'array' : typeof value;
        if (actualType !== constraints.type) {
          errors.push(`Field '${field}' must be of type ${constraints.type}, got ${actualType}`);
          continue; // Skip other validations if type is wrong
        }
      }

      // Enum validation
      if (constraints.enum && !constraints.enum.includes(value)) {
        errors.push(`Field '${field}' must be one of [${constraints.enum.join(', ')}], got '${value}' (invalid enum value)`);
      }

      // Pattern validation (for strings)
      if (constraints.pattern && typeof value === 'string') {
        if (!constraints.pattern.test(value)) {
          errors.push(`Field '${field}' does not match required pattern (pattern validation failed)`);
        }
      }
    }
  }

  return {
    valid: errors.length === 0,
    errors
  };
}

/**
 * Strip frontmatter and return only body content
 *
 * @param {string} content - Markdown content with optional frontmatter
 * @returns {string} Body content only
 *
 * @example
 * const body = stripBody(content);
 */
function stripBody(content) {
  const { body } = parseFrontmatter(content);
  return body;
}

module.exports = {
  parseFrontmatter,
  stringifyFrontmatter,
  updateFrontmatter,
  validateFrontmatter,
  stripBody
};
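
A short sketch of round-tripping a document through these helpers; the schema shown here is illustrative only, not one shipped with the package.

const { parseFrontmatter, stringifyFrontmatter, validateFrontmatter } = require('./frontmatter');

// Build a markdown document with frontmatter, then parse it back.
const md = stringifyFrontmatter({ id: 'task-001', status: 'pending' }, '# Example task\n');
const { frontmatter, body } = parseFrontmatter(md);

// Validate the parsed frontmatter against an ad-hoc schema.
const schema = {
  required: ['id', 'status'],
  fields: {
    id: { type: 'string', pattern: /^task-\d+$/ },
    status: { type: 'string', enum: ['pending', 'in_progress', 'completed'] }
  }
};
console.log(validateFrontmatter(frontmatter, schema)); // { valid: true, errors: [] }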

package/autopm/.claude/lib/task-utils.js
@@ -0,0 +1,64 @@
/**
 * Task Utilities
 *
 * Shared utilities for task ID generation and formatting.
 * Ensures consistency across all task-related operations.
 *
 * Usage:
 *   const { generateTaskId, generateTaskNumber, generateTaskFilename } = require('./task-utils');
 *
 *   const taskId = generateTaskId('epic-001', 5);   // 'task-epic-001-005'
 *   const taskNum = generateTaskNumber(5);          // '005'
 *   const filename = generateTaskFilename(5);       // 'task-005.md'
 */

/**
 * Generate zero-padded task number (001, 002, etc.)
 *
 * @param {number} index - Task index (1-based)
 * @returns {string} Zero-padded task number
 */
function generateTaskNumber(index) {
  return String(index).padStart(3, '0');
}

/**
 * Generate full task ID with epic prefix
 *
 * @param {string} epicId - Epic ID (e.g., 'epic-001')
 * @param {number} index - Task index (1-based)
 * @returns {string} Full task ID (e.g., 'task-epic-001-005')
 */
function generateTaskId(epicId, index) {
  const taskNum = generateTaskNumber(index);
  return `task-${epicId}-${taskNum}`;
}

/**
 * Generate short task ID without epic prefix (for dependency analyzer)
 *
 * @param {number} index - Task index (1-based)
 * @returns {string} Short task ID (e.g., 'task-005')
 */
function generateShortTaskId(index) {
  const taskNum = generateTaskNumber(index);
  return `task-${taskNum}`;
}

/**
 * Generate task filename
 *
 * @param {number} index - Task index (1-based)
 * @returns {string} Task filename (e.g., 'task-005.md')
 */
function generateTaskFilename(index) {
  const taskNum = generateTaskNumber(index);
  return `task-${taskNum}.md`;
}

module.exports = {
  generateTaskNumber,
  generateTaskId,
  generateShortTaskId,
  generateTaskFilename
};
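
For reference, the padding is three digits wide, so indexes above 9 and 99 still produce consistently shaped IDs (indexes beyond 999 are simply left unpadded by padStart):

const { generateTaskNumber, generateTaskId, generateTaskFilename } = require('./task-utils');

console.log(generateTaskNumber(12));         // '012'
console.log(generateTaskId('epic-002', 12)); // 'task-epic-002-012'
console.log(generateTaskFilename(107));      // 'task-107.md'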

package/autopm/.claude/scripts/pm-epic-decompose-local.js
@@ -0,0 +1,158 @@
/**
 * Epic Decomposition - Local Mode
 *
 * AI-powered decomposition of epics into right-sized tasks (4-8h each).
 * Generates task files with frontmatter, dependencies, and acceptance criteria.
 *
 * Usage:
 *   const { decomposeLocalEpic } = require('./pm-epic-decompose-local');
 *
 *   const result = await decomposeLocalEpic('epic-001', {
 *     aiProvider: new OpenAIProvider(),
 *     maxTasks: 10
 *   });
 */

const fs = require('fs').promises;
const path = require('path');
const { showLocalEpic } = require('./pm-epic-show-local');
const { updateLocalEpic } = require('./pm-epic-update-local');
const { stringifyFrontmatter } = require('../lib/frontmatter');
const { TaskGenerator } = require('../lib/ai-task-generator');
const { analyzeDependencies } = require('../lib/dependency-analyzer');
const { generateTaskId, generateTaskNumber, generateTaskFilename } = require('../lib/task-utils');

/**
 * Decompose epic into tasks using AI
 *
 * @param {string} epicId - Epic ID to decompose
 * @param {Object} options - Decomposition options
 * @param {Object} [options.aiProvider] - AI provider instance (for testing)
 * @param {number} [options.maxTasks=15] - Maximum tasks to generate
 * @param {boolean} [options.validateDependencies=false] - Validate dependency graph
 * @returns {Promise<Object>} Decomposition result
 */
async function decomposeLocalEpic(epicId, options = {}) {
  const {
    aiProvider = null,
    maxTasks = 15,
    validateDependencies = false
  } = options;

  // 1. Load epic
  const epic = await showLocalEpic(epicId);
  const epicDir = path.dirname(epic.path);

  // 2. Generate tasks using AI
  const generator = new TaskGenerator(aiProvider);
  const tasks = await generator.generate(epic.body, { maxTasks });

  // Handle empty response
  if (tasks.length === 0) {
    return {
      epicId,
      tasksCreated: 0,
      warning: 'No tasks generated by AI provider'
    };
  }

  // 3. Validate dependencies if requested
  if (validateDependencies) {
    const analysis = analyzeDependencies(tasks);
    if (analysis.hasCircularDependencies) {
      throw new Error(
        `Circular dependency detected: ${analysis.cycles.map(c => c.join(' -> ')).join(', ')}`
      );
    }
  }

  // 4. Create task files
  const taskIds = [];
  for (let i = 0; i < tasks.length; i++) {
    const task = tasks[i];
    const taskId = generateTaskId(epicId, i + 1);
    const taskFilename = generateTaskFilename(i + 1);
    const taskPath = path.join(epicDir, taskFilename);

    // Build task frontmatter
    const taskFrontmatter = {
      id: taskId,
      epic_id: epicId,
      title: task.title,
      status: 'pending',
      priority: task.priority || 'medium',
      estimated_hours: task.estimated_hours || 4,
      dependencies: task.dependencies || [],
      created: new Date().toISOString().split('T')[0]
    };

    // Build task body
    const taskBody = buildTaskBody(task);

    // Write task file
    const taskContent = stringifyFrontmatter(taskFrontmatter, taskBody);
    await fs.writeFile(taskPath, taskContent, 'utf8');

    taskIds.push(taskId);
  }

  // 5. Update epic with task count
  await updateLocalEpic(epicId, {
    tasks_total: tasks.length,
    tasks_completed: 0,
    task_ids: taskIds
  });

  return {
    epicId,
    tasksCreated: tasks.length,
    taskIds
  };
}

/**
 * Build task body content from AI-generated task
 *
 * @param {Object} task - Task object from AI
 * @returns {string} Markdown body content
 */
function buildTaskBody(task) {
  let body = `# ${task.title}\n\n`;

  // Description
  if (task.description) {
    body += `## Description\n\n${task.description}\n\n`;
  }

  // Acceptance Criteria
  body += `## Acceptance Criteria\n\n`;
  if (task.acceptance_criteria && task.acceptance_criteria.length > 0) {
    task.acceptance_criteria.forEach(criterion => {
      body += `- [ ] ${criterion}\n`;
    });
  } else {
    body += `- [ ] Implementation complete\n`;
    body += `- [ ] Tests passing\n`;
    body += `- [ ] Code reviewed\n`;
  }

  body += `\n`;

  // Technical Notes (if provided)
  if (task.technical_notes) {
    body += `## Technical Notes\n\n${task.technical_notes}\n\n`;
  }

  // Dependencies
  if (task.dependencies && task.dependencies.length > 0) {
    body += `## Dependencies\n\n`;
    task.dependencies.forEach(dep => {
      body += `- ${dep}\n`;
    });
    body += `\n`;
  }

  return body.trim();
}

module.exports = { decomposeLocalEpic };
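
A hypothetical invocation sketch, assuming an epic with ID 'epic-001' already exists under .claude/epics and that the default aiProvider (null) is handled by ai-task-generator, which is not part of this diff section; the result shape comes straight from the return values above.

const { decomposeLocalEpic } = require('./pm-epic-decompose-local');

(async () => {
  const result = await decomposeLocalEpic('epic-001', {
    maxTasks: 10,
    validateDependencies: true
  });

  // Either { epicId, tasksCreated, taskIds } or, when the AI returns nothing,
  // { epicId, tasksCreated: 0, warning: 'No tasks generated by AI provider' }.
  console.log(`${result.tasksCreated} task file(s) written for ${result.epicId}`);
})();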

package/autopm/.claude/scripts/pm-epic-list-local.js
@@ -0,0 +1,103 @@
/**
 * List Local Epics
 *
 * Lists all epics in the local `.claude/epics/` directory
 * with optional filtering by status or PRD ID.
 *
 * Usage:
 *   const { listLocalEpics } = require('./pm-epic-list-local');
 *
 *   // List all epics
 *   const epics = await listLocalEpics();
 *
 *   // Filter by status
 *   const inProgress = await listLocalEpics({ status: 'in_progress' });
 *
 *   // Filter by PRD
 *   const prdEpics = await listLocalEpics({ prd_id: 'prd-001' });
 */

const fs = require('fs').promises;
const path = require('path');
const { parseFrontmatter } = require('../lib/frontmatter');

/**
 * List all local epics with optional filtering
 *
 * @param {Object} options - Filter options
 * @param {string} [options.status] - Filter by epic status (planning, in_progress, completed, etc.)
 * @param {string} [options.prd_id] - Filter by PRD ID
 * @returns {Promise<Array>} Array of epic objects with frontmatter
 */
async function listLocalEpics(options = {}) {
  const basePath = process.cwd();
  const epicsDir = path.join(basePath, '.claude', 'epics');

  // Check if epics directory exists
  try {
    await fs.access(epicsDir);
  } catch (err) {
    if (err.code === 'ENOENT') {
      return []; // No epics directory = no epics
    }
    throw err;
  }

  // Read all epic directories
  const dirs = await fs.readdir(epicsDir);
  const epics = [];

  // Process each epic directory
  for (const dir of dirs) {
    // Skip hidden directories and files
    if (dir.startsWith('.')) continue;

    const epicDir = path.join(epicsDir, dir);
    const epicPath = path.join(epicDir, 'epic.md');

    try {
      // Check if it's a directory with epic.md
      const stat = await fs.stat(epicDir);
      if (!stat.isDirectory()) continue;

      // Read and parse epic.md
      const content = await fs.readFile(epicPath, 'utf8');
      const { frontmatter } = parseFrontmatter(content);

      // Only include valid epics with required fields
      if (frontmatter && frontmatter.id) {
        epics.push({
          ...frontmatter,
          directory: dir
        });
      }
    } catch (err) {
      // Skip invalid epic directories (missing epic.md, parse errors, etc.)
      if (err.code !== 'ENOENT') {
        console.warn(`Warning: Could not process epic in ${dir}:`, err.message);
      }
    }
  }

  // Apply filters
  let filtered = epics;

  if (options.status) {
    filtered = filtered.filter(epic => epic.status === options.status);
  }

  if (options.prd_id) {
    filtered = filtered.filter(epic => epic.prd_id === options.prd_id);
  }

  // Sort by creation date (newest first)
  filtered.sort((a, b) => {
    const dateA = new Date(a.created || 0);
    const dateB = new Date(b.created || 0);
    return dateB - dateA; // Descending order (newest first)
  });

  return filtered;
}

module.exports = { listLocalEpics };
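
And a small usage sketch combining both filters; `directory` is added by the function itself, while the other fields (id, status, prd_id, created) come from each epic's frontmatter, so the exact values depend on your local epics.

const { listLocalEpics } = require('./pm-epic-list-local');

(async () => {
  // In-progress epics belonging to one PRD, newest first.
  const active = await listLocalEpics({ prd_id: 'prd-001', status: 'in_progress' });
  for (const epic of active) {
    console.log(`${epic.id} [${epic.directory}] created ${epic.created}`);
  }
})();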