@yemi33/minions 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +819 -0
- package/LICENSE +21 -0
- package/README.md +598 -0
- package/agents/dallas/charter.md +56 -0
- package/agents/lambert/charter.md +67 -0
- package/agents/ralph/charter.md +45 -0
- package/agents/rebecca/charter.md +57 -0
- package/agents/ripley/charter.md +47 -0
- package/bin/minions.js +467 -0
- package/config.template.json +28 -0
- package/dashboard.html +4822 -0
- package/dashboard.js +2623 -0
- package/docs/auto-discovery.md +416 -0
- package/docs/blog-first-successful-dispatch.md +128 -0
- package/docs/command-center.md +156 -0
- package/docs/demo/01-dashboard-overview.gif +0 -0
- package/docs/demo/02-command-center.gif +0 -0
- package/docs/demo/03-work-items.gif +0 -0
- package/docs/demo/04-plan-docchat.gif +0 -0
- package/docs/demo/05-prd-progress.gif +0 -0
- package/docs/demo/06-inbox-metrics.gif +0 -0
- package/docs/deprecated.json +83 -0
- package/docs/distribution.md +96 -0
- package/docs/engine-restart.md +92 -0
- package/docs/human-vs-automated.md +108 -0
- package/docs/index.html +221 -0
- package/docs/plan-lifecycle.md +140 -0
- package/docs/self-improvement.md +344 -0
- package/engine/ado-mcp-wrapper.js +42 -0
- package/engine/ado.js +383 -0
- package/engine/check-status.js +23 -0
- package/engine/cli.js +754 -0
- package/engine/consolidation.js +417 -0
- package/engine/github.js +331 -0
- package/engine/lifecycle.js +1113 -0
- package/engine/llm.js +116 -0
- package/engine/queries.js +677 -0
- package/engine/shared.js +397 -0
- package/engine/spawn-agent.js +151 -0
- package/engine.js +3227 -0
- package/minions.js +556 -0
- package/package.json +48 -0
- package/playbooks/ask.md +49 -0
- package/playbooks/build-and-test.md +155 -0
- package/playbooks/explore.md +64 -0
- package/playbooks/fix.md +57 -0
- package/playbooks/implement-shared.md +68 -0
- package/playbooks/implement.md +95 -0
- package/playbooks/plan-to-prd.md +104 -0
- package/playbooks/plan.md +99 -0
- package/playbooks/review.md +68 -0
- package/playbooks/test.md +75 -0
- package/playbooks/verify.md +190 -0
- package/playbooks/work-item.md +74 -0
|
@@ -0,0 +1,1113 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* engine/lifecycle.js — Post-completion hooks, PR sync, agent history/metrics, plan chaining.
|
|
3
|
+
* Extracted from engine.js.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const fs = require('fs');
|
|
7
|
+
const path = require('path');
|
|
8
|
+
const shared = require('./shared');
|
|
9
|
+
const { safeRead, safeJson, safeWrite, execSilent, projectPrPath, getPrLinks, addPrLink } = shared;
|
|
10
|
+
const { trackEngineUsage } = require('./llm');
|
|
11
|
+
const queries = require('./queries');
|
|
12
|
+
const { getConfig, getInboxFiles, getNotes, getPrs, getDispatch,
|
|
13
|
+
MINIONS_DIR, ENGINE_DIR, PLANS_DIR, PRD_DIR, INBOX_DIR, AGENTS_DIR } = queries;
|
|
14
|
+
|
|
15
|
+
// Lazy require — only for log(), ts(), dateStamp() and engine-specific functions
|
|
16
|
+
let _engine = null;
|
|
17
|
+
// Lazily resolve the parent engine module on first use and cache it in the
// module-level `_engine` slot; avoids a hard circular require at load time.
function engine() {
  _engine = _engine || require('../engine');
  return _engine;
}
|
|
21
|
+
|
|
22
|
+
// ─── Plan Completion Detection ───────────────────────────────────────────────
|
|
23
|
+
/**
 * Post-completion hook: when every feature of the PRD backing `meta.item`
 * has a done work item, marks the PRD completed, writes a summary to
 * notes/inbox, queues follow-up PR/verify work items, archives the PRD and
 * its source .md plan, and removes git worktrees created for the plan.
 *
 * @param {object} meta   Dispatch metadata; only `meta.item.sourcePlan` is read here.
 * @param {object} config Engine config; used for project list and worktree root.
 * @returns {void} Early-returns (no-op) while the plan is incomplete.
 */
function checkPlanCompletion(meta, config) {
  const e = engine();
  const planFile = meta.item?.sourcePlan;
  if (!planFile) return; // item not backed by a PRD — nothing to check
  const planPath = path.join(PRD_DIR, planFile);
  const plan = safeJson(planPath);
  if (!plan?.missing_features) return; // missing/invalid PRD JSON
  if (plan.status === 'completed') return; // already processed — idempotence guard

  const projects = shared.getProjects(config);

  // Collect work items from ALL projects (PRD items can span multiple projects)
  let allWorkItems = [];
  for (const p of projects) {
    try {
      const wi = safeJson(shared.projectWorkItemsPath(p)) || [];
      allWorkItems = allWorkItems.concat(wi);
    } catch {}
  }
  // Exclude the follow-up items this hook itself creates (pr/verify) so they
  // don't count toward — or block — plan completion.
  const planItems = allWorkItems.filter(w => w.sourcePlan === planFile && w.itemType !== 'pr' && w.itemType !== 'verify');
  if (planItems.length === 0) return;

  // Hard completion gate: every PRD feature ID must have a corresponding work item in done status.
  const planFeatureIds = new Set((plan.missing_features || []).map(f => f.id).filter(Boolean));
  const workItemById = {};
  for (const w of planItems) { if (w.id) workItemById[w.id] = w; }

  // Check 1: every feature must have a work item (materialized)
  // Fallback: also accept features marked done directly in the PRD JSON (resolved externally)
  const unmaterialized = [...planFeatureIds].filter(id => {
    if (workItemById[id]) return false;
    const prdItem = (plan.missing_features || []).find(f => f.id === id);
    return !(prdItem && (prdItem.status === 'done' || prdItem.status === 'in-pr'));
  });
  if (unmaterialized.length > 0) {
    e.log('info', `Plan ${planFile}: ${unmaterialized.length}/${planFeatureIds.size} feature(s) not yet materialized as work items: ${unmaterialized.join(', ')}`);
    return;
  }

  // Check 2: every feature's work item must be done (or PRD item marked done externally)
  const notDone = [...planFeatureIds].filter(id => {
    const w = workItemById[id];
    if (w && (w.status === 'done' || w.status === 'in-pr')) return false; // in-pr accepted for backward compat
    const prdItem = (plan.missing_features || []).find(f => f.id === id);
    return !(prdItem && (prdItem.status === 'done' || prdItem.status === 'in-pr'));
  });
  if (notDone.length > 0) {
    e.log('info', `Plan ${planFile}: waiting for done on ${notDone.length}/${planFeatureIds.size} item(s): ${notDone.join(', ')}`);
    return;
  }

  const doneItems = planItems.filter(w => w.status === 'done' || w.status === 'in-pr');
  // NOTE(review): failedItems is always empty here (the gates above bail out
  // before any failure can reach this point), so the "failed" summary lines
  // below never fire — confirm whether failure reporting was intended.
  const failedItems = [];

  // 1. Mark plan as completed
  plan.status = 'completed';
  plan.completedAt = e.ts();

  // Compute timing: first dispatch → last completion across the plan's items.
  let firstDispatched = null, lastCompleted = null;
  for (const wi of planItems) {
    if (wi.dispatched_at) {
      const d = new Date(wi.dispatched_at).getTime();
      if (!firstDispatched || d < firstDispatched) firstDispatched = d;
    }
    if (wi.completedAt) {
      const c = new Date(wi.completedAt).getTime();
      if (!lastCompleted || c > lastCompleted) lastCompleted = c;
    }
  }
  const runtimeMs = firstDispatched && lastCompleted ? lastCompleted - firstDispatched : 0;
  const runtimeMin = Math.round(runtimeMs / 60000);

  // 2. Generate completion summary
  // Collect PRs from all projects that are linked (via pr-links) to a done item.
  const prsCreated = [];
  for (const p of projects) {
    try {
      const prPath = shared.projectPrPath(p);
      const prs = safeJson(prPath) || [];
      const prLinks = getPrLinks();
      for (const pr of prs) {
        const linkedItemId = prLinks[pr.id];
        if (linkedItemId && doneItems.find(w => w.id === linkedItemId)) {
          prsCreated.push(pr);
        }
      }
    } catch {}
  }
  // Dedupe by PR id (fall back to URL) — a Map keeps the last entry per key.
  const uniquePrs = [...new Map(prsCreated.map(pr => [pr.id || pr.url, pr])).values()];

  // Markdown summary; empty strings are filtered out so optional sections collapse.
  const summary = [
    `# PRD Completed: ${plan.plan_summary || planFile}`,
    ``,
    `**Project:** ${plan.project || 'Unknown'}`,
    `**Strategy:** ${plan.branch_strategy || 'parallel'}`,
    `**Completed:** ${new Date().toISOString().slice(0, 16).replace('T', ' ')}`,
    `**Runtime:** ${runtimeMin >= 60 ? Math.floor(runtimeMin / 60) + 'h ' + (runtimeMin % 60) + 'm' : runtimeMin + 'm'}`,
    ``,
    `## Results`,
    `- **${doneItems.length}** items completed`,
    failedItems.length ? `- **${failedItems.length}** items failed` : '',
    uniquePrs.length ? `- **${uniquePrs.length}** PR(s) created` : '',
    ``,
    `## Items`,
    ...doneItems.map(w => `- [done] ${w.id}: ${w.title.replace('Implement: ', '')}`),
    ...failedItems.map(w => `- [failed] ${w.id}: ${w.title.replace('Implement: ', '')}${w.failReason ? ' — ' + w.failReason : ''}`),
    uniquePrs.length ? `\n## Pull Requests` : '',
    ...uniquePrs.map(pr => `- ${pr.id}: ${pr.title || ''} ${pr.url || ''}`),
  ].filter(Boolean).join('\n');

  // Write summary to notes/inbox
  const summaryFile = `prd-completion-${planFile.replace('.json', '')}-${e.ts().slice(0, 10)}.md`;
  shared.safeWrite(shared.uniquePath(path.join(MINIONS_DIR, 'notes', 'inbox', summaryFile)), summary);
  e.log('info', `PRD completion summary written to notes/inbox/${summaryFile}`);

  // Resolve the primary project for writing new work items (PR, verify)
  const projectName = plan.project;
  const primaryProject = projectName
    ? projects.find(p => p.name?.toLowerCase() === projectName?.toLowerCase()) : projects[0];
  const wiPath = primaryProject ? shared.projectWorkItemsPath(primaryProject) : null;
  const workItems = wiPath ? (safeJson(wiPath) || []) : [];

  // 3. For shared-branch plans, create PR work item
  if (plan.branch_strategy === 'shared-branch' && plan.feature_branch && wiPath) {
    const existingPrItem = allWorkItems.find(w => w.sourcePlan === planFile && w.itemType === 'pr');
    if (!existingPrItem) {
      const id = 'PL-' + shared.uid();
      const featureBranch = plan.feature_branch;
      const mainBranch = primaryProject.mainBranch || 'main';
      const itemSummary = doneItems.map(w => '- ' + w.id + ': ' + w.title.replace('Implement: ', '')).join('\n');
      workItems.push({
        id, title: `Create PR for plan: ${plan.plan_summary || planFile}`,
        type: 'implement', priority: 'high',
        description: `All plan items from \`${planFile}\` are complete on branch \`${featureBranch}\`.\n\n**Branch:** \`${featureBranch}\`\n**Target:** \`${mainBranch}\`\n\n## Completed Items\n${itemSummary}`,
        status: 'pending', created: e.ts(), createdBy: 'engine:plan-completion',
        sourcePlan: planFile, itemType: 'pr',
        branch: featureBranch, branchStrategy: 'shared-branch', project: projectName,
      });
      shared.safeWrite(wiPath, workItems);
    }
  }

  // 4. Create verification work item (build, test, start webapp, write testing guide)
  const existingVerify = allWorkItems.find(w => w.sourcePlan === planFile && w.itemType === 'verify');
  if (!existingVerify && doneItems.length > 0) {
    const verifyId = 'PL-' + shared.uid();
    const planSlug = planFile.replace('.json', '');

    // Group PRs by project — one worktree per project with all branches merged in
    const projectPrs = {}; // projectName -> { project, prs: [], mainBranch }
    for (const p of projects) {
      const prLinks = getPrLinks();
      const prs = (safeJson(shared.projectPrPath(p)) || [])
        .filter(pr => {
          const linkedId = prLinks[pr.id];
          return pr.status === 'active' && linkedId && doneItems.find(w => w.id === linkedId);
        });
      if (prs.length > 0) {
        projectPrs[p.name] = { project: p, prs, mainBranch: p.mainBranch || 'main' };
      }
    }

    // Build per-project checkout commands: one worktree, merge all PR branches into it
    const checkoutBlocks = Object.entries(projectPrs).map(([name, { project: p, prs, mainBranch }]) => {
      // Worktree path gets a fresh uid suffix each time this block is built.
      const wtPath = `${p.localPath}/../worktrees/verify-${name}-${planSlug}-${shared.uid()}`;
      const branches = prs.map(pr => pr.branch).filter(Boolean);
      const lines = [
        `# ${name} — merge ${branches.length} PR branch(es) into one worktree`,
        `cd "${p.localPath}"`,
        `git fetch origin ${branches.map(b => `"${b}"`).join(' ')} "${mainBranch}"`,
        `git worktree add "${wtPath}" "origin/${mainBranch}" 2>/dev/null || (cd "${wtPath}" && git checkout "${mainBranch}" && git pull origin "${mainBranch}")`,
        `cd "${wtPath}"`,
        ...branches.map(b => `git merge "origin/${b}" --no-edit # ${prs.find(pr => pr.branch === b)?.id || b}`),
      ];
      return lines.join('\n');
    }).join('\n\n');

    // Build completed items summary with acceptance criteria
    const itemsWithCriteria = doneItems.map(w => {
      const planItem = plan.missing_features?.find(f => f.id === w.id);
      const criteria = (planItem?.acceptance_criteria || []).map(c => ` - ${c}`).join('\n');
      return `### ${w.id}: ${w.title.replace('Implement: ', '')}\n${criteria ? '**Acceptance Criteria:**\n' + criteria : ''}`;
    }).join('\n\n');

    const prSummary = uniquePrs.map(pr =>
      `- ${pr.id}: ${pr.title || ''} (branch: \`${pr.branch || '?'}\`) ${pr.url || ''}`
    ).join('\n');

    // List projects and their worktree paths for the agent
    // NOTE(review): this path omits the uid suffix that checkoutBlocks appends
    // above (verify-<name>-<slug>-<uid>), so the listed path will not match the
    // worktree actually created — confirm intended.
    const projectWorktrees = Object.entries(projectPrs).map(([name, { project: p }]) =>
      `- **${name}**: \`${p.localPath}/../worktrees/verify-${planSlug}\``
    ).join('\n');

    const description = [
      `Verification task for completed plan \`${planFile}\`.`,
      ``,
      `## Projects & Worktrees`,
      ``,
      `Each project gets ONE worktree with all PR branches merged in:`,
      projectWorktrees,
      ``,
      `## Setup Commands`,
      ``,
      `\`\`\`bash`,
      checkoutBlocks,
      `\`\`\``,
      ``,
      `If any merge conflicts occur, resolve them (prefer the PR branch changes).`,
      `After setup, build and test from the worktree paths above.`,
      ``,
      `## Completed Items`,
      ``,
      itemsWithCriteria,
      ``,
      `## Pull Requests`,
      ``,
      prSummary,
    ].join('\n');

    workItems.push({
      id: verifyId,
      title: `Verify plan: ${(plan.plan_summary || planFile).slice(0, 80)}`,
      type: 'verify',
      priority: 'high',
      description,
      status: 'pending',
      created: e.ts(),
      createdBy: 'engine:plan-verification',
      sourcePlan: planFile,
      itemType: 'verify',
      project: projectName,
    });
    // NOTE(review): unlike step 3, this write is not guarded by `wiPath` —
    // if no primary project resolved, wiPath is null here; confirm safeWrite
    // tolerates a null path.
    shared.safeWrite(wiPath, workItems);
    e.log('info', `Created verification work item ${verifyId} for plan ${planFile}`);
  }

  // 5. Archive: move PRD .json to prd/archive/ and source .md plan to plans/archive/
  const prdArchiveDir = path.join(PRD_DIR, 'archive');
  if (!fs.existsSync(prdArchiveDir)) fs.mkdirSync(prdArchiveDir, { recursive: true });
  shared.safeWrite(planPath, plan); // save completed status first
  try {
    fs.renameSync(planPath, path.join(prdArchiveDir, planFile));
    e.log('info', `Archived completed PRD: prd/archive/${planFile}`);
  } catch (err) {
    e.log('warn', `Failed to archive PRD ${planFile}: ${err.message}`);
    // Archive failed — re-save in place so the completed status persists.
    shared.safeWrite(planPath, plan);
  }

  // Also archive the source .md plan if it exists
  const planArchiveDir = path.join(PLANS_DIR, 'archive');
  if (!fs.existsSync(planArchiveDir)) fs.mkdirSync(planArchiveDir, { recursive: true });
  try {
    const mdFiles = fs.readdirSync(PLANS_DIR).filter(f => f.endsWith('.md'));
    for (const md of mdFiles) {
      const mdContent = shared.safeRead(path.join(PLANS_DIR, md)) || '';
      // Match by project name or plan summary appearing in the .md content
      // NOTE(review): if plan.project is undefined, includes(projectName)
      // coerces to the string "undefined" and may match spuriously — confirm.
      if (mdContent.includes(projectName) || mdContent.includes(plan.plan_summary?.slice(0, 40) || '___nomatch___')) {
        try {
          fs.renameSync(path.join(PLANS_DIR, md), path.join(planArchiveDir, md));
          e.log('info', `Archived source plan: plans/archive/${md}`);
        } catch {}
        break; // archive at most one matching .md
      }
    }
  } catch {}

  // 6. Clean up ALL worktrees created for this plan's work items (shared-branch + per-item)
  try {
    // Collect all branch slugs: shared-branch + per-item branches + item IDs
    const branchSlugs = new Set();
    if (plan.feature_branch) branchSlugs.add(shared.sanitizeBranch(plan.feature_branch).toLowerCase());
    for (const w of doneItems) {
      if (w.branch) branchSlugs.add(shared.sanitizeBranch(w.branch).toLowerCase());
      if (w.id) branchSlugs.add(w.id.toLowerCase());
    }
    for (const pr of uniquePrs) {
      if (pr.branch) branchSlugs.add(shared.sanitizeBranch(pr.branch).toLowerCase());
    }

    let cleanedWt = 0;
    for (const p of projects) {
      const root = path.resolve(p.localPath);
      const wtRoot = path.resolve(root, config.engine?.worktreeRoot || '../worktrees');
      if (!fs.existsSync(wtRoot)) continue;
      const dirs = fs.readdirSync(wtRoot);
      for (const dir of dirs) {
        const dirLower = dir.toLowerCase();
        // Substring match: any worktree dir containing a known slug is removed.
        const matches = [...branchSlugs].some(slug => dirLower.includes(slug));
        if (matches) {
          const wtPath = path.join(wtRoot, dir);
          try {
            execSilent(`git worktree remove "${wtPath}" --force`, { cwd: root, stdio: 'pipe', timeout: 15000 });
            cleanedWt++;
          } catch (err) { e.log('warn', `Failed to remove worktree ${dir}: ${err.message}`); }
        }
      }
    }
    if (cleanedWt > 0) e.log('info', `Plan completion: cleaned ${cleanedWt} worktree(s)`);
  } catch {}

  e.log('info', `PRD ${planFile} completed: ${doneItems.length} done, ${failedItems.length} failed, runtime ${runtimeMin}m`);
}
|
|
326
|
+
|
|
327
|
+
// ─── Plan → PRD Chaining ─────────────────────────────────────────────────────
|
|
328
|
+
/**
 * After a plan-type task completes, queues a follow-up `plan-to-prd` work item
 * in the central work-items.json so the next engine tick converts the written
 * plan into a PRD.
 *
 * Resolution order for the plan file: explicit name from dispatch meta, else
 * the most recently modified .md/.json in plans/. A .json plan that is not
 * already a PRD (no `missing_features`) is renamed to .md first.
 *
 * Fixes vs. previous revision:
 *  - the rename log interpolated `planFileName` after it was reassigned,
 *    printing "x.md → x.md"; the original name is now captured first.
 *  - `planContent` was assigned but never used; the read is kept purely as a
 *    readability probe.
 *
 * @param {object} dispatchItem Completed dispatch entry; only `.id` is used (for logs/description).
 * @param {object} meta         Dispatch metadata (plan file name, item title/priority, project).
 * @param {object} config       Engine config, used to resolve the target project.
 * @returns {void}
 */
function chainPlanToPrd(dispatchItem, meta, config) {
  const e = engine();
  const planDir = path.join(MINIONS_DIR, 'plans');
  if (!fs.existsSync(planDir)) fs.mkdirSync(planDir, { recursive: true });

  let planFileName = meta?.planFileName || meta?.item?._planFileName;
  if (planFileName && fs.existsSync(path.join(planDir, planFileName))) {
    // Exact match from meta
  } else {
    // Fallback: newest plan file by mtime.
    const planFiles = fs.readdirSync(planDir)
      .filter(f => f.endsWith('.md') || f.endsWith('.json'))
      .map(f => ({ name: f, mtime: fs.statSync(path.join(planDir, f)).mtimeMs }))
      .sort((a, b) => b.mtime - a.mtime);
    planFileName = planFiles[0]?.name;
    if (!planFileName) {
      e.log('warn', `Plan chaining: no plan files found in plans/ after task ${dispatchItem.id}`);
      return;
    }
    e.log('info', `Plan chaining: using mtime fallback — found ${planFileName}`);
  }

  if (planFileName.endsWith('.json')) {
    const mdName = planFileName.replace(/\.json$/, '.md');
    // Check plans/ first, then prd/ for .json files
    const jsonPath = fs.existsSync(path.join(planDir, planFileName))
      ? path.join(planDir, planFileName)
      : path.join(MINIONS_DIR, 'prd', planFileName);
    const mdPath = path.join(planDir, mdName);
    try {
      const content = fs.readFileSync(jsonPath, 'utf8');
      const parsed = JSON.parse(content);
      if (!parsed.missing_features) {
        // Valid JSON but not a PRD — plans must be .md, so rename it.
        const originalName = planFileName; // capture BEFORE reassignment for the log
        fs.renameSync(jsonPath, mdPath);
        planFileName = mdName;
        e.log('info', `Plan chaining: renamed ${originalName} → ${mdName} (plans must be .md)`);
      }
    } catch {
      // Unreadable or invalid JSON — best-effort rename to .md.
      try {
        if (fs.existsSync(jsonPath)) fs.renameSync(jsonPath, path.join(planDir, mdName));
        planFileName = mdName;
        e.log('info', `Plan chaining: renamed to .md (not valid JSON)`);
      } catch {}
    }
  }

  const planFile = { name: planFileName };
  const planPath = path.join(planDir, planFileName);
  // Readability probe only — the content itself is not needed here.
  try { fs.readFileSync(planPath, 'utf8'); } catch (err) {
    e.log('error', `Plan chaining: failed to read plan file ${planFile.name}: ${err.message}`);
    return;
  }

  // Resolve target project: item's project by exact name, else first configured.
  const projectName = meta?.item?.project || meta?.project?.name;
  const projects = shared.getProjects(config);
  const targetProject = projectName
    ? projects.find(p => p.name === projectName) || projects[0]
    : projects[0];

  if (!targetProject) {
    e.log('error', 'Plan chaining: no target project available');
    return;
  }

  e.log('info', `Plan chaining: queuing plan-to-prd for next tick (chained from ${dispatchItem.id})`);
  const wiPath = path.join(MINIONS_DIR, 'work-items.json');
  let items = [];
  try { items = JSON.parse(fs.readFileSync(wiPath, 'utf8')); } catch {}
  items.push({
    id: 'W-' + shared.uid(),
    title: `Convert plan to PRD: ${meta?.item?.title || planFile.name}`,
    type: 'plan-to-prd',
    priority: meta?.item?.priority || 'high',
    description: `Plan file: plans/${planFile.name}\nChained from plan task ${dispatchItem.id}`,
    status: 'pending',
    created: e.ts(),
    createdBy: 'engine:chain',
    project: targetProject.name,
    planFile: planFile.name,
  });
  shared.safeWrite(wiPath, items);
}
|
|
410
|
+
|
|
411
|
+
// ─── Work Item Status ────────────────────────────────────────────────────────
|
|
412
|
+
/**
 * Records a dispatch outcome on the matching work item and persists it.
 *
 * For fan-out dispatches (one item sent to several agents) the per-agent
 * result is stored under `agentResults` and the item's overall status is
 * derived: done if ANY agent succeeded; failed only once all agents reported
 * (or a 6h timeout elapsed with at least one report) and none succeeded.
 * For single dispatches the status is applied directly.
 *
 * @param {object} meta   Dispatch metadata: `item.id`, `source`, `project.name`, `dispatchKey`.
 * @param {string} status New status for the item ('done', 'failed', ...).
 * @param {string} [reason] Failure reason, stored as `failReason` when failing.
 * @returns {void} No-ops when the item, its file, or its id cannot be resolved.
 */
function updateWorkItemStatus(meta, status, reason) {
  const e = engine();
  const itemId = meta.item?.id;
  if (!itemId) return;

  // Resolve which work-items file owns this item based on dispatch source.
  let wiPath;
  if (meta.source === 'central-work-item' || meta.source === 'central-work-item-fanout') {
    wiPath = path.join(MINIONS_DIR, 'work-items.json');
  } else if (meta.source === 'work-item' && meta.project?.name) {
    wiPath = path.join(MINIONS_DIR, 'projects', meta.project.name, 'work-items.json');
  }
  if (!wiPath) return;

  const items = safeJson(wiPath);
  if (!items || !Array.isArray(items)) return;

  const target = items.find(i => i.id === itemId);
  if (target) {
    if (meta.source === 'central-work-item-fanout') {
      if (!target.agentResults) target.agentResults = {};
      // Agent id is the last '-'-separated segment of the dispatch key.
      const parts = (meta.dispatchKey || '').split('-');
      const agent = parts[parts.length - 1] || 'unknown';
      target.agentResults[agent] = { status, completedAt: e.ts(), reason: reason || undefined };

      const results = Object.values(target.agentResults);
      const anySuccess = results.some(r => r.status === 'done');
      // "All reported" only meaningful when fanOutAgents is a non-empty array.
      const allDone = Array.isArray(target.fanOutAgents) && target.fanOutAgents.length > 0 ? results.length >= target.fanOutAgents.length : false;
      const dispatchAge = target.dispatched_at ? Date.now() - new Date(target.dispatched_at).getTime() : 0;
      // 6-hour timeout: give up waiting for stragglers once someone reported.
      const timedOut = !allDone && dispatchAge > 6 * 60 * 60 * 1000 && results.length > 0;

      if (anySuccess) {
        // One success is enough: mark done and clear any stale failure fields.
        target.status = 'done';
        delete target.failReason;
        delete target.failedAt;
        target.completedAgents = Object.entries(target.agentResults)
          .filter(([, r]) => r.status === 'done')
          .map(([a]) => a);
      } else if (allDone || timedOut) {
        target.status = 'failed';
        target.failReason = timedOut
          ? `Fan-out timed out: ${results.length}/${(target.fanOutAgents || []).length} agents reported (all failed)`
          : 'All fan-out agents failed';
        target.failedAt = e.ts();
      }
      // Otherwise: still waiting on other agents — overall status unchanged.
    } else {
      // Single-dispatch path: apply the status directly.
      target.status = status;
      if (status === 'done') {
        delete target.failReason;
        delete target.failedAt;
        target.completedAt = e.ts();
      } else if (status === 'failed') {
        if (reason) target.failReason = reason;
        target.failedAt = e.ts();
      }
    }

    shared.safeWrite(wiPath, items);
    e.log('info', `Work item ${itemId} → ${status}${reason ? ': ' + reason : ''}`);

    // Sync status to PRD JSON so the two share the same value (work item is source of truth)
    syncPrdItemStatus(itemId, status, meta.item?.sourcePlan);
  }
}
|
|
475
|
+
|
|
476
|
+
/**
 * Mirrors a work item's status onto the matching feature entry inside its
 * PRD JSON file, keeping the two in sync (the work item is the source of
 * truth). Scans only `sourcePlan` when given, otherwise every .json in prd/.
 * Stops after the first PRD whose feature list contains `itemId` with a
 * differing status. Best-effort: all I/O errors are swallowed.
 *
 * Fix vs. previous revision: used an inline `require('fs')` although `fs`
 * is already imported at the top of this module.
 *
 * @param {string} itemId     Work item / PRD feature id to update.
 * @param {string} status     Status value to write onto the feature.
 * @param {string} [sourcePlan] Optional PRD filename to restrict the scan to.
 * @returns {void}
 */
function syncPrdItemStatus(itemId, status, sourcePlan) {
  if (!itemId) return;
  try {
    const prdDir = path.join(MINIONS_DIR, 'prd');
    // Use the module-scope `fs` (was a redundant inline require).
    const files = sourcePlan ? [sourcePlan] : fs.readdirSync(prdDir).filter(f => f.endsWith('.json'));
    for (const pf of files) {
      const fpath = path.join(prdDir, pf);
      const plan = safeJson(fpath);
      if (!plan?.missing_features) continue; // not a PRD file
      const feature = plan.missing_features.find(f => f.id === itemId);
      if (feature && feature.status !== status) {
        feature.status = status;
        shared.safeWrite(fpath, plan);
        return; // item ids are unique across PRDs — first hit wins
      }
    }
  } catch {}
}
|
|
494
|
+
|
|
495
|
+
// ─── PR Sync from Output ─────────────────────────────────────────────────────
|
|
496
|
+
|
|
497
|
+
/**
 * Scans an agent's raw (JSON-lines) output and its same-day inbox notes for
 * Azure DevOps / GitHub pull-request URLs or ids, and registers any PRs not
 * already tracked in the owning project's pull-requests.json. Also links each
 * new PR to the dispatching work item via addPrLink.
 *
 * @param {string} output  Raw agent output, one JSON object per line.
 * @param {string} agentId Agent identifier (used to find inbox files and name).
 * @param {object} meta    Dispatch metadata (item title/id, project, branch).
 * @param {object} config  Engine config (projects, agent display names).
 * @returns {number} Count of newly registered PRs (0 when nothing found).
 */
function syncPrsFromOutput(output, agentId, meta, config) {
  const e = engine();
  const prMatches = new Set();
  // Captures the PR number from ADO (…pullrequest/123) or GitHub (…/pull/123) URLs.
  // /g regex shared across texts: each while-loop below runs exec to null,
  // which resets lastIndex before the next text is scanned.
  const urlPattern = /(?:visualstudio\.com|dev\.azure\.com)[^\s"]*?pullrequest\/(\d+)|github\.com\/[^\s"]*?\/pull\/(\d+)/g;
  let match;

  try {
    const lines = output.split('\n');
    for (const line of lines) {
      try {
        // Cheap pre-filter before paying for JSON.parse on every line.
        if (!line.includes('"type":"assistant"') && !line.includes('"type":"result"')) continue;
        const parsed = JSON.parse(line);
        const content = parsed.message?.content || [];
        for (const block of content) {
          if (block.type === 'tool_result' && block.content) {
            const text = typeof block.content === 'string' ? block.content : JSON.stringify(block.content);
            // Only scan tool results that look PR-related.
            if (text.includes('pullRequestId') || text.includes('create_pull_request')) {
              while ((match = urlPattern.exec(text)) !== null) prMatches.add(match[1] || match[2]);
            }
          }
        }
        if (parsed.type === 'result' && parsed.result) {
          const resultText = parsed.result;
          // "created/opened/… <PR URL>" phrasing in the final result text.
          const createdPattern = /(?:created|opened|submitted|new PR|PR created)[^\n]*?(?:(?:visualstudio\.com|dev\.azure\.com)[^\s"]*?pullrequest\/(\d+)|github\.com\/[^\s"]*?\/pull\/(\d+))/gi;
          while ((match = createdPattern.exec(resultText)) !== null) prMatches.add(match[1] || match[2]);
          // Bare "created PR #12345" phrasing (5+ digits only).
          const createdIdPattern = /(?:created|opened|submitted|new)\s+PR[# -]*(\d{5,})/gi;
          while ((match = createdIdPattern.exec(resultText)) !== null) prMatches.add(match[1]);
        }
      } catch {} // skip unparseable lines
    }
  } catch {}

  // Second source: today's inbox notes from this agent ("**PR:** <url>" headers).
  const today = e.dateStamp();
  const inboxFiles = getInboxFiles().filter(f => f.includes(agentId) && f.includes(today));
  for (const f of inboxFiles) {
    const content = safeRead(path.join(INBOX_DIR, f));
    const prHeaderPattern = /\*\*PR[:\*]*\*?\s*[#-]*\s*(?:(?:visualstudio\.com|dev\.azure\.com)[^\s"]*?pullrequest\/(\d+)|github\.com\/[^\s"]*?\/pull\/(\d+))/gi;
    while ((match = prHeaderPattern.exec(content)) !== null) prMatches.add(match[1] || match[2]);
  }

  if (prMatches.size === 0) return 0;

  const projects = shared.getProjects(config);
  const defaultProject = (meta?.project?.name && projects.find(p => p.name === meta.project.name)) || projects[0];
  if (!defaultProject) return 0;

  // Match each PR to its correct project by finding which repo URL appears near the PR number in output
  function resolveProjectForPr(prId) {
    // Look for the PR URL in output to determine which ADO project it belongs to
    for (const p of projects) {
      if (!p.prUrlBase) continue;
      const urlFragment = p.prUrlBase.replace(/pullrequest\/$/, '');
      if (output.includes(urlFragment + 'pullrequest/' + prId) || output.includes(urlFragment + prId)) return p;
    }
    // Fallback: match on the repo path segment of an ADO PR URL.
    for (const p of projects) {
      if (p.repoName && output.includes(`_git/${p.repoName}/pullrequest/${prId}`)) return p;
    }
    return defaultProject;
  }

  const agentName = config.agents?.[agentId]?.name || agentId;
  let added = 0;
  // Track which project PR files need writing
  const dirtyProjects = new Map(); // projectName -> { project, prs, prPath }

  for (const prId of prMatches) {
    const fullId = `PR-${prId}`;
    const targetProject = resolveProjectForPr(prId);
    const prPath = shared.projectPrPath(targetProject);

    // Load PRs for this project (cache per project)
    if (!dirtyProjects.has(targetProject.name)) {
      dirtyProjects.set(targetProject.name, { project: targetProject, prs: safeJson(prPath) || [], prPath });
    }
    const entry = dirtyProjects.get(targetProject.name);
    // Skip PRs already tracked (exact id or id containing the number).
    if (entry.prs.some(p => p.id === fullId || String(p.id).includes(prId))) continue;

    // Try to pull a human title from "<id> — <title>" in the output; fall
    // back to the work item title if the match looks like JSON noise.
    let title = meta?.item?.title || '';
    const titleMatch = output.match(new RegExp(`${prId}[^\\n]*?[—–-]\\s*([^\\n]+)`, 'i'));
    if (titleMatch) title = titleMatch[1].trim();
    if (title.includes('session_id') || title.includes('is_error') || title.includes('uuid') || title.length > 120) {
      title = meta?.item?.title || '';
    }
    entry.prs.push({
      id: fullId,
      title: (title || `PR created by ${agentName}`).slice(0, 120),
      agent: agentName,
      branch: meta?.branch || '',
      reviewStatus: 'pending',
      status: 'active',
      created: e.dateStamp(),
      url: targetProject.prUrlBase ? targetProject.prUrlBase + prId : '',
      sourcePlan: meta?.item?.sourcePlan || '',
      itemType: meta?.item?.itemType || ''
    });
    // Link PR back to the originating work item for later status sync.
    if (meta?.item?.id) addPrLink(fullId, meta.item.id);
    added++;
  }

  // Flush every project whose PR list changed (or was merely loaded —
  // rewriting unchanged data is harmless here).
  for (const [name, entry] of dirtyProjects) {
    shared.safeWrite(entry.prPath, entry.prs);
    e.log('info', `Synced PR(s) from ${agentName}'s output to ${name}/pull-requests.json`);
  }
  return added;
}
|
|
602
|
+
|
|
603
|
+
// ─── Post-Completion Hooks ──────────────────────────────────────────────────
|
|
604
|
+
|
|
605
|
+
/**
 * Post-review hook: record who reviewed a PR and hand the real verdict off to
 * the platform poller.
 *
 * The local review record is deliberately set to 'waiting' — the actual
 * approve/reject verdict comes from ADO/GitHub votes via pollPrStatus on the
 * next cycle. Because of that, `minionsVerdict` below is always 'waiting'
 * here, so the approve/reject metric counters are effectively adjusted by
 * later polling cycles, not by this call.
 *
 * @param {string} agentId - Id of the reviewing agent.
 * @param {object} pr - PR descriptor from the dispatch meta (needs `id`; `agent` is the author).
 * @param {object} project - Project whose pull-requests.json holds the PR; falls back to the central file when absent.
 */
function updatePrAfterReview(agentId, pr, project) {
  const e = engine();
  if (!pr?.id) return;
  const prs = getPrs(project);
  const target = prs.find(p => p.id === pr.id);
  if (!target) return;

  const config = getConfig();
  const reviewerName = config.agents[agentId]?.name || agentId;
  // Record the reviewer — actual verdict comes from ADO/GitHub votes via pollPrStatus.
  // Set to 'waiting' so pollPrStatus updates it with the real vote on next cycle.
  const dispatch = getDispatch();
  // Pull the reviewer's completed review task so its description can be kept as the note.
  const completedEntry = (dispatch.completed || []).find(d => d.agent === agentId && d.type === 'review');

  target.minionsReview = {
    status: 'waiting',
    reviewer: reviewerName,
    reviewedAt: e.ts(),
    note: completedEntry?.task || ''
  };
  // NOTE: always 'waiting' at this point (just assigned above).
  const minionsVerdict = target.minionsReview.status;

  // Maintain per-author approve/reject counters, de-duplicated per PR via the
  // `_reviewedPrs` map so a re-review first backs out the previous verdict.
  const authorAgentId = (pr.agent || '').toLowerCase();
  if (authorAgentId && config.agents?.[authorAgentId]) {
    const metricsPath = path.join(ENGINE_DIR, 'metrics.json');
    const metrics = safeJson(metricsPath) || {};
    if (!metrics[authorAgentId]) metrics[authorAgentId] = { tasksCompleted:0, tasksErrored:0, prsCreated:0, prsApproved:0, prsRejected:0, reviewsDone:0, lastTask:null, lastCompleted:null };
    if (!metrics[authorAgentId]._reviewedPrs) metrics[authorAgentId]._reviewedPrs = {};
    const prevVerdict = metrics[authorAgentId]._reviewedPrs[pr.id];
    if (prevVerdict !== minionsVerdict) {
      // Back out the previously recorded verdict (never below zero)…
      if (prevVerdict === 'approved') metrics[authorAgentId].prsApproved = Math.max(0, (metrics[authorAgentId].prsApproved || 0) - 1);
      else if (prevVerdict === 'changes-requested') metrics[authorAgentId].prsRejected = Math.max(0, (metrics[authorAgentId].prsRejected || 0) - 1);
      // …then apply the new one. (With minionsVerdict === 'waiting' neither
      // branch fires here; the counters move once pollPrStatus records a real vote.)
      if (minionsVerdict === 'approved') metrics[authorAgentId].prsApproved++;
      else if (minionsVerdict === 'changes-requested') metrics[authorAgentId].prsRejected++;
      metrics[authorAgentId]._reviewedPrs[pr.id] = minionsVerdict;
    }
    shared.safeWrite(metricsPath, metrics);
  }

  // Persist the PR list: project file when a project is known, central file otherwise.
  shared.safeWrite(project ? shared.projectPrPath(project) : path.join(path.resolve(MINIONS_DIR, '..'), '.minions', 'pull-requests.json'), prs);
  e.log('info', `Updated ${pr.id} → minions review: ${minionsVerdict} by ${reviewerName}`);
  // Forward the reviewer's inbox findings to the PR author as feedback.
  createReviewFeedbackForAuthor(agentId, { ...pr, ...target }, config);
}
|
|
648
|
+
|
|
649
|
+
/**
 * Post-fix hook: after an agent pushes a fix for a PR, reset the local review
 * record to 'waiting' so the PR gets re-reviewed on the next cycle.
 *
 * For fixes driven by human feedback (`source === 'pr-human-feedback'`), the
 * `humanFeedback.pendingFix` flag is also cleared.
 *
 * @param {object} pr - PR descriptor from the dispatch meta (needs `id`).
 * @param {object} project - Project whose pull-requests.json holds the PR; falls back to the central file when absent.
 * @param {string} source - Dispatch source tag; 'pr-human-feedback' selects the human-feedback path.
 */
function updatePrAfterFix(pr, project, source) {
  const e = engine();
  if (!pr?.id) return;
  const prs = getPrs(project);
  const target = prs.find(p => p.id === pr.id);
  if (!target) return;

  const isHumanFeedback = source === 'pr-human-feedback';
  // Human feedback fix: clear pendingFix in addition to the re-review reset.
  if (isHumanFeedback && target.humanFeedback) target.humanFeedback.pendingFix = false;

  // Both paths reset to 'waiting' for re-review; only the note/log text differs.
  target.minionsReview = {
    ...target.minionsReview,
    status: 'waiting',
    note: isHumanFeedback ? 'Fixed human feedback, awaiting re-review' : 'Fixed, awaiting re-review',
    fixedAt: e.ts()
  };
  e.log('info', isHumanFeedback
    ? `Updated ${pr.id} → cleared humanFeedback.pendingFix, reset to waiting for re-review`
    : `Updated ${pr.id} → minions review: waiting (fix pushed)`);

  // Persist the PR list: project file when a project is known, central file otherwise.
  shared.safeWrite(project ? shared.projectPrPath(project) : path.join(path.resolve(MINIONS_DIR, '..'), '.minions', 'pull-requests.json'), prs);
}
|
|
679
|
+
|
|
680
|
+
// ─── Post-Merge / Post-Close Hooks ───────────────────────────────────────────
|
|
681
|
+
|
|
682
|
+
/**
 * Post-merge / post-close hook for a PR.
 *
 * Always attempts worktree cleanup for the PR's branch. The remaining steps —
 * marking the linked PRD feature implemented, bumping the author's
 * `prsMerged` metric, and the Teams webhook notification — run only when
 * `newStatus === 'merged'`.
 *
 * @param {object} pr - PR record (id like 'PR-123', branch, title, agent).
 * @param {object} project - Project the PR belongs to (localPath, name).
 * @param {object} config - Loaded engine config.
 * @param {string} newStatus - New PR status; only 'merged' triggers the full hook chain.
 */
async function handlePostMerge(pr, project, config, newStatus) {
  const e = engine();
  const prNum = (pr.id || '').replace('PR-', '');

  // Remove any leftover git worktrees created for this PR's branch.
  if (pr.branch) {
    const root = path.resolve(project.localPath);
    const wtRoot = path.resolve(root, config.engine?.worktreeRoot || '../worktrees');
    // Find worktrees matching this branch — dir format is {slug}-{branch}-{suffix}
    const branchSlug = shared.sanitizeBranch(pr.branch).toLowerCase();
    try {
      const dirs = require('fs').readdirSync(wtRoot);
      for (const dir of dirs) {
        const dirLower = dir.toLowerCase();
        // Match by slug substring, exact branch name, or the `bt-<prNum>` convention.
        if (dirLower.includes(branchSlug) || dir === pr.branch || dir === `bt-${prNum}`) {
          const wtPath = path.join(wtRoot, dir);
          try {
            if (!require('fs').statSync(wtPath).isDirectory()) continue;
            execSilent(`git worktree remove "${wtPath}" --force`, { cwd: root, stdio: 'pipe', timeout: 15000 });
            e.log('info', `Post-merge cleanup: removed worktree ${dir}`);
          } catch (err) { e.log('warn', `Failed to remove worktree ${dir}: ${err.message}`); }
        }
      }
    } catch {} // best-effort: worktree root may not exist
  }

  // Everything below applies only to an actual merge (not a close/abandon).
  if (newStatus !== 'merged') return;

  // Mark the linked PRD feature as implemented in whichever plan file holds it.
  const mergedItemId = getPrLinks()[pr.id];
  if (mergedItemId) {
    const prdDir = path.join(MINIONS_DIR, 'prd');
    try {
      const planFiles = fs.readdirSync(prdDir).filter(f => f.endsWith('.json'));
      let updated = 0;
      for (const pf of planFiles) {
        const plan = safeJson(path.join(prdDir, pf));
        if (!plan?.missing_features) continue;
        const feature = plan.missing_features.find(f => f.id === mergedItemId);
        if (feature && feature.status !== 'implemented') {
          feature.status = 'implemented';
          shared.safeWrite(path.join(prdDir, pf), plan);
          updated++;
        }
      }
      if (updated > 0) e.log('info', `Post-merge: marked ${mergedItemId} as implemented for ${pr.id}`);
    } catch {} // best-effort: prd dir may be missing
  }

  // Credit the authoring agent with a merged PR.
  const agentId = (pr.agent || '').toLowerCase();
  if (agentId && config.agents?.[agentId]) {
    const metricsPath = path.join(ENGINE_DIR, 'metrics.json');
    const metrics = safeJson(metricsPath) || {};
    if (!metrics[agentId]) metrics[agentId] = { tasksCompleted:0, tasksErrored:0, prsCreated:0, prsApproved:0, prsRejected:0, prsMerged:0, reviewsDone:0, lastTask:null, lastCompleted:null };
    metrics[agentId].prsMerged = (metrics[agentId].prsMerged || 0) + 1;
    shared.safeWrite(metricsPath, metrics);
  }

  // Optional Teams webhook notification (enabled by env var).
  const teamsUrl = process.env.TEAMS_PLAN_FLOW_URL;
  if (teamsUrl) {
    try {
      await fetch(teamsUrl, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ text: `PR ${pr.id} merged: ${pr.title} (${project.name}) by ${pr.agent || 'unknown'}` })
      });
    } catch (err) { e.log('warn', `Teams post-merge notify failed: ${err.message}`); }
  }

  e.log('info', `Post-merge hooks completed for ${pr.id}`);
}
|
|
751
|
+
|
|
752
|
+
/**
 * Log whether an agent recorded any findings in today's inbox after finishing
 * a task. Purely informational — no follow-up work is queued either way.
 *
 * @param {string} agentId - Agent id matched against inbox filenames.
 * @param {object} agentInfo - Agent config entry (only `name` is read, for display).
 * @param {string} taskDesc - Task description (currently unused; kept for interface compatibility).
 */
function checkForLearnings(agentId, agentInfo, taskDesc) {
  const eng = engine();
  const stamp = eng.dateStamp();
  const displayName = agentInfo?.name || agentId;
  // Inbox filenames embed both the agent id and the date stamp.
  const findings = getInboxFiles().filter(
    (file) => file.includes(agentId) && file.includes(stamp)
  );
  if (findings.length === 0) {
    eng.log('warn', `${displayName} didn't write learnings — no follow-up queued`);
    return;
  }
  eng.log('info', `${displayName} wrote ${findings.length} finding(s) to inbox`);
}
|
|
763
|
+
|
|
764
|
+
/**
 * Scan an agent's stream-JSON output for ```skill fenced blocks and persist
 * each one, either as a queued work item (project-scoped skills, delivered via
 * PR) or directly into ~/.claude/skills/<name>/SKILL.md (all other scopes).
 *
 * @param {string} output - Raw agent stdout (stream-JSON lines; falls back to treating it as plain text).
 * @param {string} agentId - Id of the agent that produced the output.
 * @param {object} dispatchItem - Completed dispatch record (currently unused here).
 * @param {object} config - Loaded engine config (agents map, projects).
 */
function extractSkillsFromOutput(output, agentId, dispatchItem, config) {
  const e = engine();
  if (!output) return;
  // Collect the assistant's text content out of the stream-JSON lines;
  // non-JSON lines are silently skipped.
  let fullText = '';
  for (const line of output.split('\n')) {
    try {
      const j = JSON.parse(line);
      if (j.type === 'assistant' && j.message?.content) {
        for (const c of j.message.content) {
          if (c.type === 'text') fullText += c.text + '\n';
        }
      }
    } catch {}
  }
  // If nothing parsed as stream-JSON, treat the whole output as plain text.
  if (!fullText) fullText = output;
  const skillBlocks = [];
  const skillRegex = /```skill\s*\n([\s\S]*?)```/g;
  let match;
  while ((match = skillRegex.exec(fullText)) !== null) {
    skillBlocks.push(match[1].trim());
  }
  if (skillBlocks.length === 0) return;
  const agentName = config.agents[agentId]?.name || agentId;
  for (const block of skillBlocks) {
    // Each skill block must carry YAML-ish frontmatter: ---\n<keys>\n---\n<body>
    const fmMatch = block.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);
    if (!fmMatch) { e.log('warn', `Skill block from ${agentName} has no frontmatter, skipping`); continue; }
    const fm = fmMatch[1];
    // Tiny frontmatter accessor: first `key: value` line, or '' when absent.
    const m = (key) => { const r = fm.match(new RegExp(`^${key}:\\s*(.+)$`, 'm')); return r ? r[1].trim() : ''; };
    const name = m('name');
    if (!name) { e.log('warn', `Skill block from ${agentName} has no name, skipping`); continue; }
    const scope = m('scope') || 'minions';
    const project = m('project');
    // Backfill author/created into the frontmatter when the agent omitted them.
    let enrichedBlock = block;
    if (!m('author')) enrichedBlock = enrichedBlock.replace('---\n', `---\nauthor: ${agentName}\n`);
    if (!m('created')) enrichedBlock = enrichedBlock.replace('---\n', `---\ncreated: ${e.dateStamp()}\n`);
    // NOTE(review): `filename` is computed but never referenced below — the
    // `$(unknown)` placeholders in the description look like they were meant
    // to be `${filename}` but got mangled upstream; confirm against the
    // original source before changing.
    const filename = name.replace(/[^a-z0-9-]/g, '-') + '.md';
    if (scope === 'project' && project) {
      // Project-scoped skill: queue a central work item so the skill file
      // lands in the target repo via a reviewed PR (de-duplicated by title).
      const proj = shared.getProjects(config).find(p => p.name === project);
      if (proj) {
        const centralPath = path.join(MINIONS_DIR, 'work-items.json');
        const items = safeJson(centralPath) || [];
        const alreadyExists = items.some(i => i.title === `Add skill: ${name}` && i.status !== 'failed');
        if (!alreadyExists) {
          // SKnnn ids are numbered by counting existing SK-prefixed items.
          const skillId = `SK${String(items.filter(i => i.id?.startsWith('SK')).length + 1).padStart(3, '0')}`;
          items.push({ id: skillId, type: 'implement', title: `Add skill: ${name}`,
            description: `Create project-level skill \`$(unknown)\` in ${project}.\n\nWrite this file to \`${proj.localPath}/.claude/skills/$(unknown)\` via a PR.\n\n## Skill Content\n\n\`\`\`\n${enrichedBlock}\n\`\`\``,
            priority: 'low', status: 'queued', created: e.ts(), createdBy: `engine:skill-extraction:${agentName}` });
          shared.safeWrite(centralPath, items);
          e.log('info', `Queued work item ${skillId} to PR project skill "${name}" into ${project}`);
        }
      }
    } else {
      // Write in Claude Code native format: ~/.claude/skills/<name>/SKILL.md
      const claudeSkillsDir = path.join(process.env.HOME || process.env.USERPROFILE || '', '.claude', 'skills');
      const skillDir = path.join(claudeSkillsDir, name.replace(/[^a-z0-9-]/g, '-'));
      const skillPath = path.join(skillDir, 'SKILL.md');
      if (!fs.existsSync(skillPath)) {
        // Convert to Claude Code format: only name + description in frontmatter
        const description = m('description') || m('trigger') || `Auto-extracted skill from ${agentName}`;
        const body = fmMatch[2] || '';
        const ccContent = `---\nname: ${name}\ndescription: ${description}\n---\n\n${body.trim()}\n`;
        if (!fs.existsSync(skillDir)) fs.mkdirSync(skillDir, { recursive: true });
        shared.safeWrite(skillPath, ccContent);
        e.log('info', `Extracted skill "${name}" from ${agentName} → ~/.claude/skills/${name.replace(/[^a-z0-9-]/g, '-')}/SKILL.md`);
      } else {
        // Never overwrite an existing skill file.
        e.log('info', `Skill "${name}" already exists, skipping`);
      }

    }
  }
}
|
|
835
|
+
|
|
836
|
+
/**
 * Prepend a completion entry to the agent's `history.md` (newest first) and
 * cap the file at the 20 most recent entries.
 *
 * @param {string} agentId - Agent whose history file is updated.
 * @param {object} dispatchItem - Completed dispatch (task, type, meta, id).
 * @param {string} result - Outcome label recorded in the entry heading (e.g. 'success').
 */
function updateAgentHistory(agentId, dispatchItem, result) {
  const e = engine();
  const historyPath = path.join(AGENTS_DIR, agentId, 'history.md');
  let history = safeRead(historyPath) || '# Agent History\n\n';
  const entry = `### ${e.ts()} — ${result}\n` +
    `- **Task:** ${dispatchItem.task}\n` +
    `- **Type:** ${dispatchItem.type}\n` +
    `- **Project:** ${dispatchItem.meta?.project?.name || 'central'}\n` +
    `- **Branch:** ${dispatchItem.meta?.branch || 'none'}\n` +
    `- **Dispatch ID:** ${dispatchItem.id}\n\n`;
  // Insert right after the file header (first blank line) so newest entries come first.
  const headerEnd = history.indexOf('\n\n');
  if (headerEnd >= 0) {
    history = history.slice(0, headerEnd + 2) + entry + history.slice(headerEnd + 2);
  } else {
    history += entry;
  }
  // Trim to the 20 most recent entries, preserving the '# Agent History' header.
  const entries = history.split('### ').filter(Boolean);
  const header = entries[0].startsWith('#') ? entries.shift() : '# Agent History\n\n';
  const trimmed = entries.slice(0, 20);
  // Fix: callback parameter renamed — the original `e => '### ' + e` shadowed
  // the engine handle `e` declared at the top of this function.
  history = header + trimmed.map((chunk) => '### ' + chunk).join('');
  shared.safeWrite(historyPath, history);
  e.log('info', `Updated history for ${agentId}`);
}
|
|
859
|
+
|
|
860
|
+
/**
 * Turn a reviewer's inbox findings from today into a feedback file addressed
 * to the PR's author, written back into the shared inbox.
 *
 * No-ops when the PR has no id/author, the author is not a configured agent,
 * or the reviewer wrote nothing to the inbox today.
 *
 * @param {string} reviewerAgentId - Agent that performed the review.
 * @param {object} pr - PR record (id, agent = author, title).
 * @param {object} config - Loaded engine config (agents map).
 */
function createReviewFeedbackForAuthor(reviewerAgentId, pr, config) {
  const eng = engine();
  if (!pr?.id || !pr?.agent) return;
  const authorAgentId = pr.agent.toLowerCase();
  if (!config.agents[authorAgentId]) return;

  const today = eng.dateStamp();
  // Everything the reviewer wrote to the inbox today counts as review findings.
  const reviewFiles = getInboxFiles().filter(
    (f) => f.includes(reviewerAgentId) && f.includes(today)
  );
  if (reviewFiles.length === 0) return;

  const reviewContent = reviewFiles
    .map((f) => safeRead(path.join(INBOX_DIR, f)))
    .join('\n\n');

  const feedbackPath = shared.uniquePath(
    path.join(INBOX_DIR, `feedback-${authorAgentId}-from-${reviewerAgentId}-${pr.id}-${today}.md`)
  );
  const content =
    `# Review Feedback for ${config.agents[authorAgentId]?.name || authorAgentId}\n\n` +
    `**PR:** ${pr.id} — ${pr.title || ''}\n` +
    `**Reviewer:** ${config.agents[reviewerAgentId]?.name || reviewerAgentId}\n` +
    `**Date:** ${today}\n\n` +
    `## What the reviewer found\n\n${reviewContent}\n\n` +
    `## Action Required\n\nRead this feedback carefully. When you work on similar tasks in the future, ` +
    `avoid the patterns flagged here. If you are assigned to fix this PR, ` +
    `address every point raised above.\n`;

  shared.safeWrite(feedbackPath, content);
  eng.log('info', `Created review feedback for ${authorAgentId} from ${reviewerAgentId} on ${pr.id}`);
}
|
|
883
|
+
|
|
884
|
+
/**
 * Accumulate per-agent and per-day metrics after a dispatch finishes.
 *
 * Per-agent: task outcome counters, PR/review counts, last-task info, and
 * lifetime cost/token totals. Per-day (`_daily`): cost/token/task totals,
 * pruned to a rolling 30-day window.
 *
 * @param {string} agentId - Agent whose bucket is updated.
 * @param {object} dispatchItem - Completed dispatch (task, type).
 * @param {string} result - 'success' | 'retry' | anything else counts as an error.
 * @param {object|null} taskUsage - Usage parsed from the agent's output (costUsd, inputTokens, outputTokens, cacheRead).
 * @param {number} prsCreatedCount - PRs synced from this task's output.
 */
function updateMetrics(agentId, dispatchItem, result, taskUsage, prsCreatedCount) {
  const eng = engine();
  const metricsPath = path.join(ENGINE_DIR, 'metrics.json');
  const metrics = safeJson(metricsPath) || {};

  // Lazily create the agent's bucket with all counters zeroed.
  metrics[agentId] = metrics[agentId] || {
    tasksCompleted: 0, tasksErrored: 0, prsCreated: 0, prsApproved: 0, prsRejected: 0,
    reviewsDone: 0, lastTask: null, lastCompleted: null,
    totalCostUsd: 0, totalInputTokens: 0, totalOutputTokens: 0, totalCacheRead: 0
  };
  const agentStats = metrics[agentId];
  agentStats.lastTask = dispatchItem.task;
  agentStats.lastCompleted = eng.ts();

  switch (result) {
    case 'success':
      agentStats.tasksCompleted++;
      if (prsCreatedCount > 0) agentStats.prsCreated = (agentStats.prsCreated || 0) + prsCreatedCount;
      if (dispatchItem.type === 'review') agentStats.reviewsDone++;
      break;
    case 'retry':
      // Auto-retry: count cost but not as a final outcome
      agentStats.tasksRetried = (agentStats.tasksRetried || 0) + 1;
      break;
    default:
      agentStats.tasksErrored++;
  }

  // Lifetime usage totals (fields may be absent on older buckets).
  if (taskUsage) {
    agentStats.totalCostUsd = (agentStats.totalCostUsd || 0) + (taskUsage.costUsd || 0);
    agentStats.totalInputTokens = (agentStats.totalInputTokens || 0) + (taskUsage.inputTokens || 0);
    agentStats.totalOutputTokens = (agentStats.totalOutputTokens || 0) + (taskUsage.outputTokens || 0);
    agentStats.totalCacheRead = (agentStats.totalCacheRead || 0) + (taskUsage.cacheRead || 0);
  }

  // Per-day rollup keyed by date stamp.
  const today = eng.dateStamp();
  metrics._daily = metrics._daily || {};
  metrics._daily[today] = metrics._daily[today] || { costUsd: 0, inputTokens: 0, outputTokens: 0, cacheRead: 0, tasks: 0 };
  const todayStats = metrics._daily[today];
  todayStats.tasks++;
  if (taskUsage) {
    todayStats.costUsd += taskUsage.costUsd || 0;
    todayStats.inputTokens += taskUsage.inputTokens || 0;
    todayStats.outputTokens += taskUsage.outputTokens || 0;
    todayStats.cacheRead += taskUsage.cacheRead || 0;
  }

  // Prune daily buckets older than 30 days (ISO date strings sort lexically).
  const cutoffDate = new Date();
  cutoffDate.setDate(cutoffDate.getDate() - 30);
  const oldestKept = cutoffDate.toISOString().slice(0, 10);
  for (const day of Object.keys(metrics._daily)) {
    if (day < oldestKept) delete metrics._daily[day];
  }

  shared.safeWrite(metricsPath, metrics);
}
|
|
930
|
+
|
|
931
|
+
// ─── Agent Output Parsing ────────────────────────────────────────────────────
|
|
932
|
+
|
|
933
|
+
/**
 * Adapt the shared stream-JSON parser's result to this module's naming:
 * `{ resultSummary, taskUsage }`.
 *
 * @param {string} stdout - Raw agent stdout in stream-JSON format.
 * @returns {{resultSummary: *, taskUsage: *}} Text summary (capped at 500 chars by the parser) and usage info.
 */
function parseAgentOutput(stdout) {
  const { text, usage } = shared.parseStreamJsonOutput(stdout, { maxTextLength: 500 });
  return { resultSummary: text, taskUsage: usage };
}
|
|
937
|
+
|
|
938
|
+
/**
 * Orchestrate all post-completion hooks after an agent process exits.
 *
 * Order matters: work-item status / auto-retry bookkeeping first, then plan
 * completion, PR sync from output, worktree cleanup, missing-PR detection,
 * type-specific PR hooks (review/fix), learnings check, skill extraction,
 * history, and finally metrics.
 *
 * @param {object} dispatchItem - The completed dispatch (id, type, task, meta).
 * @param {string} agentId - Agent that ran the task.
 * @param {number} code - Process exit code; 0 means success.
 * @param {string} stdout - Raw agent stdout (stream-JSON).
 * @param {object} config - Loaded engine config.
 * @returns {{resultSummary: *, taskUsage: *}} Parsed summary/usage for the caller.
 */
function runPostCompletionHooks(dispatchItem, agentId, code, stdout, config) {
  const e = engine();
  const type = dispatchItem.type;
  const meta = dispatchItem.meta;
  const isSuccess = code === 0;
  const result = isSuccess ? 'success' : 'error';
  const { resultSummary, taskUsage } = parseAgentOutput(stdout);

  if (isSuccess && meta?.item?.id) updateWorkItemStatus(meta, 'done', '');
  if (!isSuccess && meta?.item?.id) {
    // Auto-retry: read fresh _retryCount from file (not stale dispatch-time snapshot)
    let retries = (meta.item._retryCount || 0);
    try {
      // Central work items live in one file; project items in per-project files.
      const wiPath = meta.source === 'central-work-item' || meta.source === 'central-work-item-fanout'
        ? path.join(MINIONS_DIR, 'work-items.json')
        : meta.project?.name ? path.join(MINIONS_DIR, 'projects', meta.project.name, 'work-items.json') : null;
      if (wiPath) {
        const items = safeJson(wiPath) || [];
        const wi = items.find(i => i.id === meta.item.id);
        if (wi) retries = (wi._retryCount || 0); // Use fresh value from file
      }
    } catch {}

    if (retries < 3) {
      e.log('info', `Agent failed for ${meta.item.id} — auto-retry ${retries + 1}/3`);
      updateWorkItemStatus(meta, 'pending', '');
      try {
        // Re-resolve the work-items file and bump the retry counter, resetting
        // the item to pending so the scheduler picks it up again.
        const wiPath = meta.source === 'central-work-item' || meta.source === 'central-work-item-fanout'
          ? path.join(MINIONS_DIR, 'work-items.json')
          : meta.project?.name ? path.join(MINIONS_DIR, 'projects', meta.project.name, 'work-items.json') : null;
        if (wiPath) {
          const items = safeJson(wiPath) || [];
          const wi = items.find(i => i.id === meta.item.id);
          if (wi) { wi._retryCount = retries + 1; wi.status = 'pending'; delete wi.dispatched_at; delete wi.dispatched_to; shared.safeWrite(wiPath, items); }
        }
      } catch {}
    } else {
      updateWorkItemStatus(meta, 'failed', 'Agent failed (3 retries exhausted)');
    }
  }
  // Plan chaining removed — user must explicitly execute plan-to-prd after reviewing the plan
  if (isSuccess && meta?.item?.sourcePlan) checkPlanCompletion(meta, config);

  let prsCreatedCount = 0;
  if (isSuccess) prsCreatedCount = syncPrsFromOutput(stdout, agentId, meta, config) || 0;

  // Clean up worktree for non-shared-branch tasks after completion
  if (meta?.branch && meta?.branchStrategy !== 'shared-branch') {
    try {
      const project = meta.project || {};
      const rootDir = project.localPath ? path.resolve(project.localPath) : null;
      if (rootDir) {
        const engineConfig = (config.engine || {});
        const worktreeRoot = path.resolve(rootDir, engineConfig.worktreeRoot || '../worktrees');
        // Find the worktree directory for this dispatch's branch
        const branchSlug = shared.sanitizeBranch ? shared.sanitizeBranch(meta.branch) : meta.branch.replace(/[^a-zA-Z0-9._\-\/]/g, '-');
        const dirs = fs.readdirSync(worktreeRoot).filter(d => {
          return d.includes(branchSlug) && fs.statSync(path.join(worktreeRoot, d)).isDirectory();
        });
        // Only remove if no other active dispatch uses this branch
        const dispatch = e.getDispatch();
        const otherActive = ((dispatch.active || []).concat(dispatch.pending || [])).some(d =>
          d.id !== dispatchItem.id && d.meta?.branch && shared.sanitizeBranch && shared.sanitizeBranch(d.meta.branch) === branchSlug
        );
        if (!otherActive) {
          for (const dir of dirs) {
            const wtPath = path.join(worktreeRoot, dir);
            try {
              shared.exec(`git worktree remove "${wtPath}" --force`, { cwd: rootDir, stdio: 'pipe', timeout: 15000, windowsHide: true });
              e.log('info', `Post-completion: removed worktree ${dir}`);
            } catch (err) {
              e.log('warn', `Post-completion: failed to remove worktree ${dir}: ${err.message}`);
            }
          }
        }
      }
    } catch (err) {
      e.log('warn', `Post-completion worktree cleanup error: ${err.message}`);
    }
  }

  // Detect implement tasks that completed without creating a PR
  if (isSuccess && (type === 'implement' || type === 'implement:large' || type === 'fix') && prsCreatedCount === 0 && meta?.item?.id) {
    // Check if a PR already exists linked to this work item (from a previous attempt)
    // NOTE(review): `projects` is computed but unused in this branch.
    const projects = shared.getProjects(config);
    const existingPrFound = Object.values(getPrLinks()).includes(meta.item.id);
    if (!existingPrFound) {
      e.log('warn', `Agent completed implement task ${meta.item.id} but no PR was created`);
      // Set noPr flag on the work item so the dashboard can surface this
      let wiPath;
      if (meta.source === 'central-work-item' || meta.source === 'central-work-item-fanout') {
        wiPath = path.join(MINIONS_DIR, 'work-items.json');
      } else if (meta.project?.localPath) {
        wiPath = shared.projectWorkItemsPath(meta.project);
      }
      if (wiPath) {
        const items = safeJson(wiPath) || [];
        const wi = items.find(i => i.id === meta.item.id);
        if (wi) {
          wi.noPr = true;
          shared.safeWrite(wiPath, items);
        }
      }
    }
  }

  // Type-specific PR hooks, then the common tail of hooks for every task.
  if (type === 'review') updatePrAfterReview(agentId, meta?.pr, meta?.project);
  if (type === 'fix') updatePrAfterFix(meta?.pr, meta?.project, meta?.source);
  checkForLearnings(agentId, config.agents[agentId], dispatchItem.task);
  if (isSuccess) extractSkillsFromOutput(stdout, agentId, dispatchItem, config);
  updateAgentHistory(agentId, dispatchItem, result);
  // Don't count auto-retries as errors in metrics — only count final outcomes
  const isAutoRetry = !isSuccess && meta?.item?.id && (meta.item._retryCount || 0) < 3;
  const metricsResult = isAutoRetry ? 'retry' : result;
  updateMetrics(agentId, dispatchItem, metricsResult, taskUsage, prsCreatedCount);

  return { resultSummary, taskUsage };
}
|
|
1056
|
+
|
|
1057
|
+
// ─── PR → PRD Status Sync ─────────────────────────────────────────────────────
|
|
1058
|
+
// Runs every 6 ticks (~3 min). For all pending work items across all projects,
|
|
1059
|
+
// runs the reconciliation pass to catch PRs created after materialization
|
|
1060
|
+
// (e.g., manually raised PRs, cross-plan PRs, or PRs created while engine was paused).
|
|
1061
|
+
// ─── PR → PRD Status Sync ─────────────────────────────────────────────────────
// Runs every 6 ticks (~3 min). For all pending work items across all projects,
// runs the reconciliation pass to catch PRs created after materialization
// (e.g., manually raised PRs, cross-plan PRs, or PRs created while engine was paused).
/**
 * Reconcile pending work items against PRs across all projects, flipping
 * matched items to done and propagating that to the PRD JSON. All errors are
 * non-fatal (logged as warnings).
 *
 * @param {object} [config] - Engine config; loaded via queries.getConfig() when omitted.
 */
function syncPrdFromPrs(config) {
  try {
    // Lazy requires keep this helper safe to call from contexts where the
    // modules aren't needed yet. Note: the destructured safeJson/safeWrite
    // shadow any file-level helpers of the same name inside this function.
    const { getProjects, projectWorkItemsPath, projectPrPath, safeJson, safeWrite } = require('./shared');
    const { reconcileItemsWithPrs } = require('../engine');
    config = config || queries.getConfig();
    const allProjects = getProjects(config);

    // Exact prdItems match only — no fuzzy matching
    const allPrs = allProjects.flatMap(p => safeJson(projectPrPath(p)) || []);

    let totalReconciled = 0;
    for (const project of allProjects) {
      const wiPath = projectWorkItemsPath(project);
      const items = safeJson(wiPath) || [];
      // Skip projects with nothing pending (items already linked to a PR don't count).
      const hasPending = items.some(wi => wi.status === 'pending' && !wi._pr);
      if (!hasPending) continue;
      // reconcileItemsWithPrs mutates `items` in place and returns the count changed.
      const reconciled = reconcileItemsWithPrs(items, allPrs);
      if (reconciled > 0) {
        safeWrite(wiPath, items);
        // Sync done status to PRD JSON for each newly reconciled item
        for (const wi of items) {
          if (wi.status === 'done') syncPrdItemStatus(wi.id, 'done', wi.sourcePlan);
        }
        totalReconciled += reconciled;
      }
    }
    if (totalReconciled > 0) {
      engine().log('info', `PR sync: reconciled ${totalReconciled} pending work item(s) to done`);
    }
  } catch (err) {
    // Non-fatal — log and continue
    try { engine().log('warn', `syncPrdFromPrs error: ${err?.message || err}`); } catch {}
  }
}
|
|
1095
|
+
|
|
1096
|
+
// Public API of this module: lifecycle hooks (post-completion, post-review,
// post-fix, post-merge), PR/PRD sync, skill extraction, history and metrics.
module.exports = {
  checkPlanCompletion,
  updateWorkItemStatus,
  syncPrdItemStatus,
  syncPrsFromOutput,
  updatePrAfterReview,
  updatePrAfterFix,
  handlePostMerge,
  checkForLearnings,
  extractSkillsFromOutput,
  updateAgentHistory,
  createReviewFeedbackForAuthor,
  updateMetrics,
  parseAgentOutput,
  runPostCompletionHooks,
  syncPrdFromPrs,
};
|
|
1113
|
+
|