@matthugh1/conductor-cli 0.1.0 → 0.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent-spawner-BNOGEYDK.js +232 -0
- package/dist/agent.js +79 -16
- package/dist/{branch-overview-XVHTGFCJ.js → branch-overview-DSSCUE5F.js} +1 -1
- package/dist/chunk-3MJBQK2F.js +75 -0
- package/dist/chunk-6AA726KG.js +238 -0
- package/dist/{chunk-IHARLSA6.js → chunk-7S5HKGS5.js} +2 -1
- package/dist/{chunk-MJKFQIYA.js → chunk-B2WDTKD7.js} +19 -20
- package/dist/{chunk-JZT526HU.js → chunk-KB2DTST2.js} +27 -81
- package/dist/{cli-config-TDSTAXIA.js → cli-config-2ZDXUUQN.js} +5 -1
- package/dist/{cli-tasks-NW3BONXC.js → cli-tasks-NM5D5PIZ.js} +3 -2
- package/dist/daemon-GGOJDZDB.js +598 -0
- package/dist/daemon-client-BE64H437.js +312 -0
- package/dist/{health-CTND2ANA.js → health-UFK7YCKQ.js} +1 -1
- package/dist/runner-prompt-MOOPKA5P.js +9 -0
- package/dist/{work-queue-YE5P4S7R.js → work-queue-U3JYHLX2.js} +11 -17
- package/dist/{worktree-manager-QKRBTPVC.js → worktree-manager-2ZUJEL3L.js} +2 -1
- package/package.json +2 -2
- package/dist/runner-prompt-2B6EXGN6.js +0 -139
- package/dist/{chunk-VYINBHPQ.js → chunk-6VMREHG4.js} +0 -0
|
@@ -0,0 +1,598 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// ../../src/cli/daemon.ts
|
|
4
|
+
// Emit a single line to stdout (normal program output).
function writeOut(text) {
  process.stdout.write(`${text}\n`);
}
|
|
7
|
+
// Emit a single line to stderr (errors and diagnostics).
function writeErr(text) {
  process.stderr.write(`${text}\n`);
}
|
|
10
|
+
// Current wall-clock time as a 24-hour "HH:MM:SS" string for log prefixes.
function ts() {
  const now = new Date();
  return now.toLocaleTimeString("en-US", { hour12: false });
}
|
|
13
|
+
// Write a timestamped log line to stdout, e.g. "[13:45:02] message".
function log(msg) {
  const prefix = `[${ts()}]`;
  writeOut(`${prefix} ${msg}`);
}
|
|
16
|
+
/**
 * Request a graceful shutdown of the daemon running for a project.
 *
 * Resolves the project (by name when given, otherwise by path), locates the
 * per-project PID file under `<projectRoot>/.conductor/`, and sends SIGTERM
 * to the recorded process. The daemon's own signal handler performs the
 * actual graceful shutdown.
 *
 * @param {string} projectRoot - Directory whose `.conductor` dir holds the PID file.
 * @param {string|undefined} projectName - Optional project name to resolve instead of the path.
 * @param {boolean} jsonOutput - When true, emit machine-readable JSON on stdout.
 * @param {string|undefined} apiUrl - API base URL override; falls back to CONDUCTOR_API_URL.
 * @param {string|undefined} apiKey - API key forwarded to the daemon client.
 * @returns {Promise<number>} Process exit code: 0 on signal sent, 1 on any failure.
 */
async function cmdDaemonCancel(projectRoot, projectName, jsonOutput, apiUrl, apiKey) {
  const { existsSync, readFileSync, unlinkSync } = await import("fs");
  const { join } = await import("path");
  const { createDaemonClient } = await import("./daemon-client-BE64H437.js");
  const resolvedUrl = apiUrl ?? process.env.CONDUCTOR_API_URL;
  const client = createDaemonClient(resolvedUrl, apiKey);

  // Resolve the project to its server-side ID; the PID file name embeds it.
  let projectId;
  try {
    const project = projectName
      ? await client.resolveProject({ name: projectName })
      : await client.resolveProject({ path: projectRoot });
    projectId = project.id;
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    writeErr(msg);
    return 1;
  }

  // NOTE(review): this looks under `projectRoot` even when resolving by name,
  // while cmdDaemon writes the PID file under the resolved project path —
  // presumably callers pass a matching root; verify against the dispatcher.
  const pidPath = join(projectRoot, ".conductor", `daemon-${projectId}.pid`);
  if (!existsSync(pidPath)) {
    if (jsonOutput) {
      writeOut(JSON.stringify({ error: "No daemon running for this project." }));
    } else {
      writeErr("No daemon running for this project.");
    }
    return 1;
  }

  const pid = Number.parseInt(readFileSync(pidPath, "utf8").trim(), 10);
  if (!Number.isFinite(pid)) {
    // Unparseable PID file: remove it so the next start is not blocked.
    unlinkSync(pidPath);
    writeErr("Stale PID file removed.");
    return 1;
  }

  try {
    process.kill(pid, "SIGTERM");
    if (jsonOutput) {
      writeOut(JSON.stringify({ cancelled: true, pid }));
    } else {
      writeOut(`Sent shutdown signal to daemon (PID ${pid}).`);
    }
    return 0;
  } catch (err) {
    // BUG FIX: process.kill throws EPERM when the process EXISTS but we lack
    // permission to signal it (e.g. owned by another user). The previous code
    // treated every error as "process not found" and deleted the PID file,
    // which would orphan a live daemon. Only clean up when the process is
    // genuinely gone (ESRCH or any other failure).
    if (err instanceof Error && err.code === "EPERM") {
      const msg = `Daemon process ${pid} exists but cannot be signalled (permission denied).`;
      if (jsonOutput) {
        writeOut(JSON.stringify({ error: msg }));
      } else {
        writeErr(msg);
      }
      return 1;
    }
    unlinkSync(pidPath);
    if (jsonOutput) {
      writeOut(
        JSON.stringify({ error: "Daemon process not found. Cleaned up stale PID file." })
      );
    } else {
      writeErr("Daemon process not found. Cleaned up stale PID file.");
    }
    return 1;
  }
}
|
|
66
|
+
/**
 * Run the long-lived daemon loop for a project.
 *
 * Responsibilities, in order per iteration: enforce the consecutive-failure
 * circuit breaker, fill free concurrency slots with claimable agent tasks
 * (respecting the daily quota), write a heartbeat, ingest auto-generated
 * deliverables, auto-route pending tasks, then either race active task
 * completions against the poll interval or simply sleep.
 *
 * Single-instance enforcement is done via a per-project PID file in
 * `<projectPath>/.conductor/daemon-<projectId>.pid`. SIGINT/SIGTERM flip the
 * `running` flag and cancel all active tasks, letting the loop drain and
 * clean up (heartbeat cleared, PID file removed).
 *
 * @param {object} opts - CLI options: projectRoot, projectName, apiUrl, apiKey,
 *   checkInterval, maxPerDay, maxConsecutiveFailures, timeout, maxConcurrent,
 *   noWorktree, skipPermissions.
 * @returns {Promise<number>} Exit code: 0 on clean shutdown, 1 on startup failure.
 */
async function cmdDaemon(opts) {
  const { mkdirSync, writeFileSync, unlinkSync, existsSync, readFileSync } = await import("fs");
  const { join } = await import("path");
  const { createDaemonClient } = await import("./daemon-client-BE64H437.js");
  const resolvedUrl = opts.apiUrl ?? process.env.CONDUCTOR_API_URL;
  const client = createDaemonClient(resolvedUrl, opts.apiKey);
  // Resolve project identity: local config file wins, then explicit name,
  // then the current project root path.
  let projectPath;
  let projectId;
  try {
    const { readProjectId } = await import("./cli-config-2ZDXUUQN.js");
    const configProjectId = readProjectId();
    if (configProjectId) {
      projectId = configProjectId;
      projectPath = opts.projectRoot;
      log(`Using project ID from config: ${projectId}`);
    } else if (opts.projectName) {
      const project = await client.resolveProject({ name: opts.projectName });
      projectId = project.id;
      projectPath = project.path;
    } else {
      const project = await client.resolveProject({ path: opts.projectRoot });
      projectId = project.id;
      projectPath = project.path;
    }
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    writeErr(msg);
    return 1;
  }
  const conductorDir = join(projectPath, ".conductor");
  mkdirSync(conductorDir, { recursive: true });
  // PID-file handshake: refuse to start if another live daemon owns the file;
  // clean up stale or unparseable files from crashed daemons.
  const pidPath = join(conductorDir, `daemon-${projectId}.pid`);
  if (existsSync(pidPath)) {
    const existingPid = parseInt(
      readFileSync(pidPath, "utf8").trim(),
      10
    );
    if (Number.isFinite(existingPid)) {
      try {
        // Signal 0 probes for existence without affecting the process.
        process.kill(existingPid, 0);
        writeErr(
          `A daemon is already running for this project (PID ${existingPid}).`
        );
        writeErr("Run: conductor daemon cancel --project <name>");
        return 1;
      } catch {
        // kill(pid, 0) threw: that PID is no longer alive (or not signallable).
        log("Cleaned up stale PID file from a previous daemon.");
        unlinkSync(pidPath);
      }
    } else {
      // PID file contents were not a number; discard.
      unlinkSync(pidPath);
    }
  }
  writeFileSync(pidPath, String(process.pid), "utf8");
  // `running` is the loop's shutdown latch; `activeTasks` maps task.id to the
  // in-flight task record (promise, cancel fn, agent/run/worktree metadata).
  let running = true;
  const activeTasks = /* @__PURE__ */ new Map();
  // Count in-flight tasks owned by one agent (used against agent.maxConcurrent).
  function runningCountForAgent(agentId) {
    let count = 0;
    for (const t of activeTasks.values()) {
      if (t.agentId === agentId) count++;
    }
    return count;
  }
  const shutdown = () => {
    log("Shutting down daemon gracefully...");
    running = false;
    for (const task of activeTasks.values()) {
      task.cancel();
    }
  };
  process.on("SIGINT", shutdown);
  process.on("SIGTERM", shutdown);
  // Optional file-based daemon config; any read/parse failure means "no file config".
  let fileConfig = {};
  try {
    const { readConfig } = await import("./cli-config-2ZDXUUQN.js");
    const config = readConfig(projectPath);
    fileConfig = config.daemon ?? {};
  } catch {
  }
  const CLI_DEFAULTS = {
    checkInterval: 3e4,
    maxPerDay: 50,
    maxConsecutiveFailures: 3,
    timeout: 120,
    maxConcurrent: 1,
    skipPermissions: false
  };
  // Precedence per setting: explicit CLI value (anything differing from the
  // CLI default) > file config > CLI default. NOTE(review): a CLI value that
  // happens to equal the default is indistinguishable from "not passed" here.
  const daemonConfig = {
    checkInterval: opts.checkInterval !== CLI_DEFAULTS.checkInterval ? opts.checkInterval : fileConfig.checkInterval ?? CLI_DEFAULTS.checkInterval,
    maxPerDay: opts.maxPerDay !== CLI_DEFAULTS.maxPerDay ? opts.maxPerDay : fileConfig.maxPerDay ?? CLI_DEFAULTS.maxPerDay,
    maxConsecutiveFailures: opts.maxConsecutiveFailures !== CLI_DEFAULTS.maxConsecutiveFailures ? opts.maxConsecutiveFailures : fileConfig.maxConsecutiveFailures ?? CLI_DEFAULTS.maxConsecutiveFailures,
    timeout: opts.timeout !== CLI_DEFAULTS.timeout ? opts.timeout : fileConfig.timeout ?? CLI_DEFAULTS.timeout,
    maxConcurrent: opts.maxConcurrent !== CLI_DEFAULTS.maxConcurrent ? opts.maxConcurrent : fileConfig.maxConcurrent ?? CLI_DEFAULTS.maxConcurrent,
    useWorktree: !opts.noWorktree,
    skipPermissions: opts.skipPermissions || (fileConfig.skipPermissions ?? false)
  };
  // Initial heartbeat is mandatory: if it fails the daemon aborts and removes
  // its PID file so a retry is possible.
  try {
    await client.upsertHeartbeat({
      projectId,
      pid: process.pid,
      state: "idle",
      config: daemonConfig,
      stats: { completedToday: 0, failedToday: 0 }
    });
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    writeErr(`Failed to write initial heartbeat: ${msg}`);
    unlinkSync(pidPath);
    return 1;
  }
  // Best-effort housekeeping: drop run records older than 30 days.
  try {
    const purged = await client.purgeOldRuns(30);
    if (purged > 0) {
      log(`Cleaned up ${purged} daemon run(s) older than 30 days`);
    }
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    log(`Warning: cleanup of old runs failed: ${msg}`);
  }
  log(`Daemon started for project: ${projectPath}`);
  log(
    `PID: ${process.pid} | Poll interval: ${daemonConfig.checkInterval}ms | Max per day: ${daemonConfig.maxPerDay} | Max concurrent: ${daemonConfig.maxConcurrent}`
  );
  // attemptedIds ensures each task id is only tried once per daemon lifetime;
  // the counters feed heartbeat stats and the failure circuit breaker.
  const attemptedIds = /* @__PURE__ */ new Set();
  let consecutiveFailures = 0;
  let completedToday = 0;
  let failedToday = 0;
  // Sleep in 1s slices so a shutdown request is honored promptly.
  async function sleepWithCheck(ms) {
    const intervals = Math.ceil(ms / 1e3);
    for (let i = 0; i < intervals && running; i++) {
      await new Promise((r) => setTimeout(r, 1e3));
    }
  }
  // Background heartbeat every 30s, independent of the main loop; failures
  // are ignored (the loop also heartbeats each iteration).
  const heartbeatInterval = setInterval(async () => {
    try {
      const firstActive = activeTasks.values().next().value;
      await client.upsertHeartbeat({
        projectId,
        pid: process.pid,
        state: activeTasks.size > 0 ? "executing" : "idle",
        currentDeliverableId: firstActive?.deliverableId ?? void 0,
        currentRunId: firstActive?.runId,
        config: daemonConfig,
        stats: { completedToday, failedToday, activeCount: activeTasks.size }
      });
    } catch {
    }
  }, 3e4);
  // ---- Main loop ----
  while (running) {
    try {
      // Circuit breaker: too many consecutive failures stops the daemon.
      if (consecutiveFailures >= daemonConfig.maxConsecutiveFailures) {
        log(
          `Stopping: ${consecutiveFailures} consecutive failures reached limit.`
        );
        break;
      }
      const freeSlots = daemonConfig.maxConcurrent - activeTasks.size;
      if (freeSlots > 0) {
        const todayCount = await client.getQuota(projectId);
        if (todayCount >= daemonConfig.maxPerDay) {
          log(
            `Daily quota reached (${todayCount}/${daemonConfig.maxPerDay}). Waiting...`
          );
        } else {
          await fillSlots(freeSlots, todayCount);
        }
      }
      // Per-iteration heartbeat (in addition to the 30s interval above).
      const firstActive = activeTasks.values().next().value;
      await client.upsertHeartbeat({
        projectId,
        pid: process.pid,
        state: activeTasks.size > 0 ? "executing" : "idle",
        currentDeliverableId: firstActive?.deliverableId ?? void 0,
        currentRunId: firstActive?.runId,
        config: daemonConfig,
        stats: { completedToday, failedToday, activeCount: activeTasks.size }
      });
      // Best-effort: pull auto-generated deliverables into the task backlog.
      try {
        const ingested = await client.ingestAutonomousDeliverables(
          projectId,
          projectPath
        );
        if (ingested > 0) {
          log(`Ingested ${ingested} auto-generated task(s) into task backlog`);
        }
      } catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        log(`Warning: auto-task ingestion failed: ${msg}`);
      }
      // Best-effort: assign pending tasks to agents, logging each decision.
      try {
        const { routed, decisions } = await client.routePendingTasks(
          projectId,
          projectPath
        );
        for (const decision of decisions) {
          log(
            `Routed task "${decision.taskTitle}" to ${decision.agentName} (${decision.explanation.summary}; weight=${decision.explanation.weight}; load=${decision.explanation.activeQueuedCount})`
          );
        }
        if (routed > 0) {
          log(`Auto-routed ${routed} pending daemon task(s)`);
        }
      } catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        log(`Warning: daemon task routing failed: ${msg}`);
      }
      if (activeTasks.size > 0) {
        // Race all in-flight task completions against the poll interval so a
        // finishing task frees its slot immediately instead of after a sleep.
        const taggedPromises = [...activeTasks.entries()].map(
          ([id, task]) => task.promise.then((result) => ({ id, result }))
        );
        const sleepPromise = sleepWithCheck(daemonConfig.checkInterval).then(() => null);
        const completed = await Promise.race([...taggedPromises, sleepPromise]);
        if (completed !== null) {
          await handleCompletion(completed.id, completed.result);
        }
      } else {
        await sleepWithCheck(daemonConfig.checkInterval);
      }
    } catch (err) {
      // Any unexpected error counts toward the circuit breaker, then backs off.
      const msg = err instanceof Error ? err.message : String(err);
      log(`Unexpected error in daemon loop: ${msg}`);
      consecutiveFailures++;
      failedToday++;
      await sleepWithCheck(daemonConfig.checkInterval);
    }
  }
  // ---- Drain: wait for remaining tasks before final cleanup ----
  if (activeTasks.size > 0) {
    log(`Waiting for ${activeTasks.size} active task(s) to finish...`);
    const remaining = [...activeTasks.entries()].map(
      ([id, task]) => task.promise.then((result) => ({ id, result }))
    );
    const results = await Promise.allSettled(remaining);
    for (const r of results) {
      if (r.status === "fulfilled") {
        await handleCompletion(r.value.id, r.value.result);
      }
    }
  }
  clearInterval(heartbeatInterval);
  log("Daemon stopped.");
  // Best-effort cleanup: clear the server heartbeat and remove the PID file.
  try {
    await client.clearHeartbeat(projectId);
  } catch {
  }
  try {
    unlinkSync(pidPath);
  } catch {
  }
  return 0;
  // ---- Nested helpers (hoisted function declarations; close over the state above) ----
  // Claim and spawn up to `freeSlots` tasks across enabled agents, bounded by
  // the remaining daily quota, per-agent maxConcurrent, and one-task-per-
  // initiative at a time.
  async function fillSlots(freeSlots, todayCount) {
    let agents;
    try {
      agents = await client.listEnabledAgents(projectId);
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      log(`Failed to list agents: ${msg}`);
      consecutiveFailures++;
      failedToday++;
      return;
    }
    if (agents.length === 0) return;
    // Initiatives that already have a running task; new tasks from the same
    // initiative are skipped to avoid concurrent edits to one worktree.
    const activeInitiativeIds = new Set(
      [...activeTasks.values()].map((t) => t.initiativeId).filter((id) => id !== null)
    );
    let globalSlotsRemaining = Math.min(freeSlots, daemonConfig.maxPerDay - todayCount);
    for (const agent of agents) {
      if (globalSlotsRemaining <= 0 || !running) break;
      const agentRunning = runningCountForAgent(agent.id);
      const agentCapacity = agent.maxConcurrent - agentRunning;
      if (agentCapacity <= 0) continue;
      const fetchLimit = Math.min(agentCapacity, globalSlotsRemaining);
      let candidates;
      try {
        candidates = await client.getNextTasksForAgent(agent.id, fetchLimit);
      } catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        log(`Failed to fetch tasks for agent "${agent.name}": ${msg}`);
        continue;
      }
      for (const task of candidates) {
        if (globalSlotsRemaining <= 0 || !running) break;
        // Each task id is attempted at most once per daemon lifetime.
        if (attemptedIds.has(task.id)) continue;
        attemptedIds.add(task.id);
        if (task.initiativeId !== null && activeInitiativeIds.has(task.initiativeId)) {
          log(`Skipping "${task.title}" \u2014 another task from the same initiative is already running.`);
          continue;
        }
        // Deliverables of "build" types must carry an implementation brief;
        // detail-fetch failures fall through and let the claim proceed.
        if (task.type === "deliverable" && task.deliverableId) {
          try {
            const delivDetail = await client.getDeliverableDetail(task.deliverableId);
            if (delivDetail && ["feature", "enhancement", "refactor"].includes(delivDetail.type ?? "feature") && !delivDetail.prompt) {
              log(`Skipping "${task.title}" \u2014 deliverable needs an implementation brief (prompt).`);
              continue;
            }
          } catch {
          }
        }
        let claimed;
        try {
          claimed = await client.claimTask(task.id);
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          log(`Failed to claim task "${task.title}": ${msg}`);
          continue;
        }
        // A falsy claim means another worker got the task first.
        if (!claimed) {
          continue;
        }
        const spawned = await spawnAgentTask(claimed, agent);
        if (spawned) {
          globalSlotsRemaining--;
          if (task.initiativeId !== null) {
            activeInitiativeIds.add(task.initiativeId);
          }
        }
      }
    }
  }
  // Prepare (worktree, run record, prompt) and spawn one agent process for a
  // claimed task. Returns true if a slot was consumed, false on any refusal
  // or setup failure (which also bumps the failure counters).
  async function spawnAgentTask(task, agent) {
    let worktreePath = null;
    let initiativeTitle = null;
    // Best-effort lookup of the initiative title for logging/worktree naming.
    if (task.initiativeId) {
      try {
        const init = task.deliverableId ? await client.getInitiativeForDeliverable(task.deliverableId) : null;
        initiativeTitle = init?.initiativeTitle ?? null;
      } catch {
      }
    }
    // Worktree isolation: reuse the initiative's existing worktree or create
    // one. With useWorktree on, deliverable tasks are never run in the main
    // repo without isolation.
    if (daemonConfig.useWorktree && task.initiativeId) {
      try {
        const {
          createWorktree,
          getWorktreeForInitiative
        } = await import("./worktree-manager-2ZUJEL3L.js");
        const existing = await getWorktreeForInitiative(projectId, task.initiativeId);
        if (existing) {
          worktreePath = existing.worktreePath;
          log(`Reusing existing worktree at ${worktreePath}`);
        } else {
          const wt = await createWorktree(
            projectId,
            projectPath,
            task.initiativeId,
            initiativeTitle ?? "unknown"
          );
          worktreePath = wt.worktreePath;
          log(`Created worktree for "${initiativeTitle}" at ${worktreePath}`);
        }
      } catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        log(`Worktree setup failed: ${msg}`);
        if (task.deliverableId) {
          log("Skipping \u2014 useWorktree is enabled. Refusing to run in main repo without isolation.");
          consecutiveFailures++;
          failedToday++;
          return false;
        }
      }
    } else if (daemonConfig.useWorktree && task.deliverableId && !task.initiativeId) {
      log(`Skipping "${task.title}" \u2014 useWorktree is enabled but no initiative set. Refusing to run without isolation.`);
      consecutiveFailures++;
      failedToday++;
      return false;
    }
    const agentCwd = worktreePath ?? projectPath;
    // Run record must exist before spawning; its id tags the task and agent.
    let run;
    try {
      run = await client.createRun({
        projectId,
        deliverableId: task.deliverableId ?? task.id,
        pid: process.pid,
        worktreePath: worktreePath ?? void 0
      });
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      log(`Failed to create daemon run record: ${msg}`);
      consecutiveFailures++;
      failedToday++;
      return false;
    }
    // Best-effort status transition; spawning proceeds even if this fails.
    try {
      await client.updateTask(task.id, { status: "running", runId: run.id });
    } catch {
    }
    const { readFile } = await import("fs/promises");
    const { join: join2 } = await import("path");
    // CLAUDE.md (if present in the working dir) is folded into the prompt.
    let claudeMd = "";
    try {
      claudeMd = await readFile(join2(agentCwd, "CLAUDE.md"), "utf8");
    } catch {
    }
    const { assembleTaskPrompt } = await import("./runner-prompt-MOOPKA5P.js");
    const assembledPrompt = assembleTaskPrompt(
      { name: agent.name, systemPrompt: agent.systemPrompt },
      {
        title: task.title,
        prompt: task.prompt,
        deliverableId: task.deliverableId,
        // NOTE(review): projectPath (a path) is passed as projectName here —
        // confirm assembleTaskPrompt expects a path-like value.
        projectName: projectPath
      },
      claudeMd
    );
    log(
      `Spawning "${agent.name}" for: "${task.title}"${worktreePath ? ` (worktree: ${worktreePath})` : ""} [${activeTasks.size + 1}/${daemonConfig.maxConcurrent} slots]`
    );
    const { spawnAgent } = await import("./agent-spawner-BNOGEYDK.js");
    const spawned = spawnAgent({
      item: {
        priority: task.priority,
        tier: "active",
        type: "deliverable",
        title: task.title,
        reason: `Agent task: ${task.title}`,
        action: "Execute task",
        agentRole: agent.role ?? "implementation-engineer",
        entityId: task.deliverableId ?? task.id,
        entityType: "deliverable"
      },
      projectRoot: agentCwd,
      runId: run.id,
      projectId,
      deliverableId: task.deliverableId ?? void 0,
      // daemonConfig.timeout is in minutes; spawnAgent takes milliseconds.
      timeoutMs: daemonConfig.timeout * 6e4,
      client,
      assembledPrompt,
      // Only override the model when the agent diverges from the default.
      modelOverride: agent.model !== "claude-sonnet-4-20250514" ? agent.model : void 0,
      skipPermissions: daemonConfig.skipPermissions,
      onLine: (stream, line) => {
        if (stream === "stderr") {
          writeErr(line);
        }
      }
    });
    if (spawned.pid !== null) {
      try {
        await client.updateRunPid(run.id, spawned.pid);
      } catch {
      }
    }
    // Register the in-flight task; handleCompletion removes it when done.
    activeTasks.set(task.id, {
      promise: spawned.done,
      cancel: spawned.cancel,
      taskId: task.id,
      runId: run.id,
      agentId: agent.id,
      agentName: agent.name,
      deliverableId: task.deliverableId,
      initiativeId: task.initiativeId,
      worktreePath,
      title: task.title
    });
    log(
      `Picked: "${task.title}" (P${task.priority}) \u2192 agent "${agent.name}"${initiativeTitle ? ` [${initiativeTitle}]` : ""}`
    );
    return true;
  }
  // Record the outcome of a finished agent task: update the run record, the
  // task status, the failure/success counters, and park failed/cancelled
  // deliverables. Worktrees are always preserved (for reuse or debugging).
  async function handleCompletion(taskId, result) {
    const task = activeTasks.get(taskId);
    if (!task) return;
    activeTasks.delete(taskId);
    log(
      `Agent "${task.agentName}" finished: ${result.outcome} (exit ${result.exitCode ?? "n/a"}, ${result.lineCount} lines, ${Math.round(result.durationMs / 1e3)}s)`
    );
    try {
      await client.completeRun(task.runId, {
        status: result.outcome === "completed" ? "completed" : result.outcome === "timeout" ? "timeout" : result.outcome === "cancelled" ? "cancelled" : "failed",
        exitCode: result.exitCode ?? void 0,
        errorMessage: result.errorMessage
      });
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      log(`Failed to update daemon run: ${msg}`);
    }
    if (result.outcome === "completed") {
      log(`Completed: "${task.title}" (agent: ${task.agentName})`);
      // Success resets the circuit breaker.
      consecutiveFailures = 0;
      completedToday++;
      try {
        await client.updateTask(task.taskId, {
          status: "completed",
          result: `Completed successfully in ${Math.round(result.durationMs / 1e3)}s`
        });
      } catch {
      }
      if (task.worktreePath) {
        log(`Worktree preserved at ${task.worktreePath} for reuse by next task.`);
      }
    } else if (result.outcome === "cancelled") {
      // Cancellation counts as a failed run for stats but does NOT bump
      // consecutiveFailures (it is usually operator-initiated).
      log(`Cancelled: "${task.title}" (agent: ${task.agentName})`);
      failedToday++;
      try {
        await client.updateTask(task.taskId, { status: "cancelled" });
      } catch {
      }
      if (task.deliverableId) {
        try {
          await client.updateDeliverableStatus(task.deliverableId, "parked");
        } catch {
        }
      }
      if (task.worktreePath) {
        log(`Worktree preserved at ${task.worktreePath} for debugging`);
      }
    } else {
      // Failure or timeout: bump the circuit breaker and park the deliverable.
      const reason = result.errorMessage ?? `exit ${result.exitCode}`;
      log(
        `${result.outcome === "timeout" ? "Timed out" : "Failed"}: "${task.title}" (agent: ${task.agentName}) \u2014 ${reason}`
      );
      consecutiveFailures++;
      failedToday++;
      try {
        await client.updateTask(task.taskId, {
          status: "failed",
          errorMessage: reason
        });
      } catch {
      }
      if (task.deliverableId) {
        try {
          await client.updateDeliverableStatus(task.deliverableId, "parked");
        } catch {
        }
      }
      if (task.worktreePath) {
        log(`Worktree preserved at ${task.worktreePath} for debugging`);
      }
    }
  }
}
|
|
595
|
+
// Public entry points for the CLI dispatcher: start and cancel the daemon.
export {
  cmdDaemon,
  cmdDaemonCancel
};
|