@autoclawd/autoclawd 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Dockerfile +57 -0
- package/README.md +169 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +2786 -0
- package/dist/index.js.map +1 -0
- package/package.json +59 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,2786 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/index.ts
|
|
4
|
+
import { Command } from "commander";
|
|
5
|
+
|
|
6
|
+
// src/config.ts
|
|
7
|
+
import { z } from "zod";
|
|
8
|
+
import { readFileSync, existsSync } from "fs";
|
|
9
|
+
import { join } from "path";
|
|
10
|
+
import { homedir } from "os";
|
|
11
|
+
import yaml from "js-yaml";
|
|
12
|
+
var AgentConfigSchema = z.object({
|
|
13
|
+
model: z.string().default("claude-sonnet-4-6"),
|
|
14
|
+
maxIterations: z.number().min(1).max(50).default(10),
|
|
15
|
+
timeout: z.number().optional()
|
|
16
|
+
});
|
|
17
|
+
var DockerConfigSchema = z.object({
|
|
18
|
+
image: z.string().default("node:20"),
|
|
19
|
+
memory: z.string().default("4g"),
|
|
20
|
+
cpus: z.number().optional(),
|
|
21
|
+
setup: z.array(z.string()).optional(),
|
|
22
|
+
// Shell commands to run before Claude Code
|
|
23
|
+
volumes: z.array(z.string()).optional(),
|
|
24
|
+
// Extra bind mounts, e.g. ["~/.ssh:/home/autoclawd/.ssh:ro"]
|
|
25
|
+
network: z.enum(["bridge", "host", "none"]).default("bridge")
|
|
26
|
+
// Container network mode
|
|
27
|
+
});
|
|
28
|
+
var LinearConfigSchema = z.object({
|
|
29
|
+
apiKey: z.string(),
|
|
30
|
+
teamId: z.string(),
|
|
31
|
+
statuses: z.array(z.string()).default(["Todo"]),
|
|
32
|
+
assignToMe: z.boolean().default(true),
|
|
33
|
+
doneStatus: z.string().default("Done"),
|
|
34
|
+
inProgressStatus: z.string().default("In Progress")
|
|
35
|
+
});
|
|
36
|
+
var GitHubConfigSchema = z.object({
|
|
37
|
+
token: z.string()
|
|
38
|
+
});
|
|
39
|
+
var WebhookConfigSchema = z.object({
|
|
40
|
+
port: z.number().default(3e3),
|
|
41
|
+
url: z.string().optional(),
|
|
42
|
+
signingSecret: z.string().optional(),
|
|
43
|
+
webhookId: z.string().optional()
|
|
44
|
+
});
|
|
45
|
+
var SafetyConfigSchema = z.object({
|
|
46
|
+
allowedRepos: z.array(z.string()).optional(),
|
|
47
|
+
branchPrefix: z.string().default("autoclawd/"),
|
|
48
|
+
maxFileChanges: z.number().min(1).optional()
|
|
49
|
+
});
|
|
50
|
+
var ConfigSchema = z.object({
|
|
51
|
+
linear: LinearConfigSchema,
|
|
52
|
+
github: GitHubConfigSchema,
|
|
53
|
+
docker: DockerConfigSchema.default({}),
|
|
54
|
+
agent: AgentConfigSchema.default({}),
|
|
55
|
+
webhook: WebhookConfigSchema.default({}),
|
|
56
|
+
safety: SafetyConfigSchema.default({}),
|
|
57
|
+
maxConcurrent: z.number().default(1),
|
|
58
|
+
validate: z.array(z.string()).optional()
|
|
59
|
+
// Global default validation commands
|
|
60
|
+
});
|
|
61
|
+
var RepoLocalConfigSchema = z.object({
|
|
62
|
+
prompt: z.string().optional(),
|
|
63
|
+
base: z.string().default("main"),
|
|
64
|
+
agent: AgentConfigSchema.partial().optional(),
|
|
65
|
+
docker: DockerConfigSchema.partial().optional(),
|
|
66
|
+
validate: z.array(z.string()).optional()
|
|
67
|
+
// Validation commands to run before PR
|
|
68
|
+
});
|
|
69
|
+
// Recursively replaces "env:NAME" string values with the value of the
// corresponding environment variable. Arrays and plain objects are walked
// depth-first; every other value passes through untouched.
// Throws if a referenced environment variable is unset.
function resolveEnvVars(obj) {
  if (typeof obj === "string") {
    if (!obj.startsWith("env:")) return obj;
    const envName = obj.slice(4);
    const val = process.env[envName];
    if (!val) throw new Error(`Environment variable ${envName} is not set`);
    return val;
  }
  if (Array.isArray(obj)) {
    return obj.map((item) => resolveEnvVars(item));
  }
  if (obj && typeof obj === "object") {
    const out = {};
    for (const [key, value] of Object.entries(obj)) {
      out[key] = resolveEnvVars(value);
    }
    return out;
  }
  return obj;
}
|
|
87
|
+
var CONFIG_DIR = join(homedir(), ".autoclawd");
|
|
88
|
+
var CONFIG_FILE = join(CONFIG_DIR, "config.yaml");
|
|
89
|
+
// Loads and validates the host-level config file.
// Falls back to ~/.autoclawd/config.yaml when no explicit path is given.
// "env:NAME" references are resolved before validation against ConfigSchema.
// Throws with a readable message when the file is missing, empty, or fails
// schema validation.
function loadConfig(path) {
  const configPath = path ?? CONFIG_FILE;
  if (!existsSync(configPath)) {
    throw new Error(`Config not found: ${configPath}
Run: autoclawd init`);
  }
  const content = readFileSync(configPath, "utf-8");
  const raw = yaml.load(content);
  if (!raw || typeof raw !== "object") {
    throw new Error(`Config file is empty or invalid: ${configPath}`);
  }
  const resolved = resolveEnvVars(raw);
  const result = ConfigSchema.safeParse(resolved);
  if (result.success) {
    return result.data;
  }
  const issues = result.error.issues
    .map((i) => ` - ${i.path.join(".")}: ${i.message}`)
    .join("\n");
  throw new Error(`Invalid config (${configPath}):
${issues}`);
}
|
|
111
|
+
// Looks for a repo-local override file (.autoclawd.yaml, then .autoclawd.yml)
// inside the workspace. Returns undefined when no file exists or the parsed
// content is not an object; otherwise validates against RepoLocalConfigSchema
// (which throws on schema violations).
function loadRepoLocalConfig(workspacePath) {
  const candidates = [
    join(workspacePath, ".autoclawd.yaml"),
    join(workspacePath, ".autoclawd.yml")
  ];
  const configPath = candidates.find((p) => existsSync(p));
  if (!configPath) return void 0;
  const raw = yaml.load(readFileSync(configPath, "utf-8"));
  if (!raw || typeof raw !== "object") return void 0;
  return RepoLocalConfigSchema.parse(raw);
}
|
|
120
|
+
// Merges host-level config with optional repo-local overrides.
// Local agent/docker fields win over host fields; the merged objects are
// re-parsed so schema defaults still apply. A local `validate` list replaces
// (does not extend) the host-level one; `prompt` comes from local only.
function mergeConfigs(host, local) {
  const mergedAgent = AgentConfigSchema.parse({ ...host.agent, ...local?.agent });
  const mergedDocker = DockerConfigSchema.parse({ ...host.docker, ...local?.docker });
  return {
    agent: mergedAgent,
    docker: mergedDocker,
    prompt: local?.prompt,
    validate: local?.validate ?? host.validate
  };
}
|
|
132
|
+
// Extracts a repository URL from ticket labels of the form "repo:<value>"
// (case-insensitive prefix). A full https URL is returned as-is; an
// "owner/name" shorthand is expanded to a github.com URL. Labels that match
// the prefix but carry neither form are skipped; returns undefined when no
// label yields a repo URL.
function parseRepoFromLabels(labels) {
  const pattern = /^repo:(.+)/i;
  for (const label of labels) {
    const match = pattern.exec(label);
    if (!match) continue;
    const value = match[1].trim();
    if (value.startsWith("https://")) return value;
    if (value.includes("/")) return `https://github.com/${value}`;
  }
  return void 0;
}
|
|
143
|
+
// Extracts a base-branch override from ticket labels of the form
// "base:<branch>" (case-insensitive prefix). Returns the trimmed branch name
// from the first matching label, or undefined if none match.
function parseBaseFromLabels(labels) {
  const pattern = /^base:(.+)/i;
  for (const label of labels) {
    const match = pattern.exec(label);
    if (match) return match[1].trim();
  }
  return void 0;
}
|
|
152
|
+
// Checks a repo URL against the optional allow-list. An undefined list allows
// everything (an empty list allows nothing). Allow-list entries may be
// "owner/name" shorthands (expanded to github.com URLs) or full URLs;
// comparison is case-insensitive.
function isRepoAllowed(repoUrl, allowedRepos) {
  if (!allowedRepos) return true;
  const target = repoUrl.toLowerCase();
  return allowedRepos.some((allowed) => {
    let normalized = allowed;
    if (allowed.includes("/") && !allowed.startsWith("https://")) {
      normalized = `https://github.com/${allowed}`;
    }
    return target === normalized.toLowerCase();
  });
}
|
|
159
|
+
|
|
160
|
+
// src/linear.ts
|
|
161
|
+
import { LinearClient } from "@linear/sdk";
|
|
162
|
+
|
|
163
|
+
// src/logger.ts
|
|
164
|
+
import chalk from "chalk";
|
|
165
|
+
import { appendFileSync, mkdirSync, existsSync as existsSync2 } from "fs";
|
|
166
|
+
import { join as join2 } from "path";
|
|
167
|
+
import { homedir as homedir2 } from "os";
|
|
168
|
+
var currentLevel = "info";
|
|
169
|
+
var logFilePath;
|
|
170
|
+
var levels = {
|
|
171
|
+
debug: 0,
|
|
172
|
+
info: 1,
|
|
173
|
+
warn: 2,
|
|
174
|
+
error: 3
|
|
175
|
+
};
|
|
176
|
+
function setLogLevel(level) {
|
|
177
|
+
currentLevel = level;
|
|
178
|
+
}
|
|
179
|
+
function enableFileLogging() {
|
|
180
|
+
const logDir = join2(homedir2(), ".autoclawd", "logs");
|
|
181
|
+
if (!existsSync2(logDir)) {
|
|
182
|
+
mkdirSync(logDir, { recursive: true });
|
|
183
|
+
}
|
|
184
|
+
const date = (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
|
|
185
|
+
logFilePath = join2(logDir, `autoclawd-${date}.log`);
|
|
186
|
+
}
|
|
187
|
+
// True when `level` is at or above the currently configured log level
// (module-level `currentLevel`, compared via the `levels` severity map).
function shouldLog(level) {
  const threshold = levels[currentLevel];
  return levels[level] >= threshold;
}
|
|
190
|
+
// Returns the current UTC time of day as "HH:MM:SS" (a slice of the
// ISO-8601 string).
function timestamp() {
  const iso = new Date().toISOString();
  return iso.slice(11, 19);
}
|
|
193
|
+
// Returns the current time as a full ISO-8601 UTC string, used for the
// file-log prefix.
function fullTimestamp() {
  const now = new Date();
  return now.toISOString();
}
|
|
196
|
+
function writeToFile(level, msg) {
|
|
197
|
+
if (!logFilePath) return;
|
|
198
|
+
try {
|
|
199
|
+
appendFileSync(logFilePath, `${fullTimestamp()} [${level.padEnd(5)}] ${msg}
|
|
200
|
+
`);
|
|
201
|
+
} catch {
|
|
202
|
+
}
|
|
203
|
+
}
|
|
204
|
+
var log = {
|
|
205
|
+
debug(msg, ...args) {
|
|
206
|
+
writeToFile("DEBUG", msg);
|
|
207
|
+
if (shouldLog("debug")) console.log(chalk.gray(`[${timestamp()}] ${msg}`), ...args);
|
|
208
|
+
},
|
|
209
|
+
info(msg, ...args) {
|
|
210
|
+
writeToFile("INFO", msg);
|
|
211
|
+
if (shouldLog("info")) console.log(chalk.blue(`[${timestamp()}]`), msg, ...args);
|
|
212
|
+
},
|
|
213
|
+
warn(msg, ...args) {
|
|
214
|
+
writeToFile("WARN", msg);
|
|
215
|
+
if (shouldLog("warn")) console.log(chalk.yellow(`[${timestamp()}] WARN`), msg, ...args);
|
|
216
|
+
},
|
|
217
|
+
error(msg, ...args) {
|
|
218
|
+
writeToFile("ERROR", msg);
|
|
219
|
+
if (shouldLog("error")) console.log(chalk.red(`[${timestamp()}] ERROR`), msg, ...args);
|
|
220
|
+
},
|
|
221
|
+
success(msg, ...args) {
|
|
222
|
+
writeToFile("INFO", `\u2713 ${msg}`);
|
|
223
|
+
if (shouldLog("info")) console.log(chalk.green(`[${timestamp()}] \u2713`), msg, ...args);
|
|
224
|
+
},
|
|
225
|
+
ticket(id, msg, ...args) {
|
|
226
|
+
writeToFile("INFO", `[${id}] ${msg}`);
|
|
227
|
+
if (shouldLog("info")) console.log(chalk.cyan(`[${timestamp()}] [${id}]`), msg, ...args);
|
|
228
|
+
}
|
|
229
|
+
};
|
|
230
|
+
|
|
231
|
+
// src/retry.ts
|
|
232
|
+
// Runs an async operation with exponential backoff.
// opts: { label, attempts? (default 3), baseDelayMs? (default 1000ms),
// retryIf? }. When retryIf is provided and rejects an error, that error is
// rethrown immediately with no further attempts. The delay doubles after
// each failed attempt; the last error is rethrown once attempts run out.
async function retry(fn, opts) {
  const maxAttempts = opts.attempts ?? 3;
  const baseDelay = opts.baseDelayMs ?? 1e3;
  let lastErr;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return await fn();
    } catch (err) {
      lastErr = err;
      if (opts.retryIf && !opts.retryIf(err)) throw err;
      if (attempt === maxAttempts) break;
      const delay = baseDelay * 2 ** (attempt - 1);
      const summary = err instanceof Error ? err.message.split("\n")[0] : String(err);
      log.warn(`${opts.label} failed (attempt ${attempt}/${maxAttempts}): ${summary} \u2014 retrying in ${delay}ms`);
      await sleep(delay);
    }
  }
  throw lastErr;
}
|
|
254
|
+
// Adapter that gives a synchronous (possibly throwing) function the same
// backoff behavior as retry().
function retrySync(fn, opts) {
  const asyncFn = async () => fn();
  return retry(asyncFn, opts);
}
|
|
257
|
+
// Heuristic check for transient (retryable) failures: network-level errno
// strings, timeouts, HTTP 5xx / 429 status codes, and rate-limit messages,
// matched case-insensitively against the error message. Non-Error values are
// never considered transient.
function isTransientError(err) {
  if (!(err instanceof Error)) return false;
  const msg = err.message.toLowerCase();
  const markers = [
    "econnreset",
    "econnrefused",
    "etimedout",
    "eai_again",
    "epipe",
    "socket hang up",
    "network",
    "timeout",
    "502",
    "503",
    "504",
    "rate limit",
    "429",
    "500",
    "internal server error"
  ];
  return markers.some((marker) => msg.includes(marker));
}
|
|
262
|
+
// Promise-based delay: resolves after `ms` milliseconds.
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
|
265
|
+
|
|
266
|
+
// src/linear.ts
|
|
267
|
+
// Builds a Linear SDK client authenticated with the host config's API key.
function createLinearClient(config) {
  const apiKey = config.linear.apiKey;
  return new LinearClient({ apiKey });
}
|
|
270
|
+
async function pollTickets(client, config) {
|
|
271
|
+
const team = await client.team(config.linear.teamId);
|
|
272
|
+
const issues = await team.issues({
|
|
273
|
+
filter: {
|
|
274
|
+
state: { name: { in: config.linear.statuses } },
|
|
275
|
+
assignee: config.linear.assignToMe ? { isMe: { eq: true } } : void 0
|
|
276
|
+
},
|
|
277
|
+
first: 50
|
|
278
|
+
});
|
|
279
|
+
const tickets = [];
|
|
280
|
+
for (const issue of issues.nodes) {
|
|
281
|
+
const labels = await issue.labels();
|
|
282
|
+
const labelNames = labels.nodes.map((l) => l.name);
|
|
283
|
+
const repoUrl = parseRepoFromLabels(labelNames);
|
|
284
|
+
const baseBranch = parseBaseFromLabels(labelNames);
|
|
285
|
+
tickets.push({
|
|
286
|
+
id: issue.id,
|
|
287
|
+
identifier: issue.identifier,
|
|
288
|
+
title: issue.title,
|
|
289
|
+
description: issue.description ?? "",
|
|
290
|
+
labels: labelNames,
|
|
291
|
+
repoUrl,
|
|
292
|
+
baseBranch,
|
|
293
|
+
url: issue.url
|
|
294
|
+
});
|
|
295
|
+
}
|
|
296
|
+
return tickets;
|
|
297
|
+
}
|
|
298
|
+
// Fetches a single ticket by its human identifier (e.g. "ENG-42").
// Returns undefined when the identifier is malformed or no matching issue
// exists. Repo URL and base branch are derived from "repo:" / "base:" labels
// via the shared label parsers; a missing description becomes "".
async function fetchTicket(client, identifier) {
  const match = identifier.match(/^([A-Za-z]+)-(\d+)$/);
  if (!match) return void 0;
  const [, teamKey, numStr] = match;
  // Linear team keys are uppercase; accept lowercase input gracefully.
  const normalizedKey = teamKey.toUpperCase();
  const team = await client.team(normalizedKey);
  const result = await team.issues({
    // Always pass a radix to parseInt; the bare form is a classic footgun.
    filter: { number: { eq: Number.parseInt(numStr, 10) } },
    first: 1
  });
  const issue = result.nodes[0];
  if (!issue) return void 0;
  const labels = await issue.labels();
  const labelNames = labels.nodes.map((l) => l.name);
  const repoUrl = parseRepoFromLabels(labelNames);
  const baseBranch = parseBaseFromLabels(labelNames);
  return {
    id: issue.id,
    identifier: issue.identifier,
    title: issue.title,
    description: issue.description ?? "",
    labels: labelNames,
    repoUrl,
    baseBranch,
    url: issue.url
  };
}
|
|
325
|
+
async function claimTicket(client, config, ticketId) {
|
|
326
|
+
log.info(`Claiming ticket, moving to "${config.linear.inProgressStatus}"`);
|
|
327
|
+
await retry(async () => {
|
|
328
|
+
const team = await client.team(config.linear.teamId);
|
|
329
|
+
const states = await team.states();
|
|
330
|
+
const target = config.linear.inProgressStatus.toLowerCase();
|
|
331
|
+
const inProgress = states.nodes.find((s) => s.name.toLowerCase() === target);
|
|
332
|
+
if (!inProgress) {
|
|
333
|
+
log.warn(`State "${config.linear.inProgressStatus}" not found, skipping status update`);
|
|
334
|
+
return;
|
|
335
|
+
}
|
|
336
|
+
await client.updateIssue(ticketId, { stateId: inProgress.id });
|
|
337
|
+
}, { label: "Linear claim", retryIf: isTransientError });
|
|
338
|
+
}
|
|
339
|
+
async function completeTicket(client, config, ticketId, comment) {
|
|
340
|
+
await retry(async () => {
|
|
341
|
+
await client.createComment({ issueId: ticketId, body: comment });
|
|
342
|
+
const team = await client.team(config.linear.teamId);
|
|
343
|
+
const states = await team.states();
|
|
344
|
+
const doneTarget = config.linear.doneStatus.toLowerCase();
|
|
345
|
+
const done = states.nodes.find((s) => s.name.toLowerCase() === doneTarget);
|
|
346
|
+
if (done) {
|
|
347
|
+
await client.updateIssue(ticketId, { stateId: done.id });
|
|
348
|
+
}
|
|
349
|
+
}, { label: "Linear complete", retryIf: isTransientError });
|
|
350
|
+
}
|
|
351
|
+
async function addBranchLabel(client, ticketId, branchName) {
|
|
352
|
+
try {
|
|
353
|
+
const issue = await client.issue(ticketId);
|
|
354
|
+
const team = await issue.team;
|
|
355
|
+
if (!team) return;
|
|
356
|
+
const teamLabels = await team.labels();
|
|
357
|
+
const labelName = `base:${branchName}`;
|
|
358
|
+
let labelId = teamLabels.nodes.find((l) => l.name === labelName)?.id;
|
|
359
|
+
if (!labelId) {
|
|
360
|
+
const result = await client.createIssueLabel({
|
|
361
|
+
name: labelName,
|
|
362
|
+
teamId: team.id
|
|
363
|
+
});
|
|
364
|
+
const label = await result.issueLabel;
|
|
365
|
+
labelId = label?.id;
|
|
366
|
+
}
|
|
367
|
+
if (labelId) {
|
|
368
|
+
const currentLabels = await issue.labels();
|
|
369
|
+
const labelIds = currentLabels.nodes.map((l) => l.id);
|
|
370
|
+
if (!labelIds.includes(labelId)) {
|
|
371
|
+
await client.updateIssue(ticketId, {
|
|
372
|
+
labelIds: [...labelIds, labelId]
|
|
373
|
+
});
|
|
374
|
+
log.info(`Added "base:${branchName}" label to ticket for stacked diffs`);
|
|
375
|
+
}
|
|
376
|
+
}
|
|
377
|
+
} catch (err) {
|
|
378
|
+
log.debug(`Could not add branch label: ${err instanceof Error ? err.message : err}`);
|
|
379
|
+
}
|
|
380
|
+
}
|
|
381
|
+
// Posts a failure comment on the ticket. Unlike claimTicket/completeTicket
// this does not change the ticket's workflow state.
// Fix: the sibling Linear mutations retry transient network errors via
// retry()/isTransientError, but this one did not — a flaky network hop could
// silently lose the failure report. Wrapped in the same retry policy.
async function failTicket(client, ticketId, reason) {
  await retry(async () => {
    await client.createComment({
      issueId: ticketId,
      body: `**autoclawd failed:**

${reason}`
    });
  }, { label: "Linear fail comment", retryIf: isTransientError });
}
|
|
389
|
+
|
|
390
|
+
// src/github.ts
|
|
391
|
+
import { Octokit } from "@octokit/rest";
|
|
392
|
+
// Builds an authenticated Octokit client from the host config's GitHub token.
function createOctokit(config) {
  const auth = config.github.token;
  return new Octokit({ auth });
}
|
|
395
|
+
// Parses "owner" and "repo" out of a GitHub locator. Accepts https URLs
// ("https://github.com/owner/repo[.git][/...]") and ssh-style remotes
// ("git@github.com:owner/repo[.git]"). Throws on anything it cannot parse.
// Fix: the previous pattern used [^/.]+ for the repo segment, which
// truncated repository names containing dots (e.g. "my.repo" -> "my").
// The repo segment now allows dots while still stripping a trailing ".git".
function parseRepoUrl(url) {
  const match = url.match(/github\.com[/:]([^/]+)\/([^/?#]+?)(?:\.git)?(?:[/?#]|$)/);
  if (!match) throw new Error(`Cannot parse GitHub URL: ${url}`);
  return { owner: match[1], repo: match[2] };
}
|
|
400
|
+
async function openPR(octokit, opts) {
|
|
401
|
+
return retry(async () => {
|
|
402
|
+
const { owner, repo } = parseRepoUrl(opts.repoUrl);
|
|
403
|
+
const existing = await octokit.rest.pulls.list({
|
|
404
|
+
owner,
|
|
405
|
+
repo,
|
|
406
|
+
head: `${owner}:${opts.branch}`,
|
|
407
|
+
state: "open"
|
|
408
|
+
});
|
|
409
|
+
if (existing.data.length > 0) {
|
|
410
|
+
const pr2 = existing.data[0];
|
|
411
|
+
log.info(`Updating existing PR #${pr2.number}: ${pr2.html_url}`);
|
|
412
|
+
await octokit.rest.pulls.update({
|
|
413
|
+
owner,
|
|
414
|
+
repo,
|
|
415
|
+
pull_number: pr2.number,
|
|
416
|
+
body: opts.body
|
|
417
|
+
});
|
|
418
|
+
return { url: pr2.html_url, number: pr2.number };
|
|
419
|
+
}
|
|
420
|
+
const pr = await octokit.rest.pulls.create({
|
|
421
|
+
owner,
|
|
422
|
+
repo,
|
|
423
|
+
title: opts.title,
|
|
424
|
+
body: opts.body,
|
|
425
|
+
head: opts.branch,
|
|
426
|
+
base: opts.baseBranch,
|
|
427
|
+
draft: opts.draft ?? false
|
|
428
|
+
});
|
|
429
|
+
log.success(`Created ${opts.draft ? "draft " : ""}PR #${pr.data.number}: ${pr.data.html_url}`);
|
|
430
|
+
return { url: pr.data.html_url, number: pr.data.number };
|
|
431
|
+
}, { label: "GitHub PR", retryIf: isTransientError });
|
|
432
|
+
}
|
|
433
|
+
async function markPRReady(octokit, opts) {
|
|
434
|
+
return retry(async () => {
|
|
435
|
+
const { owner, repo } = parseRepoUrl(opts.repoUrl);
|
|
436
|
+
await octokit.rest.pulls.update({
|
|
437
|
+
owner,
|
|
438
|
+
repo,
|
|
439
|
+
pull_number: opts.prNumber,
|
|
440
|
+
draft: false
|
|
441
|
+
});
|
|
442
|
+
log.info(`Marked PR #${opts.prNumber} as ready for review`);
|
|
443
|
+
}, { label: "GitHub PR ready", retryIf: isTransientError });
|
|
444
|
+
}
|
|
445
|
+
async function updatePRBody(octokit, opts) {
|
|
446
|
+
return retry(async () => {
|
|
447
|
+
const { owner, repo } = parseRepoUrl(opts.repoUrl);
|
|
448
|
+
await octokit.rest.pulls.update({
|
|
449
|
+
owner,
|
|
450
|
+
repo,
|
|
451
|
+
pull_number: opts.prNumber,
|
|
452
|
+
body: opts.body
|
|
453
|
+
});
|
|
454
|
+
}, { label: "GitHub PR update", retryIf: isTransientError });
|
|
455
|
+
}
|
|
456
|
+
async function fetchPR(octokit, opts) {
|
|
457
|
+
const { owner, repo } = parseRepoUrl(opts.repoUrl);
|
|
458
|
+
const { data } = await octokit.rest.pulls.get({ owner, repo, pull_number: opts.prNumber });
|
|
459
|
+
return {
|
|
460
|
+
head: data.head.ref,
|
|
461
|
+
base: data.base.ref,
|
|
462
|
+
title: data.title,
|
|
463
|
+
state: data.state
|
|
464
|
+
};
|
|
465
|
+
}
|
|
466
|
+
async function fetchFailedChecks(octokit, opts) {
|
|
467
|
+
const { owner, repo } = parseRepoUrl(opts.repoUrl);
|
|
468
|
+
const { data: pr } = await octokit.rest.pulls.get({ owner, repo, pull_number: opts.prNumber });
|
|
469
|
+
const headSha = pr.head.sha;
|
|
470
|
+
const { data: checks } = await octokit.rest.checks.listForRef({
|
|
471
|
+
owner,
|
|
472
|
+
repo,
|
|
473
|
+
ref: headSha
|
|
474
|
+
});
|
|
475
|
+
const failed = checks.check_runs.filter(
|
|
476
|
+
(c) => c.conclusion === "failure" || c.conclusion === "timed_out"
|
|
477
|
+
);
|
|
478
|
+
if (failed.length === 0) return [];
|
|
479
|
+
const results = [];
|
|
480
|
+
for (const check of failed) {
|
|
481
|
+
let logText = "";
|
|
482
|
+
if (check.app?.slug === "github-actions") {
|
|
483
|
+
try {
|
|
484
|
+
const { data: runs } = await octokit.rest.actions.listWorkflowRunsForRepo({
|
|
485
|
+
owner,
|
|
486
|
+
repo,
|
|
487
|
+
head_sha: headSha,
|
|
488
|
+
per_page: 10
|
|
489
|
+
});
|
|
490
|
+
for (const run of runs.workflow_runs) {
|
|
491
|
+
if (run.conclusion !== "failure") continue;
|
|
492
|
+
const { data: jobs } = await octokit.rest.actions.listJobsForWorkflowRun({
|
|
493
|
+
owner,
|
|
494
|
+
repo,
|
|
495
|
+
run_id: run.id
|
|
496
|
+
});
|
|
497
|
+
for (const job of jobs.jobs) {
|
|
498
|
+
if (job.conclusion !== "failure") continue;
|
|
499
|
+
try {
|
|
500
|
+
const { data: log2 } = await octokit.rest.actions.downloadJobLogsForWorkflowRun({
|
|
501
|
+
owner,
|
|
502
|
+
repo,
|
|
503
|
+
job_id: job.id
|
|
504
|
+
});
|
|
505
|
+
const logStr = typeof log2 === "string" ? log2 : String(log2);
|
|
506
|
+
logText += `
|
|
507
|
+
--- Job: ${job.name} ---
|
|
508
|
+
` + logStr.slice(-3e3);
|
|
509
|
+
} catch {
|
|
510
|
+
}
|
|
511
|
+
}
|
|
512
|
+
}
|
|
513
|
+
} catch {
|
|
514
|
+
}
|
|
515
|
+
}
|
|
516
|
+
if (!logText && check.output?.text) {
|
|
517
|
+
logText = check.output.text.slice(-3e3);
|
|
518
|
+
}
|
|
519
|
+
if (!logText && check.output?.summary) {
|
|
520
|
+
logText = check.output.summary.slice(-3e3);
|
|
521
|
+
}
|
|
522
|
+
results.push({
|
|
523
|
+
name: check.name,
|
|
524
|
+
conclusion: check.conclusion ?? "failure",
|
|
525
|
+
log: logText || "(no log available)"
|
|
526
|
+
});
|
|
527
|
+
}
|
|
528
|
+
return results;
|
|
529
|
+
}
|
|
530
|
+
|
|
531
|
+
// src/webhook.ts
|
|
532
|
+
import { createServer } from "http";
|
|
533
|
+
import { createHmac, timingSafeEqual } from "crypto";
|
|
534
|
+
|
|
535
|
+
// src/docker.ts
|
|
536
|
+
import Docker from "dockerode";
|
|
537
|
+
import { PassThrough } from "stream";
|
|
538
|
+
import { homedir as homedir3, tmpdir } from "os";
|
|
539
|
+
import { join as join3 } from "path";
|
|
540
|
+
import { existsSync as existsSync3, readFileSync as readFileSync2, writeFileSync, mkdtempSync, rmSync } from "fs";
|
|
541
|
+
import { execSync } from "child_process";
|
|
542
|
+
var docker = new Docker();
|
|
543
|
+
async function checkDockerAvailable() {
|
|
544
|
+
try {
|
|
545
|
+
await docker.ping();
|
|
546
|
+
} catch (err) {
|
|
547
|
+
throw new Error(
|
|
548
|
+
"Cannot connect to Docker daemon. Is Docker running?\nInstall: https://docs.docker.com/get-docker/"
|
|
549
|
+
);
|
|
550
|
+
}
|
|
551
|
+
}
|
|
552
|
+
var AUTOCODE_BASE_DOCKERFILE = `FROM node:20-bookworm
|
|
553
|
+
ENV DEBIAN_FRONTEND=noninteractive
|
|
554
|
+
RUN apt-get update && apt-get install -y --no-install-recommends \\
|
|
555
|
+
build-essential git curl wget ca-certificates openssh-client jq unzip \\
|
|
556
|
+
python3 python3-pip python3-venv python3-dev \\
|
|
557
|
+
ruby ruby-dev \\
|
|
558
|
+
&& rm -rf /var/lib/apt/lists/*
|
|
559
|
+
ARG GO_VERSION=1.22.2
|
|
560
|
+
RUN curl -fsSL "https://go.dev/dl/go\${GO_VERSION}.linux-$(dpkg --print-architecture).tar.gz" \\
|
|
561
|
+
| tar -xz -C /usr/local
|
|
562
|
+
ENV PATH="/usr/local/go/bin:\${PATH}"
|
|
563
|
+
RUN npm install -g @anthropic-ai/claude-code@latest
|
|
564
|
+
WORKDIR /workspace
|
|
565
|
+
`;
|
|
566
|
+
async function ensureImage(image) {
|
|
567
|
+
try {
|
|
568
|
+
await docker.getImage(image).inspect();
|
|
569
|
+
log.debug(`Image ${image} found locally`);
|
|
570
|
+
return;
|
|
571
|
+
} catch {
|
|
572
|
+
}
|
|
573
|
+
if (image === "autoclawd-base") {
|
|
574
|
+
log.info("Building autoclawd-base image (first time only, takes a few minutes)...");
|
|
575
|
+
const buildDir = mkdtempSync(join3(tmpdir(), "autoclawd-build-"));
|
|
576
|
+
try {
|
|
577
|
+
writeFileSync(join3(buildDir, "Dockerfile"), AUTOCODE_BASE_DOCKERFILE);
|
|
578
|
+
execSync(`docker build -t autoclawd-base ${buildDir}`, {
|
|
579
|
+
stdio: "inherit",
|
|
580
|
+
timeout: 6e5
|
|
581
|
+
});
|
|
582
|
+
log.success("autoclawd-base image built");
|
|
583
|
+
return;
|
|
584
|
+
} catch (err) {
|
|
585
|
+
throw new Error(
|
|
586
|
+
`Failed to build autoclawd-base image.
|
|
587
|
+
You can build manually: docker build -t autoclawd-base .
|
|
588
|
+
Or use a different image in your config (e.g. node:20).
|
|
589
|
+
Error: ${err instanceof Error ? err.message : err}`
|
|
590
|
+
);
|
|
591
|
+
} finally {
|
|
592
|
+
rmSync(buildDir, { recursive: true, force: true });
|
|
593
|
+
}
|
|
594
|
+
}
|
|
595
|
+
log.info(`Pulling image ${image} (this may take a moment)...`);
|
|
596
|
+
await retry(() => new Promise((resolve, reject) => {
|
|
597
|
+
docker.pull(image, (err, stream) => {
|
|
598
|
+
if (err) {
|
|
599
|
+
if (err.message?.includes("not found") || err.message?.includes("404")) {
|
|
600
|
+
return reject(new Error(`Docker image "${image}" not found. Check the image name in your config.`));
|
|
601
|
+
}
|
|
602
|
+
return reject(err);
|
|
603
|
+
}
|
|
604
|
+
docker.modem.followProgress(stream, (err2) => {
|
|
605
|
+
if (err2) reject(err2);
|
|
606
|
+
else {
|
|
607
|
+
log.success(`Image ${image} pulled`);
|
|
608
|
+
resolve();
|
|
609
|
+
}
|
|
610
|
+
});
|
|
611
|
+
});
|
|
612
|
+
}), { label: "docker pull", retryIf: isTransientError });
|
|
613
|
+
}
|
|
614
|
+
async function createContainer(opts) {
|
|
615
|
+
const { dockerConfig, name } = opts;
|
|
616
|
+
await ensureImage(dockerConfig.image);
|
|
617
|
+
const binds = [`${opts.workspacePath}:/workspace`];
|
|
618
|
+
if (dockerConfig.volumes?.length) {
|
|
619
|
+
for (const vol of dockerConfig.volumes) {
|
|
620
|
+
binds.push(vol.replace(/^~(?=\/|:)/, homedir3()));
|
|
621
|
+
}
|
|
622
|
+
}
|
|
623
|
+
const container = await docker.createContainer({
|
|
624
|
+
Image: dockerConfig.image,
|
|
625
|
+
name: `autoclawd-${name}`,
|
|
626
|
+
Cmd: ["sleep", "infinity"],
|
|
627
|
+
WorkingDir: "/workspace",
|
|
628
|
+
User: "root",
|
|
629
|
+
HostConfig: {
|
|
630
|
+
Binds: binds,
|
|
631
|
+
Memory: parseMemory(dockerConfig.memory),
|
|
632
|
+
NanoCpus: dockerConfig.cpus ? dockerConfig.cpus * 1e9 : void 0,
|
|
633
|
+
NetworkMode: dockerConfig.network ?? "bridge"
|
|
634
|
+
},
|
|
635
|
+
Env: ["HOME=/root"]
|
|
636
|
+
});
|
|
637
|
+
await container.start();
|
|
638
|
+
log.info(`Container ${name} started (${container.id.slice(0, 12)})`);
|
|
639
|
+
const wrapped = { id: container.id, instance: container };
|
|
640
|
+
const shellCheck = await exec(wrapped, ["sh", "-c", "echo ok"]);
|
|
641
|
+
if (shellCheck.exitCode !== 0 || !shellCheck.stdout.includes("ok")) {
|
|
642
|
+
await destroyContainer(wrapped);
|
|
643
|
+
throw new Error(
|
|
644
|
+
`Docker image "${dockerConfig.image}" has no working shell (sh).
|
|
645
|
+
autoclawd requires a shell to set up the container. Use a standard base image
|
|
646
|
+
(e.g. node:20, python:3.12, ubuntu:24.04) or ensure /bin/sh is available.`
|
|
647
|
+
);
|
|
648
|
+
}
|
|
649
|
+
const curlCheck = await exec(wrapped, ["which", "curl"]);
|
|
650
|
+
if (curlCheck.exitCode !== 0) {
|
|
651
|
+
log.debug("curl not found, installing...");
|
|
652
|
+
await exec(wrapped, [
|
|
653
|
+
"sh",
|
|
654
|
+
"-c",
|
|
655
|
+
"(apt-get update -qq && apt-get install -y -qq curl 2>/dev/null) || (apk add --no-cache curl 2>/dev/null) || (yum install -y curl 2>/dev/null) || true"
|
|
656
|
+
]);
|
|
657
|
+
}
|
|
658
|
+
const gitCheck = await exec(wrapped, ["git", "--version"]);
|
|
659
|
+
if (gitCheck.exitCode !== 0) {
|
|
660
|
+
log.info("Git not found in image, installing...");
|
|
661
|
+
const installGit = await exec(wrapped, [
|
|
662
|
+
"sh",
|
|
663
|
+
"-c",
|
|
664
|
+
'(apt-get update -qq && apt-get install -y -qq git 2>/dev/null) || (apk add --no-cache git 2>/dev/null) || (yum install -y git 2>/dev/null) || (echo "Cannot install git" && exit 1)'
|
|
665
|
+
]);
|
|
666
|
+
if (installGit.exitCode !== 0) {
|
|
667
|
+
throw new Error(`Docker image "${dockerConfig.image}" has no git and autoclawd could not install it. Use an image with git pre-installed.`);
|
|
668
|
+
}
|
|
669
|
+
}
|
|
670
|
+
const claudeCheck = await exec(wrapped, ["which", "claude"]);
|
|
671
|
+
if (claudeCheck.exitCode !== 0) {
|
|
672
|
+
log.info("Installing Claude Code in container...");
|
|
673
|
+
const npmCheck = await exec(wrapped, ["which", "npm"]);
|
|
674
|
+
let installed = false;
|
|
675
|
+
if (npmCheck.exitCode === 0) {
|
|
676
|
+
const npmResult = await exec(wrapped, [
|
|
677
|
+
"npm",
|
|
678
|
+
"install",
|
|
679
|
+
"-g",
|
|
680
|
+
"@anthropic-ai/claude-code@latest"
|
|
681
|
+
]);
|
|
682
|
+
installed = npmResult.exitCode === 0;
|
|
683
|
+
}
|
|
684
|
+
if (!installed) {
|
|
685
|
+
log.info("npm not available, installing Claude Code standalone binary...");
|
|
686
|
+
const standaloneResult = await exec(wrapped, [
|
|
687
|
+
"bash",
|
|
688
|
+
"-c",
|
|
689
|
+
"curl -fsSL https://claude.ai/install.sh | bash && cp -L /root/.local/bin/claude /usr/local/bin/claude && chmod 755 /usr/local/bin/claude"
|
|
690
|
+
], { timeout: 12e4 });
|
|
691
|
+
if (standaloneResult.exitCode !== 0) {
|
|
692
|
+
throw new Error(
|
|
693
|
+
`Failed to install Claude Code. Tried npm and standalone installer.
|
|
694
|
+
Error: ${standaloneResult.stderr}
|
|
695
|
+
Use an image with Claude Code pre-installed, or ensure bash and curl are available.`
|
|
696
|
+
);
|
|
697
|
+
}
|
|
698
|
+
}
|
|
699
|
+
log.info("Claude Code installed");
|
|
700
|
+
} else {
|
|
701
|
+
log.debug("Claude Code already installed in image");
|
|
702
|
+
}
|
|
703
|
+
await exec(wrapped, [
|
|
704
|
+
"sh",
|
|
705
|
+
"-c",
|
|
706
|
+
'( getent passwd autoclawd >/dev/null 2>&1 ) || ( adduser --disabled-password --gecos "" --uid 1001 --home /home/autoclawd autoclawd 2>/dev/null ) || ( adduser -D -u 1001 -h /home/autoclawd autoclawd 2>/dev/null ) || ( useradd -m -u 1001 -d /home/autoclawd autoclawd 2>/dev/null ) || ( echo "autoclawd:x:1001:1001::/home/autoclawd:/bin/sh" >> /etc/passwd && mkdir -p /home/autoclawd ); mkdir -p /home/autoclawd && chown -R autoclawd /home/autoclawd /workspace'
|
|
707
|
+
]);
|
|
708
|
+
await autoInstallRuntimes(wrapped);
|
|
709
|
+
if (dockerConfig.setup?.length) {
|
|
710
|
+
log.info(`Running ${dockerConfig.setup.length} setup command(s)...`);
|
|
711
|
+
for (const cmd of dockerConfig.setup) {
|
|
712
|
+
const result = await exec(wrapped, ["sh", "-c", cmd]);
|
|
713
|
+
if (result.exitCode !== 0) {
|
|
714
|
+
throw new Error(`Setup command failed: ${cmd}
|
|
715
|
+
${result.stderr}`);
|
|
716
|
+
}
|
|
717
|
+
}
|
|
718
|
+
}
|
|
719
|
+
await copyClaudeCredentials(wrapped);
|
|
720
|
+
return wrapped;
|
|
721
|
+
}
|
|
722
|
+
// Inspect the mounted /workspace for well-known project manifests and install
// any missing language runtimes (Python / Go / Ruby) using apt-get, falling
// back to apk. Node is assumed to be provided by the base image.
// Best-effort: logs a warning when installation is not possible.
async function autoInstallRuntimes(container) {
  // One `ls` call prints only the manifest files that actually exist.
  const detect = await exec(container, [
    "sh",
    "-c",
    "ls /workspace/package.json /workspace/pyproject.toml /workspace/setup.py /workspace/requirements.txt /workspace/Pipfile /workspace/go.mod /workspace/Cargo.toml /workspace/Gemfile /workspace/pom.xml /workspace/build.gradle /workspace/build.gradle.kts /workspace/composer.json /workspace/mix.exs 2>/dev/null || true"
  ]);
  const files = detect.stdout.trim().split("\n").filter(Boolean);
  if (files.length === 0) return;
  const needs = [];
  // Probe which interpreters are already present ("missing" sentinel when not).
  const checks = await exec(container, [
    "sh",
    "-c",
    'echo "node:$(which node 2>/dev/null || echo missing)";echo "python:$(which python3 2>/dev/null || which python 2>/dev/null || echo missing)";echo "go:$(which go 2>/dev/null || echo missing)";echo "ruby:$(which ruby 2>/dev/null || echo missing)";'
  ]);
  const installed = new Map();
  for (const line of checks.stdout.trim().split("\n")) {
    const [lang, path] = line.split(":");
    installed.set(lang, path !== "missing");
  }
  const hasPython = files.some(
    (f) => f.includes("pyproject.toml") || f.includes("setup.py") || f.includes("requirements.txt") || f.includes("Pipfile")
  );
  const hasGo = files.some((f) => f.includes("go.mod"));
  const hasRuby = files.some((f) => f.includes("Gemfile"));
  if (hasPython && !installed.get("python")) {
    needs.push("python3 python3-pip python3-venv");
  }
  if (hasGo && !installed.get("go")) {
    needs.push("golang");
  }
  if (hasRuby && !installed.get("ruby")) {
    needs.push("ruby ruby-dev");
  }
  if (needs.length === 0) return;
  const packages = needs.join(" ");
  log.info(`Detected stack requires: ${packages}`);
  // BUGFIX: the final fallback previously was a bare `echo`, which exits 0 and
  // masked install failures — the warn branch below was unreachable. Exit 1 in
  // the fallback so failures are visible to the exitCode check.
  const installResult = await exec(container, [
    "sh",
    "-c",
    `(apt-get update -qq && apt-get install -y -qq ${packages} 2>/dev/null) || (apk add --no-cache ${packages.replace("python3-pip", "py3-pip").replace("python3-venv", "").replace("ruby-dev", "ruby-dev build-base")} 2>/dev/null) || { echo "Warning: could not auto-install ${packages}"; exit 1; }`
  ]);
  if (installResult.exitCode === 0) {
    log.success("Runtime dependencies installed");
  } else {
    log.warn(`Could not auto-install ${packages} \u2014 Claude may not be able to run tests for this stack`);
  }
}
|
|
769
|
+
// Copy the host user's Claude credentials and settings into the container so
// the `claude` CLI can authenticate as the "autoclawd" user. File contents are
// shuttled as base64 through `sh -c` to sidestep shell-quoting issues.
async function copyClaudeCredentials(container) {
  const home = homedir3();
  await exec(container, [
    "sh",
    "-c",
    "mkdir -p /home/autoclawd/.claude && chown autoclawd /home/autoclawd/.claude"
  ]);
  // Base64-encode a host file and write it into the container; returns
  // whether the host file existed.
  const copyFile = async (hostPath, containerPath) => {
    if (!existsSync3(hostPath)) return false;
    const content = readFileSync2(hostPath, "utf-8");
    const b64 = Buffer.from(content).toString("base64");
    await exec(container, [
      "sh",
      "-c",
      `echo '${b64}' | base64 -d > ${containerPath} && chown autoclawd ${containerPath}`
    ]);
    return true;
  };
  let copied = 0;
  if (await copyFile(join3(home, ".claude", ".credentials.json"), "/home/autoclawd/.claude/.credentials.json")) {
    copied++;
    log.debug("Copied .credentials.json");
  }
  if (await copyFile(join3(home, ".claude", "settings.json"), "/home/autoclawd/.claude/settings.json")) {
    copied++;
  }
  // BUGFIX: previously the success message was logged unconditionally, which
  // was misleading when no credential files existed on the host.
  if (copied > 0) {
    log.info("Claude credentials copied into container");
  } else {
    log.warn("No Claude credentials found on host (~/.claude)");
  }
}
|
|
799
|
+
// Run a command inside the container via the Docker exec API and capture its
// output. Resolves with { exitCode, stdout, stderr }; rejects only on stream
// or inspect errors.
//
// opts (all optional):
//   workdir - working directory inside the container (default "/workspace")
//   env     - array of "KEY=value" strings for the exec'd process
//   user    - user to run as (e.g. "autoclawd"); container default otherwise
//   timeout - ms before the exec is abandoned and resolved with exit code 124
//             (same convention as coreutils `timeout`)
async function exec(container, cmd, opts) {
  const execution = await container.instance.exec({
    Cmd: cmd,
    AttachStdout: true,
    AttachStderr: true,
    WorkingDir: opts?.workdir ?? "/workspace",
    Env: opts?.env,
    User: opts?.user
  });
  const stream = await execution.start({ hijack: true, stdin: false });
  let stdout = "";
  let stderr = "";
  return new Promise((resolve, reject) => {
    // Docker multiplexes stdout/stderr over one hijacked stream; demux them
    // into two PassThroughs so they can be accumulated separately.
    const stdoutStream = new PassThrough();
    const stderrStream = new PassThrough();
    docker.modem.demuxStream(stream, stdoutStream, stderrStream);
    stdoutStream.on("data", (chunk) => {
      stdout += chunk.toString();
    });
    stderrStream.on("data", (chunk) => {
      stderr += chunk.toString();
    });
    let timer;
    if (opts?.timeout) {
      timer = setTimeout(() => {
        // Destroying the stream abandons the exec on our side; the process
        // may keep running inside the container. Resolve (not reject) so a
        // timeout looks like any other non-zero exit to callers. A later
        // "end" resolve is harmless — a promise settles only once.
        stream.destroy();
        resolve({ exitCode: 124, stdout, stderr: stderr + "\n[autoclawd] exec timed out" });
      }, opts.timeout);
    }
    stream.on("end", async () => {
      if (timer) clearTimeout(timer);
      try {
        // inspect() reports the actual exit code once the exec has finished;
        // default to 1 if Docker reports none.
        const info = await execution.inspect();
        resolve({ exitCode: info.ExitCode ?? 1, stdout, stderr });
      } catch (e) {
        reject(e);
      }
    });
    stream.on("error", (e) => {
      if (timer) clearTimeout(timer);
      reject(e);
    });
  });
}
|
|
843
|
+
// Stop and remove any leftover "autoclawd-" containers from previous runs.
// Failures on individual containers are ignored (best-effort cleanup).
// Returns the number of containers successfully removed.
async function cleanupOrphanedContainers() {
  const all = await docker.listContainers({ all: true });
  let cleaned = 0;
  for (const info of all) {
    const isOrphan = info.Names.some((name) => name.startsWith("/autoclawd-"));
    if (!isOrphan) continue;
    try {
      const handle = docker.getContainer(info.Id);
      if (info.State === "running") {
        await handle.stop({ t: 5 });
      }
      await handle.remove({ force: true });
      log.info(`Cleaned up orphaned container ${info.Names[0]} (${info.Id.slice(0, 12)})`);
      cleaned++;
    } catch {
      // container vanished or Docker refused — skip it
    }
  }
  return cleaned;
}
|
|
863
|
+
// Stop (5s grace) and force-remove the container, ignoring errors from
// containers that are already stopped or gone.
async function destroyContainer(container) {
  const teardown = [
    () => container.instance.stop({ t: 5 }),
    () => container.instance.remove({ force: true })
  ];
  for (const step of teardown) {
    try {
      await step();
    } catch {
      // best-effort: container may already be stopped or removed
    }
  }
  log.info(`Container ${container.id.slice(0, 12)} destroyed`);
}
|
|
874
|
+
// Parse a Docker-style memory string ("4g", "512m", "1.5k", case-insensitive
// unit) into a byte count. Throws on malformed input or non-positive amounts.
function parseMemory(mem) {
  const parsed = mem.match(/^(\d+(?:\.\d+)?)(k|m|g|t)$/i);
  if (parsed === null) {
    throw new Error(`Invalid memory format: "${mem}". Use a number followed by k, m, g, or t (e.g. "4g", "512m")`);
  }
  const amount = parseFloat(parsed[1]);
  if (amount <= 0) {
    throw new Error(`Memory must be greater than 0: "${mem}"`);
  }
  const UNIT_BYTES = {
    k: 1024,
    m: 1024 * 1024,
    g: 1024 * 1024 * 1024,
    t: 1024 * 1024 * 1024 * 1024
  };
  return Math.round(amount * UNIT_BYTES[parsed[2].toLowerCase()]);
}
|
|
888
|
+
|
|
889
|
+
// src/agent.ts
|
|
890
|
+
// Default per-iteration wall-clock budget for a Claude Code run: 30 minutes.
var DEFAULT_ITERATION_TIMEOUT_MS = 30 * 60 * 1e3;
// Heuristics for spotting API rate limiting / overload in agent output.
var RATE_LIMIT_PATTERNS = [
  /rate.?limit/i,
  /too many requests/i,
  /\b429\b.*(?:error|too many|rate|limit|retry)/i,
  /(?:error|status|code|http)[:\s]*429\b/i,
  /(?:api|server|model)\s+(?:is\s+)?overloaded/i,
  /over\s*capacity/i
];
// Markers the agent emits (or phrases it uses) when it considers work done.
var COMPLETION_PATTERNS = [
  /\[autoclawd:done\]/i,
  /all tasks? completed/i
];
// True when the combined agent output looks rate-limited / overloaded.
function isRateLimited(output) {
  return RATE_LIMIT_PATTERNS.some((p) => p.test(output));
}
// True when the combined agent output contains a completion marker.
function isCompleted(output) {
  return COMPLETION_PATTERNS.some((p) => p.test(output));
}
// Extract a "retry after N" hint (seconds) from the output and convert to ms;
// defaults to 60s when no hint is present.
function estimateResetMs(output) {
  const match = output.match(/retry.?after[:\s]*(\d+)/i);
  // Idiom fix: always pass the radix to parseInt.
  if (match) return Number.parseInt(match[1], 10) * 1e3;
  return 6e4;
}
// Give up after this many consecutive rate-limit pauses.
var MAX_RATE_LIMIT_RETRIES = 10;
|
|
915
|
+
// Drive Claude Code inside the container until the agent reports completion,
// the iteration cap is reached, or rate-limit retries are exhausted.
// Returns { iterations, success, lastOutput }.
async function runAgentLoop(opts) {
  const { container, agentConfig, prompt, ticketId, onIteration } = opts;
  let lastOutput = "";
  let rateLimitRetries = 0;
  // Write the prompt to a file via base64 so arbitrary prompt content cannot
  // break shell quoting.
  const promptB64 = Buffer.from(prompt).toString("base64");
  await exec(container, [
    "sh",
    "-c",
    `mkdir -p /tmp/autoclawd && echo '${promptB64}' | base64 -d > /tmp/autoclawd/prompt.md && chmod 644 /tmp/autoclawd/prompt.md`
  ]);
  for (let i = 1; i <= agentConfig.maxIterations; i++) {
    log.ticket(ticketId, `Iteration ${i}/${agentConfig.maxIterations}`);
    // The prompt is read back from the file with $(cat ...) at run time.
    const cmd = [
      "sh",
      "-c",
      [
        "claude",
        "-p",
        '"$(cat /tmp/autoclawd/prompt.md)"',
        "--model",
        agentConfig.model,
        "--output-format",
        "text",
        "--max-turns",
        "50",
        "--dangerously-skip-permissions",
        "--verbose"
      ].join(" ")
    ];
    // agentConfig.timeout is in seconds; fall back to the 30-minute default.
    const timeoutMs = agentConfig.timeout ? agentConfig.timeout * 1e3 : DEFAULT_ITERATION_TIMEOUT_MS;
    const result = await exec(container, cmd, {
      user: "autoclawd",
      env: ["HOME=/home/autoclawd", "CLAUDE_CODE_DISABLE_NONINTERACTIVE_TUTORIAL=1"],
      timeout: timeoutMs
    });
    const combined = result.stdout + result.stderr;
    lastOutput = combined;
    const snippet = combined.slice(-500).trim();
    if (snippet) {
      log.debug(`Agent output (last 500 chars): ${snippet}`);
    }
    const rateLimited = isRateLimited(combined);
    // Completed when the output carries a completion marker, or when any
    // follow-up iteration (i > 1) exits cleanly (&& binds tighter than ||).
    const completed = isCompleted(combined) || result.exitCode === 0 && i > 1;
    const agentResult = {
      iteration: i,
      exitCode: result.exitCode,
      stdout: result.stdout,
      stderr: result.stderr,
      rateLimited,
      completed
    };
    onIteration?.(agentResult);
    if (completed) {
      log.ticket(ticketId, `Completed after ${i} iterations`);
      return { iterations: i, success: true, lastOutput };
    }
    if (rateLimited) {
      rateLimitRetries++;
      if (rateLimitRetries > MAX_RATE_LIMIT_RETRIES) {
        log.ticket(ticketId, `Rate limited ${rateLimitRetries} times, giving up`);
        return { iterations: i, success: false, lastOutput };
      }
      const waitMs = estimateResetMs(combined);
      log.ticket(ticketId, `Rate limited (${rateLimitRetries}/${MAX_RATE_LIMIT_RETRIES}) \u2014 pausing ${Math.round(waitMs / 1e3)}s`);
      await sleep2(waitMs);
      // Retry the same iteration: the decrement cancels the loop's i++ so a
      // rate-limit pause does not consume the iteration budget.
      i--;
      continue;
    }
    if (result.exitCode !== 0) {
      log.ticket(ticketId, `Non-zero exit (${result.exitCode}), continuing...`);
    }
  }
  log.ticket(ticketId, `Max iterations (${agentConfig.maxIterations}) reached`);
  return { iterations: agentConfig.maxIterations, success: false, lastOutput };
}
|
|
990
|
+
// Promise-based delay: resolves after `ms` milliseconds.
function sleep2(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
|
993
|
+
|
|
994
|
+
// src/git.ts
|
|
995
|
+
// Configure git identity inside the container and mark /workspace as a safe
// directory (it is a bind mount owned by a different uid on the host).
async function setupRepo(container, opts) {
  const asAgent = { user: "autoclawd" };
  const identity = [
    ["user.name", "autoclawd"],
    ["user.email", "autoclawd@autoclawd.dev"]
  ];
  for (const [key, value] of identity) {
    await exec(container, ["git", "config", key, value], asAgent);
  }
  await exec(container, ["git", "config", "--global", "safe.directory", "/workspace"], asAgent);
  log.info(`Git configured, on branch ${opts.branchName}`);
}
|
|
1002
|
+
// Commit any uncommitted changes in /workspace, enforce safety checks
// (branch-prefix and max-file-change caps), then push the branch to origin
// using a token-backed credential helper. Returns { pushed, commitCount }.
// Throws when a safety check fails.
async function commitAndPush(container, opts) {
  const { branchName, ticketId, title, githubToken, branchPrefix, maxFileChanges } = opts;
  const u = { user: "autoclawd" };
  // Safety gate 1: only push branches that carry the configured prefix.
  if (branchPrefix && !branchName.startsWith(branchPrefix)) {
    log.error(`Safety: branch "${branchName}" does not start with required prefix "${branchPrefix}"`);
    throw new Error(`Branch "${branchName}" does not start with required prefix "${branchPrefix}"`);
  }
  log.info(`Safety: branch prefix check passed for "${branchName}"`);
  // Commit anything the agent left uncommitted.
  const status = await exec(container, ["git", "status", "--porcelain"], u);
  if (status.stdout.trim()) {
    await exec(container, ["git", "add", "-A"], u);
    const message = `${ticketId}: ${title}`;
    await exec(container, ["git", "commit", "-m", message], u);
  }
  // Count local commits not present on any remote ref.
  const countResult = await exec(container, [
    "git",
    "rev-list",
    "--count",
    `HEAD`,
    "--not",
    "--remotes"
  ], u);
  const commitCount = parseInt(countResult.stdout.trim()) || 0;
  if (commitCount === 0) {
    log.warn("No commits to push");
    return { pushed: false, commitCount: 0 };
  }
  // Safety gate 2: cap how many files the unpushed commits may touch.
  if (maxFileChanges !== void 0) {
    // NOTE(review): `git diff --name-only HEAD --not --remotes` uses rev-list
    // style arguments with `git diff` — confirm this produces the intended
    // file list against the remote.
    const diffResult = await exec(container, [
      "git",
      "diff",
      "--name-only",
      "HEAD",
      "--not",
      "--remotes"
    ], u);
    const changedFiles = diffResult.stdout.trim().split("\n").filter(Boolean).length;
    if (changedFiles > maxFileChanges) {
      log.error(`Safety: ${changedFiles} files changed, exceeds maxFileChanges limit of ${maxFileChanges}`);
      throw new Error(`Too many file changes: ${changedFiles} files changed (limit: ${maxFileChanges}). Review changes manually.`);
    }
    log.info(`Safety: file change count (${changedFiles}) within limit (${maxFileChanges})`);
  }
  // Supply the token through a git credential helper so it stays off the
  // push command line. Note it is stored in the workspace's git config.
  await exec(container, [
    "sh",
    "-c",
    `git config credential.helper '!f() { echo "password=${githubToken}"; echo "username=x-access-token"; }; f'`
  ], u);
  const pushResult = await exec(container, ["git", "push", "-u", "origin", branchName], u);
  if (pushResult.exitCode !== 0) {
    // Redact any token that git echoed back in a URL before logging.
    const safeStderr = pushResult.stderr.replace(/x-access-token:[^\s@]+/g, "x-access-token:***");
    log.error(`Push failed: ${safeStderr}`);
    return { pushed: false, commitCount };
  }
  log.success(`Pushed ${commitCount} commit(s) to ${branchName}`);
  return { pushed: true, commitCount };
}
|
|
1059
|
+
|
|
1060
|
+
// src/worker.ts
|
|
1061
|
+
import { mkdtempSync as mkdtempSync2, rmSync as rmSync2, writeFileSync as writeFileSync2 } from "fs";
|
|
1062
|
+
import { join as join5 } from "path";
|
|
1063
|
+
import { tmpdir as tmpdir2 } from "os";
|
|
1064
|
+
import { execSync as execSync2 } from "child_process";
|
|
1065
|
+
|
|
1066
|
+
// src/db.ts
|
|
1067
|
+
import Database from "better-sqlite3";
|
|
1068
|
+
import { join as join4 } from "path";
|
|
1069
|
+
import { mkdirSync as mkdirSync2, existsSync as existsSync4 } from "fs";
|
|
1070
|
+
// SQLite state file lives under the autoclawd config directory.
var DB_PATH = join4(CONFIG_DIR, "autoclawd.db");
// Lazily-created singleton database handle (see getDb / closeDb).
var _db;
// Open (or return the cached) better-sqlite3 handle, creating the schema and
// applying additive column migrations on first use.
function getDb() {
  if (_db) return _db;
  if (!existsSync4(CONFIG_DIR)) {
    mkdirSync2(CONFIG_DIR, { recursive: true });
  }
  _db = new Database(DB_PATH);
  // WAL lets readers proceed while a write is in progress.
  _db.pragma("journal_mode = WAL");
  _db.exec(`
    CREATE TABLE IF NOT EXISTS runs (
      id TEXT PRIMARY KEY,
      ticket_id TEXT NOT NULL,
      status TEXT NOT NULL DEFAULT 'queued',
      branch TEXT,
      pr_url TEXT,
      started_at TEXT NOT NULL DEFAULT (datetime('now')),
      finished_at TEXT,
      error TEXT,
      iterations INTEGER NOT NULL DEFAULT 0,
      model TEXT,
      image TEXT,
      duration_secs INTEGER
    );
    CREATE TABLE IF NOT EXISTS queue (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      ticket_id TEXT NOT NULL,
      payload TEXT NOT NULL,
      created_at TEXT NOT NULL DEFAULT (datetime('now'))
    );
  `);
  // Migration for databases created before model/image/duration_secs existed:
  // add any columns missing from the live table.
  const cols = _db.prepare("PRAGMA table_info(runs)").all();
  const colNames = new Set(cols.map((c) => c.name));
  if (!colNames.has("model")) _db.exec("ALTER TABLE runs ADD COLUMN model TEXT");
  if (!colNames.has("image")) _db.exec("ALTER TABLE runs ADD COLUMN image TEXT");
  if (!colNames.has("duration_secs")) _db.exec("ALTER TABLE runs ADD COLUMN duration_secs INTEGER");
  return _db;
}
|
|
1108
|
+
// Close the shared database handle (if open) and reset the singleton so the
// next getDb() call reopens it.
function closeDb() {
  if (_db) {
    _db.close();
  }
  _db = undefined;
}
|
|
1112
|
+
// Create (or overwrite) the run row for a ticket; id doubles as ticket_id.
function insertRun(ticketId, status = "running") {
  getDb()
    .prepare(
      "INSERT OR REPLACE INTO runs (id, ticket_id, status, started_at) VALUES (?, ?, ?, datetime('now'))"
    )
    .run(ticketId, ticketId, status);
}
|
|
1118
|
+
// Apply a partial update to a run row; keys of `update` map to column names
// (internal callers only — keys are never user input).
function updateRun(ticketId, update) {
  const entries = Object.entries(update);
  if (entries.length === 0) return;
  const assignments = entries.map(([column]) => `${column} = ?`).join(", ");
  const params = entries.map(([, value]) => value);
  params.push(ticketId);
  getDb().prepare(`UPDATE runs SET ${assignments} WHERE id = ?`).run(...params);
}
|
|
1130
|
+
// Mark a run as finished with the given terminal status and optional error.
function finishRun(ticketId, status, error) {
  const finishedAt = new Date().toISOString();
  updateRun(ticketId, { status, finished_at: finishedAt, error: error ?? null });
}
|
|
1133
|
+
// Fetch the run row for a ticket, or undefined when none exists.
function getRunByTicketId(ticketId) {
  return getDb().prepare("SELECT * FROM runs WHERE ticket_id = ?").get(ticketId);
}
|
|
1137
|
+
// All runs currently in the 'running' state.
function getActiveRuns() {
  return getDb().prepare("SELECT * FROM runs WHERE status = 'running'").all();
}
|
|
1141
|
+
// True when the ticket already has a terminal (success/failed) run recorded.
function isTicketProcessed(ticketId) {
  const hit = getDb()
    .prepare(
      "SELECT 1 FROM runs WHERE ticket_id = ? AND status IN ('success', 'failed')"
    )
    .get(ticketId);
  return hit !== undefined;
}
|
|
1148
|
+
// True when the ticket has a run currently marked 'running'.
function isTicketActive(ticketId) {
  const hit = getDb()
    .prepare(
      "SELECT 1 FROM runs WHERE ticket_id = ? AND status = 'running'"
    )
    .get(ticketId);
  return hit !== undefined;
}
|
|
1155
|
+
// Append a ticket to the work queue, storing the full ticket as JSON.
function enqueue(ticket) {
  const payload = JSON.stringify(ticket);
  getDb().prepare("INSERT INTO queue (ticket_id, payload) VALUES (?, ?)").run(ticket.id, payload);
}
|
|
1162
|
+
// Pop the oldest queued ticket (FIFO). Returns the parsed ticket, or
// undefined when the queue is empty.
function dequeue() {
  const db = getDb();
  const head = db.prepare("SELECT * FROM queue ORDER BY id ASC LIMIT 1").get();
  if (head === undefined) return undefined;
  db.prepare("DELETE FROM queue WHERE id = ?").run(head.id);
  return JSON.parse(head.payload);
}
|
|
1169
|
+
// Drop every queued entry for the given ticket id.
function removeFromQueue(ticketId) {
  getDb().prepare("DELETE FROM queue WHERE ticket_id = ?").run(ticketId);
}
|
|
1173
|
+
// Number of tickets currently waiting in the queue.
function getQueueLength() {
  const { count } = getDb().prepare("SELECT COUNT(*) as count FROM queue").get();
  return count;
}
|
|
1178
|
+
// All queued tickets in FIFO order, parsed from their JSON payloads.
function getQueuedTickets() {
  return getDb()
    .prepare("SELECT * FROM queue ORDER BY id ASC")
    .all()
    .map((row) => JSON.parse(row.payload));
}
|
|
1183
|
+
// List recent runs, newest first. opts.limit caps the result (default 20);
// opts.failedOnly restricts to failed runs.
function queryRuns(opts) {
  const limit = opts?.limit ?? 20;
  const failedOnly = opts?.failedOnly ?? false;
  const whereClause = failedOnly ? "WHERE status = 'failed'" : "";
  return getDb()
    .prepare(`SELECT * FROM runs ${whereClause} ORDER BY started_at DESC LIMIT ?`)
    .all(limit);
}
|
|
1192
|
+
// Total number of run rows ever recorded.
function countRuns() {
  const { count } = getDb().prepare("SELECT COUNT(*) as count FROM runs").get();
  return count;
}
|
|
1197
|
+
|
|
1198
|
+
// src/worker.ts
|
|
1199
|
+
// Assemble the agent prompt: optional repo-level preamble, the ticket header
// and description, then the fixed instruction list.
function buildPrompt(ticket, repoPrompt) {
  const lines = [];
  if (repoPrompt) {
    lines.push(repoPrompt, "");
  }
  lines.push(`## Task: ${ticket.identifier} \u2014 ${ticket.title}`, "");
  if (ticket.description) {
    lines.push(ticket.description);
  }
  lines.push(
    "",
    "## Instructions",
    "- Start by reading CLAUDE.md if it exists \u2014 it has project-specific guidance",
    "- Read the codebase to understand the context before making changes",
    "- Work through the task step by step",
    '- After making changes, run: git add -A && git commit -m "description of changes"',
    "- Commit after each logical step (do not batch all changes into one commit)",
    "- If the task has a checklist, work through items in order and commit after each one",
    "- Tests MUST import from the actual source modules \u2014 never re-implement or duplicate source code in tests",
    "- If the project uses TypeScript but tests are plain JS, install tsx or ts-node so tests can import .ts files",
    "- Run any existing tests before finishing to verify your changes",
    "- IMPORTANT: You MUST commit your changes with git before finishing",
    "- When all work is complete and committed, include [autoclawd:done] in your final output"
  );
  return lines.join("\n");
}
|
|
1225
|
+
// Turn arbitrary text into a branch-name-safe slug: lowercase, non-alnum runs
// collapsed to "-", edge dashes trimmed, capped at 50 chars.
function slugify(s) {
  const lowered = s.toLowerCase();
  const dashed = lowered.replace(/[^a-z0-9]+/g, "-");
  const trimmed = dashed.replace(/^-|-$/g, "");
  return trimmed.slice(0, 50);
}
|
|
1228
|
+
// Reject git ref names that are empty, start with "-", or contain whitespace,
// git-ref metacharacters (~ ^ : ? * [ \) or "..". Throws with the given label.
function assertSafeRef(name, label) {
  const unsafe =
    !name ||
    name.startsWith("-") ||
    /[\s~^:?*\[\\]|\.\./.test(name);
  if (unsafe) {
    throw new Error(`Invalid ${label}: "${name}"`);
  }
}
|
|
1233
|
+
// Write a throwaway GIT_ASKPASS helper script that prints the token, in a
// fresh 0700 temp dir. Caller must rmSync the returned dir when done.
// Returns { dir, path }.
function createAskpass(githubToken) {
  const dir = mkdtempSync2(join5(tmpdir2(), "autoclawd-cred-"));
  const path = join5(dir, "askpass.sh");
  // BUGFIX: the token was previously interpolated inside double quotes
  // (echo "<token>"), so $, backtick, " or \ in a token would be expanded by
  // the shell. Single-quote it, escaping embedded single quotes, and emit
  // with printf so the token is reproduced verbatim.
  const quoted = `'${githubToken.replace(/'/g, "'\\''")}'`;
  writeFileSync2(path, `#!/bin/sh
printf '%s\n' ${quoted}
`, { mode: 0o700 }); // 0o700 === 448: owner-only, executable
  return { dir, path };
}
|
|
1241
|
+
// Environment for spawned git commands: inherit the process env, point
// GIT_ASKPASS at the helper script, and forbid interactive prompts.
function gitEnv(askpassPath) {
  const env = { ...process.env };
  env.GIT_ASKPASS = askpassPath;
  env.GIT_TERMINAL_PROMPT = "0";
  return env;
}
|
|
1244
|
+
// Resolve the remote's default branch by asking `git ls-remote --symref` what
// HEAD points at. Any failure (network, auth, unexpected output) falls back
// to "main".
function detectDefaultBranch(repoUrl, githubToken) {
  const askpass = createAskpass(githubToken);
  // Username goes in the URL; the askpass helper supplies the token as the
  // password so it never appears on the command line.
  const authedUrl = repoUrl.replace("https://", "https://x-access-token@");
  try {
    const output = execSync2(
      `git ls-remote --symref -- ${authedUrl} HEAD`,
      { stdio: "pipe", timeout: 3e4, env: gitEnv(askpass.path), encoding: "utf-8" }
    );
    // Expected first line: "ref: refs/heads/<branch>\tHEAD"
    const match = output.match(/ref:\s+refs\/heads\/(\S+)\s+HEAD/);
    if (match) return match[1];
  } catch {
    // fall through to the "main" default
  } finally {
    // The askpass dir contains the token — always remove it.
    rmSync2(askpass.dir, { recursive: true, force: true });
  }
  return "main";
}
|
|
1260
|
+
// Shallow-clone (depth 50) the given branch of repoUrl into a fresh temp dir,
// retrying transient failures via retrySync. Returns the workdir path; on
// clone failure the partially-created dir is removed before rethrowing.
async function cloneToTemp(repoUrl, baseBranch, githubToken) {
  // Reject branch names with git-ref metacharacters before interpolating
  // them into the shell command below.
  assertSafeRef(baseBranch, "base branch");
  return retrySync(() => {
    const workDir = mkdtempSync2(join5(tmpdir2(), "autoclawd-"));
    const askpass = createAskpass(githubToken);
    const authedUrl = repoUrl.replace("https://", "https://x-access-token@");
    try {
      // "--" ends option parsing so the URL/dir cannot be read as flags.
      execSync2(`git clone --depth=50 -b ${baseBranch} -- ${authedUrl} ${workDir}`, {
        stdio: "pipe",
        timeout: 12e4,
        env: gitEnv(askpass.path)
      });
    } catch (err) {
      rmSync2(workDir, { recursive: true, force: true });
      throw err;
    } finally {
      // The askpass dir contains the token — always remove it.
      rmSync2(askpass.dir, { recursive: true, force: true });
    }
    return workDir;
  }, { label: "git clone", retryIf: isTransientError });
}
|
|
1281
|
+
// Run each validation command inside the container (5-minute cap per
// command) and collect details for any that fail.
// Returns an array of { command, exitCode, output } failure records.
async function runValidation(container, commands, ticketId) {
  const failures = [];
  for (const command of commands) {
    log.ticket(ticketId, `Validating: ${command}`);
    const result = await exec(container, ["sh", "-c", command], {
      user: "autoclawd",
      env: ["HOME=/home/autoclawd"],
      timeout: 5 * 60 * 1e3
      // 5 min per validation command
    });
    if (result.exitCode === 0) {
      log.ticket(ticketId, ` PASS: ${command}`);
      continue;
    }
    log.ticket(ticketId, ` FAIL: ${command} (exit ${result.exitCode})`);
    const combinedOutput = (result.stdout + result.stderr).trim();
    failures.push({
      command,
      exitCode: result.exitCode,
      output: combinedOutput.slice(-3e3)
      // Cap output to avoid huge prompts
    });
  }
  return failures;
}
|
|
1306
|
+
// Build a follow-up prompt asking the agent to repair the given validation
// failures: one fenced-output section per failure, then fixed instructions.
function buildValidationFixPrompt(failures) {
  const sections = [
    "## Validation Failed",
    "",
    "The following validation commands failed after your changes. Please fix the issues and commit your fixes.",
    ""
  ];
  for (const failure of failures) {
    sections.push(
      `### Failed: \`${failure.command}\` (exit code ${failure.exitCode})`,
      "```",
      failure.output,
      "```",
      ""
    );
  }
  sections.push(
    "## Instructions",
    "- Read the error output carefully and fix the root cause",
    "- Run the failing command(s) to verify your fix",
    '- Commit your fixes with: git add -A && git commit -m "fix: address validation failures"',
    "- When done, include [autoclawd:done] in your output"
  );
  return sections.join("\n");
}
|
|
1326
|
+
async function executeTicket(opts) {
|
|
1327
|
+
const { ticket, config, linearClient, octokit, force } = opts;
|
|
1328
|
+
const startedAt = /* @__PURE__ */ new Date();
|
|
1329
|
+
function recordRun(result, branch) {
|
|
1330
|
+
const finishedAt = /* @__PURE__ */ new Date();
|
|
1331
|
+
const durationSecs = Math.round((finishedAt.getTime() - startedAt.getTime()) / 1e3);
|
|
1332
|
+
try {
|
|
1333
|
+
insertRun(result.ticketId, result.success ? "success" : "failed");
|
|
1334
|
+
updateRun(result.ticketId, {
|
|
1335
|
+
status: result.success ? "success" : "failed",
|
|
1336
|
+
branch: branch ?? null,
|
|
1337
|
+
pr_url: result.prUrl ?? null,
|
|
1338
|
+
error: result.error ?? null,
|
|
1339
|
+
iterations: result.iterations,
|
|
1340
|
+
model: config.agent.model,
|
|
1341
|
+
image: config.docker.image,
|
|
1342
|
+
finished_at: finishedAt.toISOString(),
|
|
1343
|
+
duration_secs: durationSecs
|
|
1344
|
+
});
|
|
1345
|
+
} catch {
|
|
1346
|
+
}
|
|
1347
|
+
}
|
|
1348
|
+
if (!force) {
|
|
1349
|
+
const existingRun = getRunByTicketId(ticket.identifier);
|
|
1350
|
+
if (existingRun?.status === "success") {
|
|
1351
|
+
log.info(`${ticket.identifier}: already completed, use --force to re-run`);
|
|
1352
|
+
return {
|
|
1353
|
+
ticketId: ticket.identifier,
|
|
1354
|
+
success: true,
|
|
1355
|
+
prUrl: existingRun.pr_url ?? void 0,
|
|
1356
|
+
iterations: existingRun.iterations
|
|
1357
|
+
};
|
|
1358
|
+
}
|
|
1359
|
+
}
|
|
1360
|
+
insertRun(ticket.identifier, "running");
|
|
1361
|
+
if (!ticket.repoUrl) {
|
|
1362
|
+
const result = {
|
|
1363
|
+
ticketId: ticket.identifier,
|
|
1364
|
+
success: false,
|
|
1365
|
+
error: "No repo:owner/name label on ticket",
|
|
1366
|
+
iterations: 0
|
|
1367
|
+
};
|
|
1368
|
+
recordRun(result);
|
|
1369
|
+
return result;
|
|
1370
|
+
}
|
|
1371
|
+
if (!ticket.repoUrl.startsWith("https://")) {
|
|
1372
|
+
const result = {
|
|
1373
|
+
ticketId: ticket.identifier,
|
|
1374
|
+
success: false,
|
|
1375
|
+
error: `Invalid repo URL: "${ticket.repoUrl}". Only HTTPS URLs are supported.`,
|
|
1376
|
+
iterations: 0
|
|
1377
|
+
};
|
|
1378
|
+
recordRun(result);
|
|
1379
|
+
return result;
|
|
1380
|
+
}
|
|
1381
|
+
if (!isRepoAllowed(ticket.repoUrl, config.safety.allowedRepos)) {
|
|
1382
|
+
log.warn(`Safety: repo ${ticket.repoUrl} not in allowedRepos list`);
|
|
1383
|
+
return {
|
|
1384
|
+
ticketId: ticket.identifier,
|
|
1385
|
+
success: false,
|
|
1386
|
+
error: `Repo not allowed: "${ticket.repoUrl}". Check safety.allowedRepos in config.`,
|
|
1387
|
+
iterations: 0
|
|
1388
|
+
};
|
|
1389
|
+
}
|
|
1390
|
+
log.info(`Safety: repo ${ticket.repoUrl} allowed`);
|
|
1391
|
+
const branchName = `autoclawd/${ticket.identifier}-${slugify(ticket.title)}`;
|
|
1392
|
+
const containerName = `${ticket.identifier}-${Date.now()}`.toLowerCase().replace(/[^a-z0-9_.-]/g, "-");
|
|
1393
|
+
let workDir;
|
|
1394
|
+
let container;
|
|
1395
|
+
try {
|
|
1396
|
+
await claimTicket(linearClient, config, ticket.id);
|
|
1397
|
+
log.ticket(ticket.identifier, `Claimed \u2014 ${ticket.repoUrl}`);
|
|
1398
|
+
const detectedBase = detectDefaultBranch(ticket.repoUrl, config.github.token);
|
|
1399
|
+
log.ticket(ticket.identifier, `Default branch: ${detectedBase}`);
|
|
1400
|
+
workDir = await cloneToTemp(ticket.repoUrl, detectedBase, config.github.token);
|
|
1401
|
+
log.ticket(ticket.identifier, "Cloned repo");
|
|
1402
|
+
const repoLocal = loadRepoLocalConfig(workDir);
|
|
1403
|
+
const actualBase = ticket.baseBranch ?? repoLocal?.base ?? detectedBase;
|
|
1404
|
+
const { agent, docker: docker2, prompt, validate } = mergeConfigs(config, repoLocal);
|
|
1405
|
+
if (repoLocal) {
|
|
1406
|
+
log.ticket(ticket.identifier, "Loaded .autoclawd.yaml from repo");
|
|
1407
|
+
}
|
|
1408
|
+
if (ticket.baseBranch) {
|
|
1409
|
+
log.ticket(ticket.identifier, `Stacked on branch: ${ticket.baseBranch}`);
|
|
1410
|
+
}
|
|
1411
|
+
if (actualBase !== detectedBase) {
|
|
1412
|
+
rmSync2(workDir, { recursive: true, force: true });
|
|
1413
|
+
workDir = await cloneToTemp(ticket.repoUrl, actualBase, config.github.token);
|
|
1414
|
+
}
|
|
1415
|
+
execSync2(`git checkout -B ${branchName} --`, { cwd: workDir, stdio: "pipe" });
|
|
1416
|
+
container = await createContainer({
|
|
1417
|
+
dockerConfig: docker2,
|
|
1418
|
+
workspacePath: workDir,
|
|
1419
|
+
name: containerName
|
|
1420
|
+
});
|
|
1421
|
+
await setupRepo(container, { branchName });
|
|
1422
|
+
const agentPrompt = buildPrompt(ticket, prompt);
|
|
1423
|
+
let agentResult = await runAgentLoop({
|
|
1424
|
+
container,
|
|
1425
|
+
agentConfig: agent,
|
|
1426
|
+
prompt: agentPrompt,
|
|
1427
|
+
ticketId: ticket.identifier
|
|
1428
|
+
});
|
|
1429
|
+
const gitResult = await commitAndPush(container, {
|
|
1430
|
+
branchName,
|
|
1431
|
+
ticketId: ticket.identifier,
|
|
1432
|
+
title: ticket.title,
|
|
1433
|
+
repoUrl: ticket.repoUrl,
|
|
1434
|
+
githubToken: config.github.token,
|
|
1435
|
+
branchPrefix: config.safety.branchPrefix,
|
|
1436
|
+
maxFileChanges: config.safety.maxFileChanges
|
|
1437
|
+
});
|
|
1438
|
+
if (!gitResult.pushed) {
|
|
1439
|
+
await failTicket(linearClient, ticket.id, "No changes produced");
|
|
1440
|
+
finishRun(ticket.identifier, "failed", "No changes produced");
|
|
1441
|
+
const result2 = {
|
|
1442
|
+
ticketId: ticket.identifier,
|
|
1443
|
+
success: false,
|
|
1444
|
+
error: "No changes produced",
|
|
1445
|
+
iterations: agentResult.iterations
|
|
1446
|
+
};
|
|
1447
|
+
recordRun(result2, branchName);
|
|
1448
|
+
return result2;
|
|
1449
|
+
}
|
|
1450
|
+
const buildPRBody = (opts2) => {
|
|
1451
|
+
const prBody = [
|
|
1452
|
+
`Resolves [${ticket.identifier}](${ticket.url})`,
|
|
1453
|
+
""
|
|
1454
|
+
];
|
|
1455
|
+
if (ticket.description) {
|
|
1456
|
+
const desc = ticket.description.length > 2e3 ? ticket.description.slice(0, 2e3) + "\n\n*[description truncated]*" : ticket.description;
|
|
1457
|
+
prBody.push("## Description", "", desc, "");
|
|
1458
|
+
}
|
|
1459
|
+
if (opts2?.validationWarnings && opts2.validationWarnings.length > 0) {
|
|
1460
|
+
prBody.push("## :warning: Validation Failures", "");
|
|
1461
|
+
prBody.push("The following validation commands were still failing after all iterations:", "");
|
|
1462
|
+
for (const f of opts2.validationWarnings) {
|
|
1463
|
+
prBody.push(`### \`${f.command}\` (exit code ${f.exitCode})`, "");
|
|
1464
|
+
prBody.push("<details><summary>Output</summary>", "", "```", f.output, "```", "", "</details>", "");
|
|
1465
|
+
}
|
|
1466
|
+
}
|
|
1467
|
+
prBody.push(
|
|
1468
|
+
"## Details",
|
|
1469
|
+
"",
|
|
1470
|
+
`| Metric | Value |`,
|
|
1471
|
+
`| --- | --- |`,
|
|
1472
|
+
`| Iterations | ${agentResult.iterations} |`,
|
|
1473
|
+
`| Commits | ${opts2?.commitCount ?? gitResult.commitCount} |`,
|
|
1474
|
+
`| Model | ${agent.model} |`,
|
|
1475
|
+
"",
|
|
1476
|
+
"---",
|
|
1477
|
+
"*Generated by [autoclawd](https://github.com/rahul-fnu/autoclawd)*"
|
|
1478
|
+
);
|
|
1479
|
+
return prBody.join("\n");
|
|
1480
|
+
};
|
|
1481
|
+
const hasValidation = validate && validate.length > 0;
|
|
1482
|
+
const pr = await openPR(octokit, {
|
|
1483
|
+
repoUrl: ticket.repoUrl,
|
|
1484
|
+
branch: branchName,
|
|
1485
|
+
baseBranch: actualBase,
|
|
1486
|
+
title: `${ticket.identifier}: ${ticket.title}`,
|
|
1487
|
+
body: buildPRBody({ commitCount: gitResult.commitCount }),
|
|
1488
|
+
draft: hasValidation ? true : false
|
|
1489
|
+
});
|
|
1490
|
+
if (hasValidation) {
|
|
1491
|
+
let iterationsUsed = agentResult.iterations;
|
|
1492
|
+
let lastFailures = [];
|
|
1493
|
+
let totalCommitCount = gitResult.commitCount;
|
|
1494
|
+
while (iterationsUsed < agent.maxIterations) {
|
|
1495
|
+
const failures = await runValidation(container, validate, ticket.identifier);
|
|
1496
|
+
if (failures.length === 0) {
|
|
1497
|
+
log.ticket(ticket.identifier, "All validations passed");
|
|
1498
|
+
lastFailures = [];
|
|
1499
|
+
break;
|
|
1500
|
+
}
|
|
1501
|
+
lastFailures = failures;
|
|
1502
|
+
log.ticket(ticket.identifier, `${failures.length} validation(s) failed, feeding back to Claude`);
|
|
1503
|
+
const fixPrompt = buildValidationFixPrompt(failures);
|
|
1504
|
+
const fixResult = await runAgentLoop({
|
|
1505
|
+
container,
|
|
1506
|
+
agentConfig: { ...agent, maxIterations: 1 },
|
|
1507
|
+
prompt: fixPrompt,
|
|
1508
|
+
ticketId: ticket.identifier
|
|
1509
|
+
});
|
|
1510
|
+
iterationsUsed += fixResult.iterations;
|
|
1511
|
+
agentResult = {
|
|
1512
|
+
iterations: iterationsUsed,
|
|
1513
|
+
success: fixResult.success,
|
|
1514
|
+
lastOutput: fixResult.lastOutput
|
|
1515
|
+
};
|
|
1516
|
+
const fixGitResult = await commitAndPush(container, {
|
|
1517
|
+
branchName,
|
|
1518
|
+
ticketId: ticket.identifier,
|
|
1519
|
+
title: ticket.title,
|
|
1520
|
+
repoUrl: ticket.repoUrl,
|
|
1521
|
+
githubToken: config.github.token,
|
|
1522
|
+
branchPrefix: config.safety.branchPrefix,
|
|
1523
|
+
maxFileChanges: config.safety.maxFileChanges
|
|
1524
|
+
});
|
|
1525
|
+
totalCommitCount += fixGitResult.commitCount;
|
|
1526
|
+
}
|
|
1527
|
+
if (lastFailures.length > 0) {
|
|
1528
|
+
log.ticket(ticket.identifier, "Validation still failing after max iterations \u2014 leaving PR as draft with warnings");
|
|
1529
|
+
await updatePRBody(octokit, {
|
|
1530
|
+
repoUrl: ticket.repoUrl,
|
|
1531
|
+
prNumber: pr.number,
|
|
1532
|
+
body: buildPRBody({ validationWarnings: lastFailures, commitCount: totalCommitCount })
|
|
1533
|
+
});
|
|
1534
|
+
} else {
|
|
1535
|
+
await updatePRBody(octokit, {
|
|
1536
|
+
repoUrl: ticket.repoUrl,
|
|
1537
|
+
prNumber: pr.number,
|
|
1538
|
+
body: buildPRBody({ commitCount: totalCommitCount })
|
|
1539
|
+
});
|
|
1540
|
+
await markPRReady(octokit, {
|
|
1541
|
+
repoUrl: ticket.repoUrl,
|
|
1542
|
+
prNumber: pr.number
|
|
1543
|
+
});
|
|
1544
|
+
log.ticket(ticket.identifier, "PR marked as ready for review");
|
|
1545
|
+
}
|
|
1546
|
+
}
|
|
1547
|
+
await completeTicket(linearClient, config, ticket.id, `PR opened: ${pr.url}`);
|
|
1548
|
+
await addBranchLabel(linearClient, ticket.id, branchName);
|
|
1549
|
+
log.success(`${ticket.identifier} done \u2014 ${pr.url}`);
|
|
1550
|
+
const result = {
|
|
1551
|
+
ticketId: ticket.identifier,
|
|
1552
|
+
success: true,
|
|
1553
|
+
prUrl: pr.url,
|
|
1554
|
+
iterations: agentResult.iterations
|
|
1555
|
+
};
|
|
1556
|
+
finishRun(ticket.identifier, "success");
|
|
1557
|
+
updateRun(ticket.identifier, { pr_url: pr.url, branch: branchName, iterations: agentResult.iterations });
|
|
1558
|
+
recordRun(result, branchName);
|
|
1559
|
+
return result;
|
|
1560
|
+
} catch (err) {
|
|
1561
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
1562
|
+
log.error(`${ticket.identifier} failed: ${msg}`);
|
|
1563
|
+
try {
|
|
1564
|
+
await failTicket(linearClient, ticket.id, msg);
|
|
1565
|
+
} catch {
|
|
1566
|
+
}
|
|
1567
|
+
finishRun(ticket.identifier, "failed", msg);
|
|
1568
|
+
const result = {
|
|
1569
|
+
ticketId: ticket.identifier,
|
|
1570
|
+
success: false,
|
|
1571
|
+
error: msg,
|
|
1572
|
+
iterations: 0
|
|
1573
|
+
};
|
|
1574
|
+
recordRun(result, branchName);
|
|
1575
|
+
return result;
|
|
1576
|
+
} finally {
|
|
1577
|
+
if (container) {
|
|
1578
|
+
await destroyContainer(container);
|
|
1579
|
+
}
|
|
1580
|
+
if (workDir) {
|
|
1581
|
+
try {
|
|
1582
|
+
rmSync2(workDir, { recursive: true, force: true });
|
|
1583
|
+
} catch {
|
|
1584
|
+
}
|
|
1585
|
+
}
|
|
1586
|
+
}
|
|
1587
|
+
}
|
|
1588
|
+
function buildFixPrompt(pr, failures) {
  // Assemble the agent prompt for repairing failing CI checks on an open PR.
  const header = [
    `## Task: Fix CI failures on PR "${pr.title}"`,
    "",
    `Branch: ${pr.head}`,
    "",
    "The following CI checks are failing. Read the logs carefully, identify the root cause, fix the code, and make sure the fix is correct.",
    ""
  ];
  // One fenced log section per failing check.
  const checkSections = failures.flatMap((failure) => [
    `### Failed check: ${failure.name}`,
    "```",
    failure.log,
    "```",
    ""
  ]);
  const instructions = [
    "## Instructions",
    "- Start by reading CLAUDE.md if it exists",
    "- Read the failing test/lint/build output carefully before making changes",
    "- Fix the root cause, not just the symptoms",
    "- Run the failing command locally to verify your fix",
    "- Tests MUST import from actual source modules \u2014 never duplicate source code in tests",
    '- After fixing, run: git add -A && git commit -m "fix: address CI failures"',
    "- IMPORTANT: You MUST commit your changes with git before finishing",
    "- When all fixes are committed, include [autoclawd:done] in your final output"
  ];
  return [...header, ...checkSections, ...instructions].join("\n");
}
|
|
1614
|
+
/**
 * Clone an open PR's head branch, run the agent against its failing CI
 * checks, and push any fix commits back to the same branch.
 *
 * @param opts - { repoUrl, prNumber, config, octokit }
 * @returns a result object { success, prUrl, error?, iterations, failedChecks };
 *   this function never throws — all failures are reported via the result.
 */
async function fixPR(opts) {
  const { repoUrl, prNumber, config, octokit } = opts;
  const prUrl = `${repoUrl}/pull/${prNumber}`;
  // Declared outside try so the finally block can clean them up.
  let workDir;
  let container;
  try {
    const pr = await fetchPR(octokit, { repoUrl, prNumber });
    log.info(`PR #${prNumber}: "${pr.title}" (${pr.head} \u2192 ${pr.base})`);
    if (pr.state !== "open") {
      return { success: false, prUrl, error: "PR is not open", iterations: 0, failedChecks: 0 };
    }
    const failures = await fetchFailedChecks(octokit, { repoUrl, prNumber });
    if (failures.length === 0) {
      // Nothing failing — report success without doing any work.
      log.success("No failed CI checks \u2014 nothing to fix");
      return { success: true, prUrl, iterations: 0, failedChecks: 0 };
    }
    log.info(`Found ${failures.length} failed check(s): ${failures.map((f) => f.name).join(", ")}`);
    // Work on the PR's head branch so fix commits land on the PR itself.
    workDir = await cloneToTemp(repoUrl, pr.head, config.github.token);
    log.info("Cloned PR branch");
    // Repo-local .autoclawd.yaml (if present) can override agent/docker config.
    const repoLocal = loadRepoLocalConfig(workDir);
    const { agent, docker: docker2 } = mergeConfigs(config, repoLocal);
    // NOTE(review): unlike the ticket path, this name is not lowercased first;
    // the `fix-pr-<n>-<ts>` prefix is already lowercase, so this looks safe — confirm.
    const containerName = `fix-pr-${prNumber}-${Date.now()}`.replace(/[^a-z0-9_.-]/g, "-");
    container = await createContainer({
      dockerConfig: docker2,
      workspacePath: workDir,
      name: containerName
    });
    await setupRepo(container, { branchName: pr.head });
    const prompt = buildFixPrompt(pr, failures);
    const agentResult = await runAgentLoop({
      container,
      agentConfig: agent,
      prompt,
      ticketId: `PR-${prNumber}`
    });
    const gitResult = await commitAndPush(container, {
      branchName: pr.head,
      ticketId: `PR-${prNumber}`,
      title: "Fix CI failures",
      repoUrl,
      githubToken: config.github.token,
      branchPrefix: config.safety.branchPrefix,
      maxFileChanges: config.safety.maxFileChanges
    });
    if (!gitResult.pushed) {
      // The agent ran but produced no committable changes.
      return { success: false, prUrl, error: "No fix produced", iterations: agentResult.iterations, failedChecks: failures.length };
    }
    log.success(`Pushed ${gitResult.commitCount} fix commit(s) to ${pr.head}`);
    return { success: true, prUrl, iterations: agentResult.iterations, failedChecks: failures.length };
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    log.error(`Fix failed: ${msg}`);
    return { success: false, prUrl, error: msg, iterations: 0, failedChecks: 0 };
  } finally {
    // Always tear down the container and the temp clone, even on failure.
    if (container) await destroyContainer(container);
    if (workDir) {
      try {
        rmSync2(workDir, { recursive: true, force: true });
      } catch {
        // Best-effort cleanup; ignore filesystem errors.
      }
    }
  }
}
|
|
1677
|
+
|
|
1678
|
+
// src/webhook.ts
|
|
1679
|
+
function verifySignature(body, signature, secret) {
  // Recompute the HMAC-SHA256 hex digest of the raw body and compare it to
  // the caller-supplied signature in constant time.
  const digest = createHmac("sha256", secret).update(body).digest("hex");
  let matches = false;
  try {
    matches = timingSafeEqual(Buffer.from(digest), Buffer.from(signature));
  } catch {
    // timingSafeEqual throws when the buffer lengths differ — treat as mismatch.
  }
  return matches;
}
|
|
1687
|
+
// HTTP server that receives Linear webhooks and dispatches matching tickets
// to the agent, bounded by config.maxConcurrent. Overflow is persisted to a
// queue (enqueue/dequeue) and drained as runs finish.
var WebhookServer = class {
  constructor(config, linearClient, octokit) {
    this.config = config;
    this.linearClient = linearClient;
    this.octokit = octokit;
  }
  config;
  linearClient;
  octokit;
  server;
  // Track in-memory active set for concurrency counting (fast path)
  activeInMemory = /* @__PURE__ */ new Set();
  // Open the DB, resume any persisted queue, then start listening.
  // Resolves once the server is accepting connections.
  start() {
    getDb();
    this.resumeQueue();
    const port = this.config.webhook?.port ?? 3e3;
    // Reject request bodies above 1 MiB to bound memory use.
    const MAX_BODY_BYTES = 1024 * 1024;
    this.server = createServer((req, res) => {
      // Liveness/capacity probe.
      if (req.method === "GET" && (req.url === "/health" || req.url === "/healthz")) {
        res.writeHead(200, { "Content-Type": "application/json" });
        res.end(JSON.stringify({
          status: "ok",
          active: this.activeInMemory.size,
          queued: getQueueLength(),
          capacity: this.config.maxConcurrent
        }));
        return;
      }
      // Only POSTs to the two webhook paths are accepted.
      if (req.method !== "POST" || req.url !== "/webhook" && req.url !== "/api/v1/linear/webhook") {
        res.writeHead(404);
        res.end();
        return;
      }
      let body = "";
      let bodyBytes = 0;
      req.on("data", (chunk) => {
        bodyBytes += chunk.length;
        if (bodyBytes > MAX_BODY_BYTES) {
          res.writeHead(413);
          res.end("Payload too large");
          req.destroy();
          return;
        }
        body += chunk;
      });
      req.on("end", () => {
        // Signature check is optional: only enforced when a signing secret
        // is configured.
        if (this.config.webhook?.signingSecret) {
          const sig = req.headers["linear-signature"];
          if (!sig || !verifySignature(body, sig, this.config.webhook.signingSecret)) {
            log.warn("Invalid webhook signature, rejecting");
            res.writeHead(401);
            res.end("Invalid signature");
            return;
          }
        }
        // Acknowledge immediately; processing happens asynchronously so the
        // webhook sender never waits on agent work.
        res.writeHead(200);
        res.end("ok");
        void this.handleWebhook(body);
      });
    });
    return new Promise((resolve) => {
      this.server.listen(port, () => {
        log.info(`Webhook server listening on port ${port}`);
        resolve();
      });
    });
  }
  // Stop accepting connections and close the DB handle.
  stop() {
    this.server?.close();
    closeDb();
    log.info("Webhook server stopped");
  }
  // Recover state after a restart: runs that were "running" at shutdown are
  // marked failed, and previously queued tickets are re-dispatched up to the
  // concurrency cap (the remainder is re-enqueued).
  resumeQueue() {
    const staleRuns = getActiveRuns();
    for (const run of staleRuns) {
      log.warn(`${run.ticket_id}: was running at shutdown, marking as failed`);
      finishRun(run.ticket_id, "failed", "Unclean shutdown \u2014 process restarted");
    }
    const queued = getQueuedTickets();
    if (queued.length > 0) {
      log.info(`Resuming ${queued.length} queued ticket(s) from database`);
      for (const ticket of queued) {
        removeFromQueue(ticket.id);
        if (this.activeInMemory.size >= this.config.maxConcurrent) {
          // Put it back and stop — capacity is full.
          enqueue(ticket);
          break;
        }
        this.dispatch(ticket);
      }
    }
  }
  // Parse one webhook payload and decide: ignore, enqueue, or dispatch.
  async handleWebhook(body) {
    let payload;
    try {
      payload = JSON.parse(body);
    } catch {
      log.warn("Invalid webhook JSON");
      return;
    }
    if (payload.type !== "Issue") return;
    const { action, data } = payload;
    // Status matching is case-insensitive.
    const triggerStatuses = this.config.linear.statuses.map((s) => s.toLowerCase());
    const currentStatus = data.state.name.toLowerCase();
    if (action === "create" || action === "update") {
      if (!triggerStatuses.includes(currentStatus)) {
        log.debug(`${data.identifier}: status "${data.state.name}" not in trigger list, ignoring`);
        return;
      }
      // For updates, only react when the state actually changed (avoids
      // re-triggering on edits to title/description/etc.).
      if (action === "update" && !payload.updatedFrom?.stateId) {
        log.debug(`${data.identifier}: update but status didn't change, ignoring`);
        return;
      }
    } else {
      return;
    }
    // De-duplicate: skip tickets already running (in-memory or in DB) ...
    if (this.activeInMemory.has(data.id) || isTicketActive(data.id)) {
      log.debug(`${data.identifier}: already active, ignoring`);
      return;
    }
    // ... or already processed in a previous run.
    if (isTicketProcessed(data.id)) {
      log.debug(`${data.identifier}: already processed, ignoring`);
      return;
    }
    if (this.activeInMemory.size >= this.config.maxConcurrent) {
      log.info(`${data.identifier}: at capacity (${this.activeInMemory.size}/${this.config.maxConcurrent}), queued`);
      const queueLabels = data.labels.map((l) => l.name);
      const ticket2 = {
        id: data.id,
        identifier: data.identifier,
        title: data.title,
        description: data.description ?? "",
        labels: queueLabels,
        repoUrl: parseRepoFromLabels(queueLabels),
        baseBranch: parseBaseFromLabels(queueLabels),
        url: data.url
      };
      enqueue(ticket2);
      return;
    }
    const labelNames = data.labels.map((l) => l.name);
    // The target repository is carried on a "repo:" label; without it the
    // ticket cannot be worked on.
    const repoUrl = parseRepoFromLabels(labelNames);
    if (!repoUrl) {
      log.warn(`${data.identifier}: no repo: label, ignoring`);
      return;
    }
    const ticket = {
      id: data.id,
      identifier: data.identifier,
      title: data.title,
      description: data.description ?? "",
      labels: labelNames,
      repoUrl,
      baseBranch: parseBaseFromLabels(labelNames),
      url: data.url
    };
    this.dispatch(ticket);
  }
  // Mark the ticket active, record the run, and fire executeTicket in the
  // background; on settle, free the slot and try to drain the queue.
  dispatch(ticket) {
    this.activeInMemory.add(ticket.id);
    insertRun(ticket.id, "running");
    log.ticket(ticket.identifier, `Dispatching \u2192 ${ticket.repoUrl}`);
    void executeTicket({
      ticket,
      config: this.config,
      linearClient: this.linearClient,
      octokit: this.octokit
    }).then(
      () => {
        // NOTE(review): executeTicket appears to record its own outcome;
        // this second finishRun keyed by ticket.id may be redundant — confirm.
        finishRun(ticket.id, "success");
      },
      (err) => {
        finishRun(ticket.id, "failed", err.message);
      }
    ).finally(() => {
      this.activeInMemory.delete(ticket.id);
      this.drainQueue();
    });
  }
  // Pull queued tickets while capacity remains; skips any that somehow
  // became active in the meantime.
  drainQueue() {
    while (this.activeInMemory.size < this.config.maxConcurrent) {
      const next = dequeue();
      if (!next) break;
      if (this.activeInMemory.has(next.id)) continue;
      log.ticket(next.identifier, `Dequeued (${getQueueLength()} remaining)`);
      this.dispatch(next);
    }
  }
};
|
|
1875
|
+
|
|
1876
|
+
// src/tunnel.ts
|
|
1877
|
+
import { spawn } from "child_process";
|
|
1878
|
+
/**
 * Spawn a Cloudflare quick tunnel pointing at the local webhook port and
 * resolve once cloudflared prints its public *.trycloudflare.com URL.
 *
 * @param port - local port the tunnel should forward to
 * @returns {Promise<{url: string, process: ChildProcess}>}
 * @throws rejects on startup timeout, missing cloudflared binary, spawn
 *   error, or premature process exit
 */
async function startQuickTunnel(port) {
  return new Promise((resolve, reject) => {
    const proc = spawn("cloudflared", ["tunnel", "--url", `http://127.0.0.1:${port}`], {
      stdio: ["ignore", "pipe", "pipe"]
    });
    // Guards against settling the promise more than once across the
    // output / error / exit / timeout paths.
    let resolved = false;
    const timeout = setTimeout(() => {
      if (!resolved) {
        // Fix: mark the promise as settled and kill the child so a timed-out
        // cloudflared process is not leaked and cannot trigger a stale
        // resolve() if it prints its URL after we already rejected.
        resolved = true;
        proc.kill("SIGTERM");
        reject(new Error("Tunnel startup timed out after 30s"));
      }
    }, 3e4);
    const handleOutput = (data) => {
      const line = data.toString();
      // cloudflared may print the assigned public URL on stdout or stderr.
      const match = line.match(/(https:\/\/[a-z0-9-]+\.trycloudflare\.com)/);
      if (match && !resolved) {
        resolved = true;
        clearTimeout(timeout);
        resolve({ url: match[1], process: proc });
      }
    };
    proc.stdout.on("data", handleOutput);
    proc.stderr.on("data", handleOutput);
    proc.on("error", (err) => {
      if (!resolved) {
        resolved = true;
        clearTimeout(timeout);
        if (err.code === "ENOENT") {
          // Friendlier message for the common "binary not installed" case.
          reject(new Error("cloudflared not found. Install it: https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/downloads/"));
        } else {
          reject(err);
        }
      }
    });
    proc.on("exit", (code) => {
      if (!resolved) {
        resolved = true;
        clearTimeout(timeout);
        reject(new Error(`cloudflared exited with code ${code}`));
      }
    });
  });
}
|
|
1918
|
+
function stopTunnel(tunnel) {
  // Ask the cloudflared child process to shut down gracefully, then log it.
  const { process: proc } = tunnel;
  proc.kill("SIGTERM");
  log.info("Tunnel stopped");
}
|
|
1922
|
+
|
|
1923
|
+
// src/watch.ts
|
|
1924
|
+
// Polling-mode counterpart to WebhookServer: periodically queries Linear for
// trigger-status tickets and dispatches them to the agent, bounded by
// config.maxConcurrent, with overflow persisted to the queue.
var Watcher = class {
  constructor(config, linearClient, octokit) {
    this.config = config;
    this.linearClient = linearClient;
    this.octokit = octokit;
  }
  config;
  linearClient;
  octokit;
  // In-memory active set for fast concurrency counting.
  activeInMemory = /* @__PURE__ */ new Set();
  timer;
  stopped = false;
  // Open the DB, resume any persisted queue, poll once immediately, then
  // either wait for all active work (once=true) or poll on an interval.
  async start(intervalSeconds, once) {
    getDb();
    this.resumeQueue();
    await this.poll();
    if (once) {
      await this.waitForActive();
      closeDb();
      return;
    }
    this.timer = setInterval(() => {
      if (!this.stopped) {
        // Fire-and-forget: poll() catches its own errors.
        void this.poll();
      }
    }, intervalSeconds * 1e3);
    log.info(`Polling every ${intervalSeconds}s (Ctrl+C to stop)`);
  }
  // Stop polling and close the DB handle. In-flight ticket runs are not
  // interrupted here.
  stop() {
    this.stopped = true;
    if (this.timer) {
      clearInterval(this.timer);
      this.timer = void 0;
    }
    closeDb();
    log.info("Watcher stopped");
  }
  // Recover state after a restart: stale "running" rows become failures, and
  // queued tickets are re-dispatched up to the concurrency cap.
  resumeQueue() {
    const staleRuns = getActiveRuns();
    for (const run of staleRuns) {
      log.warn(`${run.ticket_id}: was running at shutdown, marking as failed`);
      finishRun(run.ticket_id, "failed", "Unclean shutdown \u2014 process restarted");
    }
    const queued = getQueuedTickets();
    if (queued.length > 0) {
      log.info(`Resuming ${queued.length} queued ticket(s) from database`);
      for (const ticket of queued) {
        removeFromQueue(ticket.id);
        if (this.activeInMemory.size >= this.config.maxConcurrent) {
          // Put it back and stop — capacity is full.
          enqueue(ticket);
          break;
        }
        this.dispatch(ticket);
      }
    }
  }
  // One poll cycle: fetch candidate tickets, filter out known ones, and
  // dispatch or enqueue the rest. Errors are logged, never thrown.
  async poll() {
    try {
      const tickets = await pollTickets(this.linearClient, this.config);
      const newTickets = tickets.filter((t) => this.isNewTicket(t));
      log.info(`Poll: ${tickets.length} ticket(s) found, ${newTickets.length} new`);
      for (const ticket of newTickets) {
        // Without a repo: label there is nothing to clone.
        if (!ticket.repoUrl) {
          log.warn(`${ticket.identifier}: no repo: label, skipping`);
          continue;
        }
        if (this.activeInMemory.size >= this.config.maxConcurrent) {
          log.info(`${ticket.identifier}: at capacity (${this.activeInMemory.size}/${this.config.maxConcurrent}), queued`);
          enqueue(ticket);
          continue;
        }
        this.dispatch(ticket);
      }
    } catch (err) {
      log.error(`Poll failed: ${err instanceof Error ? err.message : err}`);
    }
  }
  // A ticket is "new" when it is not currently running (in memory or DB)
  // and has not been processed before.
  isNewTicket(ticket) {
    if (this.activeInMemory.has(ticket.id)) return false;
    if (isTicketActive(ticket.id)) return false;
    if (isTicketProcessed(ticket.id)) return false;
    return true;
  }
  // Mark the ticket active, record the run, and fire executeTicket in the
  // background; on settle, free the slot and try to drain the queue.
  dispatch(ticket) {
    this.activeInMemory.add(ticket.id);
    insertRun(ticket.id, "running");
    log.ticket(ticket.identifier, `Dispatching \u2192 ${ticket.repoUrl}`);
    void executeTicket({
      ticket,
      config: this.config,
      linearClient: this.linearClient,
      octokit: this.octokit
    }).then(
      () => {
        // NOTE(review): executeTicket appears to record its own outcome;
        // this second finishRun keyed by ticket.id may be redundant — confirm.
        finishRun(ticket.id, "success");
      },
      (err) => {
        finishRun(ticket.id, "failed", err.message);
      }
    ).finally(() => {
      this.activeInMemory.delete(ticket.id);
      this.drainQueue();
    });
  }
  // Pull queued tickets while capacity remains; skips any that somehow
  // became active in the meantime.
  drainQueue() {
    while (this.activeInMemory.size < this.config.maxConcurrent) {
      const next = dequeue();
      if (!next) break;
      if (this.activeInMemory.has(next.id)) continue;
      log.ticket(next.identifier, `Dequeued (${getQueueLength()} remaining)`);
      this.dispatch(next);
    }
  }
  // Resolve once no tickets are active, checking once per second.
  waitForActive() {
    if (this.activeInMemory.size === 0) return Promise.resolve();
    return new Promise((resolve) => {
      const check = setInterval(() => {
        if (this.activeInMemory.size === 0) {
          clearInterval(check);
          resolve();
        }
      }, 1e3);
    });
  }
};
|
|
2049
|
+
|
|
2050
|
+
// src/history.ts
|
|
2051
|
+
import { readFileSync as readFileSync3, existsSync as existsSync5 } from "fs";
|
|
2052
|
+
import { join as join6 } from "path";
|
|
2053
|
+
import { homedir as homedir4 } from "os";
|
|
2054
|
+
import chalk2 from "chalk";
|
|
2055
|
+
var HISTORY_DIR = join6(homedir4(), ".autoclawd");
|
|
2056
|
+
var HISTORY_FILE = join6(HISTORY_DIR, "history.jsonl");
|
|
2057
|
+
function runToRecord(run) {
  // Map a SQLite row (snake_case columns) into the public history record
  // shape (camelCase), normalizing NULLs to "", undefined, or 0.
  const failed = run.status === "failed";
  return {
    ticketId: run.ticket_id,
    status: failed ? "failure" : "success",
    branch: run.branch ?? "",
    prUrl: run.pr_url ?? void 0,
    error: run.error ?? void 0,
    iterations: run.iterations,
    model: run.model ?? "",
    image: run.image ?? "",
    startedAt: run.started_at,
    finishedAt: run.finished_at ?? "",
    durationSecs: run.duration_secs ?? 0
  };
}
|
|
2072
|
+
function readHistory(opts) {
  // Fetch matching run rows from SQLite and convert each to a history record.
  return queryRuns(opts).map((row) => runToRecord(row));
}
|
|
2076
|
+
// One-time migration of the legacy JSONL history file into the SQLite runs
// table. No-ops (returns 0) when there is no legacy file, when the table
// already has rows, or when the file is empty.
// Returns the number of records imported.
function migrateJsonlToSqlite() {
  if (!existsSync5(HISTORY_FILE)) return 0;
  // Never re-import over an already-populated table.
  if (countRuns() > 0) return 0;
  const lines = readFileSync3(HISTORY_FILE, "utf-8").split("\n").filter(Boolean);
  if (lines.length === 0) return 0;
  const db = getDb();
  const insert = db.prepare(
    `INSERT OR IGNORE INTO runs (id, ticket_id, status, branch, pr_url, started_at, finished_at, error, iterations, model, image, duration_secs)
     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
  );
  // Wrap all inserts in one transaction for speed and atomicity.
  const tx = db.transaction(() => {
    let count = 0;
    for (const line of lines) {
      try {
        const r = JSON.parse(line);
        // Legacy records say "failure"; the runs table uses "failed".
        const status = r.status === "failure" ? "failed" : r.status;
        insert.run(
          // NOTE(review): ticketId is used for both the `id` and `ticket_id`
          // columns, so with OR IGNORE repeated legacy entries for the same
          // ticket collapse to one row — confirm this is intended.
          r.ticketId,
          r.ticketId,
          status,
          r.branch || null,
          r.prUrl || null,
          r.startedAt,
          r.finishedAt || null,
          r.error || null,
          r.iterations,
          r.model || null,
          r.image || null,
          r.durationSecs ?? null
        );
        count++;
      } catch {
        // Skip malformed JSONL lines rather than aborting the migration.
      }
    }
    return count;
  });
  return tx();
}
|
|
2114
|
+
function formatDuration(secs) {
  // Render a duration in seconds as "Ns", "Mm Ss", or "Hh Mm".
  if (secs >= 3600) {
    const totalMinutes = Math.floor(secs / 60);
    return `${Math.floor(totalMinutes / 60)}h ${totalMinutes % 60}m`;
  }
  if (secs >= 60) {
    return `${Math.floor(secs / 60)}m ${secs % 60}s`;
  }
  return `${secs}s`;
}
|
|
2122
|
+
function formatTime(iso) {
  // "2024-01-02T03:04:05.123Z" -> "2024-01-02 03:04:05" (drop sub-seconds/zone).
  const spaced = iso.replace("T", " ");
  return spaced.slice(0, 19);
}
|
|
2125
|
+
function pad(s, len) {
  // Clamp or right-pad `s` with spaces so the result is exactly `len` chars.
  if (s.length >= len) {
    return s.slice(0, len);
  }
  return s.padEnd(len, " ");
}
|
|
2128
|
+
function printHistoryTable(records) {
  // Pretty-print history records as a fixed-width table on stdout.
  if (records.length === 0) {
    console.log("No history records found.");
    return;
  }
  const headerRow = `${pad("Ticket", 14)} ${pad("Status", 10)} ${pad("PR", 50)} ${pad("Duration", 10)} ${pad("Time", 20)}`;
  console.log(chalk2.bold(headerRow));
  console.log("-".repeat(headerRow.length));
  for (const record of records) {
    const succeeded = record.status === "success";
    const statusCell = succeeded ? chalk2.green(pad("success", 10)) : chalk2.red(pad("failure", 10));
    // PR column falls back to a truncated error message, then "-".
    const prCell = pad(record.prUrl ?? (record.error ? record.error.slice(0, 48) : "-"), 50);
    const durationCell = pad(formatDuration(record.durationSecs), 10);
    const timeCell = pad(formatTime(record.startedAt), 20);
    console.log(`${pad(record.ticketId, 14)} ${statusCell} ${prCell} ${durationCell} ${timeCell}`);
  }
}
|
|
2144
|
+
|
|
2145
|
+
// src/deps.ts
|
|
2146
|
+
import { execSync as execSync3 } from "child_process";
|
|
2147
|
+
import { existsSync as existsSync6 } from "fs";
|
|
2148
|
+
import { join as join7 } from "path";
|
|
2149
|
+
import { homedir as homedir5 } from "os";
|
|
2150
|
+
function detectPackageManager() {
  // Probe for known package managers via `which`, in priority order;
  // the first binary found on PATH wins.
  const candidates = [
    ["apt", "apt-get"],
    ["dnf", "dnf"],
    ["yum", "yum"],
    ["pacman", "pacman"],
    ["apk", "apk"],
    ["brew", "brew"]
  ];
  for (const [name, binary] of candidates) {
    try {
      execSync3(`which ${binary}`, { stdio: "pipe" });
      return name;
    } catch {
      // Binary not on PATH — try the next candidate.
    }
  }
  return "unknown";
}
|
|
2168
|
+
var DOCKER_INSTALL = {
|
|
2169
|
+
apt: [
|
|
2170
|
+
"apt-get update -qq",
|
|
2171
|
+
"apt-get install -y docker.io",
|
|
2172
|
+
"systemctl enable docker",
|
|
2173
|
+
"systemctl start docker"
|
|
2174
|
+
],
|
|
2175
|
+
dnf: [
|
|
2176
|
+
"dnf install -y docker",
|
|
2177
|
+
"systemctl enable docker",
|
|
2178
|
+
"systemctl start docker"
|
|
2179
|
+
],
|
|
2180
|
+
yum: [
|
|
2181
|
+
"yum install -y docker",
|
|
2182
|
+
"systemctl enable docker",
|
|
2183
|
+
"systemctl start docker"
|
|
2184
|
+
],
|
|
2185
|
+
pacman: [
|
|
2186
|
+
"pacman -Sy --noconfirm docker",
|
|
2187
|
+
"systemctl enable docker",
|
|
2188
|
+
"systemctl start docker"
|
|
2189
|
+
],
|
|
2190
|
+
apk: [
|
|
2191
|
+
"apk add docker",
|
|
2192
|
+
"rc-update add docker boot",
|
|
2193
|
+
"service docker start"
|
|
2194
|
+
],
|
|
2195
|
+
brew: [
|
|
2196
|
+
"brew install --cask docker"
|
|
2197
|
+
],
|
|
2198
|
+
unknown: []
|
|
2199
|
+
};
|
|
2200
|
+
// Shell command sequences to install cloudflared (used for quick tunnels),
// keyed by detectPackageManager() result. The apt entry first tries the
// distro package and, on failure, adds Cloudflare's own apt repository;
// dnf/yum fall back to the npm-distributed wrapper when no RPM is available.
var CLOUDFLARED_INSTALL = {
  apt: [
    "apt-get update -qq",
    'apt-get install -y cloudflared || (curl -fsSL https://pkg.cloudflare.com/cloudflare-main.gpg | tee /usr/share/keyrings/cloudflare-main.gpg >/dev/null && echo "deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/cloudflared.list && apt-get update -qq && apt-get install -y cloudflared)'
  ],
  dnf: ["dnf install -y cloudflared || npm install -g cloudflared"],
  yum: ["yum install -y cloudflared || npm install -g cloudflared"],
  pacman: ["pacman -Sy --noconfirm cloudflared"],
  apk: ["apk add cloudflared"],
  brew: ["brew install cloudflared"],
  unknown: []
};
|
|
2212
|
+
// Build the dependency status report shown by `autoclawd init`.
// Each entry describes one prerequisite: whether it is present, whether it
// can be auto-installed on this host, the commands to do so (sudo-prefixed
// when we are not root and not on brew), and a manual fallback hint.
function checkDeps() {
  const pm = detectPackageManager();
  // brew refuses sudo, "unknown" has no commands to prefix, and root needs none.
  const elevate = pm !== "brew" && pm !== "unknown" && process.getuid?.() !== 0 ? "sudo " : "";
  const withSudo = (cmds) => cmds.map((c) => `${elevate}${c}`);
  const docker = {
    name: "Docker",
    installed: commandExists("docker"),
    running: isDockerRunning(),
    installable: pm !== "unknown",
    installCommands: withSudo(DOCKER_INSTALL[pm]),
    manualInstructions: "https://docs.docker.com/get-docker/"
  };
  const claudeCode = {
    name: "Claude Code",
    installed: commandExists("claude"),
    installable: commandExists("npm"),
    installCommands: ["npm install -g @anthropic-ai/claude-code@latest"],
    manualInstructions: "npm install -g @anthropic-ai/claude-code"
  };
  const claudeCreds = {
    name: "Claude credentials",
    installed: existsSync6(join7(homedir5(), ".claude", ".credentials.json")),
    installable: false,
    installCommands: [],
    manualInstructions: 'Run "claude" and complete the login flow'
  };
  const cloudflared = {
    name: "cloudflared",
    installed: commandExists("cloudflared"),
    installable: pm !== "unknown",
    installCommands: withSudo(CLOUDFLARED_INSTALL[pm]),
    manualInstructions: "https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/downloads/"
  };
  return [docker, claudeCode, claudeCreds, cloudflared];
}
|
|
2248
|
+
// Run a dependency's install commands sequentially, streaming their output.
// Returns true only when every command exits 0; returns false immediately on
// the first failure, and false when there is nothing to run (not installable).
function installDep(dep) {
  const commands = dep.installCommands;
  if (commands.length === 0) return false;
  try {
    for (const command of commands) {
      // 5-minute ceiling per command so a hung installer cannot block forever.
      execSync3(command, { stdio: "inherit", timeout: 300000 });
    }
  } catch {
    return false;
  }
  return true;
}
|
|
2259
|
+
// Best-effort convenience: add the current user to the "docker" group so the
// daemon can be used without sudo. Linux-only; any failure (no sudo rights,
// no usermod, etc.) is deliberately swallowed — this is a nicety, not a requirement.
function addUserToDockerGroup() {
  if (process.platform !== "linux") return;
  try {
    const username = execSync3("whoami", { encoding: "utf-8" }).trim();
    execSync3(`sudo usermod -aG docker ${username}`, { stdio: "pipe" });
    log.info(`Added ${username} to docker group \u2014 log out and back in to apply`);
  } catch {
    // Intentional no-op: caller proceeds either way.
  }
}
|
|
2268
|
+
// True if `cmd` resolves to an executable on PATH.
// `cmd` is expected to be a fixed internal name (never user input), since it
// is interpolated into a shell command.
function commandExists(cmd) {
  try {
    // `command -v` is a POSIX shell builtin; `which` is a separate binary that
    // minimal distributions may not ship, which would make every probe fail.
    execSync3(`command -v ${cmd}`, { stdio: "pipe" });
    return true;
  } catch {
    return false;
  }
}
|
|
2276
|
+
// Probe the Docker daemon. `docker info` exits non-zero when the daemon is
// unreachable; a 10s timeout guards against a wedged socket hanging the CLI.
function isDockerRunning() {
  let daemonUp = true;
  try {
    execSync3("docker info", { stdio: "pipe", timeout: 10000 });
  } catch {
    daemonUp = false;
  }
  return daemonUp;
}
|
|
2284
|
+
|
|
2285
|
+
// src/index.ts
|
|
2286
|
+
import { existsSync as existsSync7, mkdirSync as mkdirSync3, writeFileSync as writeFileSync3 } from "fs";
|
|
2287
|
+
import { execSync as execSync4 } from "child_process";
|
|
2288
|
+
import { createInterface } from "readline";
|
|
2289
|
+
import { join as join8 } from "path";
|
|
2290
|
+
import { homedir as homedir6 } from "os";
|
|
2291
|
+
import { LinearClient as LinearClient2 } from "@linear/sdk";
|
|
2292
|
+
import { Octokit as Octokit2 } from "@octokit/rest";
|
|
2293
|
+
import { createRequire } from "module";
|
|
2294
|
+
// Resolve package.json relative to this bundle (ESM has no native require)
// so the CLI can report its own version.
var require2 = createRequire(import.meta.url);
var pkg = require2("../package.json");
// Root commander instance; all subcommands below register onto it.
var program = new Command();
program.name("autoclawd").description("Linear webhooks \u2192 Claude Code in Docker \u2192 PRs").version(pkg.version);
|
|
2298
|
+
// Prompt the user on stdin and resolve with their trimmed answer.
// An empty answer falls back to `defaultVal` (or "" when no default is given);
// the default is also shown in parentheses as part of the prompt.
function ask(question, defaultVal) {
  const suffix = defaultVal ? ` (${defaultVal})` : "";
  const prompt = `${question}${suffix}: `;
  const rl = createInterface({ input: process.stdin, output: process.stdout });
  return new Promise((resolve) => {
    rl.question(prompt, (answer) => {
      rl.close();
      const trimmed = answer.trim();
      resolve(trimmed || defaultVal || "");
    });
  });
}
|
|
2308
|
+
// `autoclawd serve`: start the webhook server, optionally open a cloudflared
// quick tunnel, and (re)register the Linear webhook to point at it.
// Preflight: Docker must be reachable and Claude credentials must exist.
program.command("serve").description("Start webhook server with auto-tunnel").option("-c, --config <path>", "Config file path").option("-p, --port <number>", "Override webhook port").option("--no-tunnel", "Skip auto-tunnel (use if you have your own reverse proxy)").option("-v, --verbose", "Verbose logging").action(async (opts) => {
  if (opts.verbose) setLogLevel("debug");
  enableFileLogging();
  log.info(`autoclawd v${pkg.version}`);
  await checkDockerAvailable();
  if (!checkClaudeCredentials()) {
    throw new Error(
      "Claude Code credentials not found (~/.claude/.credentials.json)\nRun: claude (and complete login) to create credentials"
    );
  }
  // Containers left behind by a crashed previous session are removed up front.
  const orphans = await cleanupOrphanedContainers();
  if (orphans > 0) {
    log.info(`Cleaned up ${orphans} orphaned container(s) from previous session`);
  }
  const config = loadConfig(opts.config);
  // CLI flag overrides the configured port; radix 10 avoids legacy octal parsing.
  if (opts.port) config.webhook.port = parseInt(opts.port, 10);
  log.info(`Team: ${config.linear.teamId} | Statuses: ${config.linear.statuses.join(", ")} | Model: ${config.agent.model} | Max concurrent: ${config.maxConcurrent}`);
  const linearClient = createLinearClient(config);
  const octokit = createOctokit(config);
  const server = new WebhookServer(config, linearClient, octokit);
  let tunnel;
  // commander turns --no-tunnel into opts.tunnel === false.
  if (opts.tunnel !== false) {
    log.info("Starting tunnel...");
    try {
      tunnel = await startQuickTunnel(config.webhook.port);
      const webhookUrl = `${tunnel.url}/api/v1/linear/webhook`;
      log.success(`Tunnel: ${tunnel.url}`);
      log.info(`Webhook URL: ${webhookUrl}`);
      // Quick-tunnel URLs change every run, so the old Linear webhook (if any)
      // is deleted before a fresh one is created; deletion failure is non-fatal.
      if (config.webhook.webhookId) {
        try {
          await linearClient.deleteWebhook(config.webhook.webhookId);
        } catch {
        }
      }
      const team = await linearClient.team(config.linear.teamId);
      const result = await linearClient.createWebhook({
        url: webhookUrl,
        resourceTypes: ["Issue"],
        label: "autoclawd",
        teamId: team.id
      });
      const webhook = await result.webhook;
      if (webhook) {
        log.success(`Linear webhook created \u2192 ${webhookUrl}`);
        config.webhook.webhookId = webhook.id;
        // NOTE(review): the signing secret is cleared here — presumably the
        // new webhook's secret is not persisted; confirm signature verification
        // downstream tolerates an undefined secret.
        config.webhook.signingSecret = void 0;
      }
    } catch (err) {
      // Tunnel problems degrade to local-only mode rather than aborting serve.
      log.warn(`Tunnel failed: ${err instanceof Error ? err.message : err}`);
      log.info("Falling back to local-only mode. Set up your own reverse proxy.");
    }
  }
  await server.start();
  const shutdown = () => {
    log.info("Shutting down...");
    server.stop();
    if (tunnel) stopTunnel(tunnel);
    process.exit(0);
  };
  process.on("SIGINT", shutdown);
  process.on("SIGTERM", shutdown);
});
|
|
2370
|
+
// `autoclawd watch`: polling mode — periodically query Linear for eligible
// tickets instead of receiving webhooks. `--once` supports cron-style usage.
program.command("watch").description("Poll Linear for tickets (no webhook/tunnel needed)").option("-c, --config <path>", "Config file path").option("-i, --interval <seconds>", "Poll interval in seconds", "30").option("--once", "Poll once and exit (useful for cron)").option("-v, --verbose", "Verbose logging").action(async (opts) => {
  if (opts.verbose) setLogLevel("debug");
  enableFileLogging();
  log.info(`autoclawd v${pkg.version}`);
  await checkDockerAvailable();
  if (!checkClaudeCredentials()) {
    throw new Error(
      "Claude Code credentials not found (~/.claude/.credentials.json)\nRun: claude (and complete login) to create credentials"
    );
  }
  const orphans = await cleanupOrphanedContainers();
  if (orphans > 0) {
    log.info(`Cleaned up ${orphans} orphaned container(s) from previous session`);
  }
  const config = loadConfig(opts.config);
  // Radix 10: avoids surprising parses of zero-padded input like "030".
  const intervalSeconds = parseInt(opts.interval, 10);
  log.info(`Team: ${config.linear.teamId} | Statuses: ${config.linear.statuses.join(", ")} | Model: ${config.agent.model} | Max concurrent: ${config.maxConcurrent}`);
  const linearClient = createLinearClient(config);
  const octokit = createOctokit(config);
  const watcher = new Watcher(config, linearClient, octokit);
  const shutdown = () => {
    log.info("Shutting down...");
    watcher.stop();
    process.exit(0);
  };
  process.on("SIGINT", shutdown);
  process.on("SIGTERM", shutdown);
  // Blocks until stopped (or returns after one pass when --once is set).
  await watcher.start(intervalSeconds, opts.once ?? false);
});
|
|
2399
|
+
// `autoclawd run <ticket>`: execute a single Linear ticket end-to-end.
// --dry-run prints the plan without touching Docker/GitHub; --force bypasses
// the completed-ticket guard inside executeTicket.
program.command("run <ticket>").description("Run a single ticket (e.g. autoclawd run RAH-123)").option("-c, --config <path>", "Config file path").option("-v, --verbose", "Verbose logging").option("--dry-run", "Show what would happen without executing").option("--force", "Bypass completed-ticket check and re-run").action(async (ticketId, opts) => {
  if (opts.verbose) setLogLevel("debug");
  enableFileLogging();
  const config = loadConfig(opts.config);
  const linearClient = createLinearClient(config);
  const ticket = await fetchTicket(linearClient, ticketId);
  if (!ticket) throw new Error(`Ticket ${ticketId} not found`);
  // A repo: label is mandatory — it tells us which repository to clone.
  if (!ticket.repoUrl) {
    throw new Error(`${ticketId} has no repo: label. Add "repo:owner/name" label to the ticket.`);
  }
  if (opts.dryRun) {
    // Dry run happens BEFORE the Docker/credentials preflight on purpose,
    // so the plan can be inspected on machines without Docker.
    log.info("Dry run \u2014 no changes will be made\n");
    log.info(` Ticket: ${ticket.identifier} \u2014 ${ticket.title}`);
    log.info(` Repo: ${ticket.repoUrl}`);
    log.info(` Base: ${ticket.baseBranch ?? "(from .autoclawd.yaml or main)"}`);
    log.info(` Branch: autoclawd/${ticket.identifier}-...`);
    log.info(` Image: ${config.docker.image}`);
    log.info(` Model: ${config.agent.model}`);
    log.info(` Max iter: ${config.agent.maxIterations}`);
    if (ticket.description) {
      log.info(`
Description:
 ${ticket.description.split("\n").join("\n ")}`);
    }
    return;
  }
  await checkDockerAvailable();
  if (!checkClaudeCredentials()) {
    throw new Error(
      "Claude Code credentials not found (~/.claude/.credentials.json)\nRun: claude (and complete login) to create credentials"
    );
  }
  const octokit = createOctokit(config);
  // Signal handlers are installed only for the duration of the run and
  // removed in the finally block so repeated invocations don't accumulate.
  const abort = new AbortController();
  const onSignal = () => {
    log.info("Received signal, cleaning up...");
    abort.abort();
  };
  process.on("SIGINT", onSignal);
  process.on("SIGTERM", onSignal);
  try {
    const result = await executeTicket({ ticket, config, linearClient, octokit, force: opts.force });
    if (!result.success) throw new Error(result.error ?? "Unknown failure");
  } finally {
    process.off("SIGINT", onSignal);
    process.off("SIGTERM", onSignal);
  }
});
|
|
2447
|
+
// `autoclawd fix <pr-url>`: parse a GitHub PR URL and run the CI-fix flow
// against that PR. Throws (and exits non-zero via the global catch) on a
// malformed URL, missing prerequisites, or a failed fix.
program.command("fix <pr-url>").description("Fix failed CI checks on a PR (e.g. autoclawd fix https://github.com/owner/repo/pull/123)").option("-c, --config <path>", "Config file path").option("-v, --verbose", "Verbose logging").action(async (prUrl, opts) => {
  if (opts.verbose) setLogLevel("debug");
  enableFileLogging();
  // Captures: [owner, repo, PR number] from a canonical GitHub PR URL.
  const match = prUrl.match(/github\.com\/([^/]+)\/([^/]+)\/pull\/(\d+)/);
  if (!match) {
    throw new Error(`Invalid PR URL: "${prUrl}"
Expected: https://github.com/owner/repo/pull/123`);
  }
  const [, owner, repo, prNumStr] = match;
  const repoUrl = `https://github.com/${owner}/${repo}`;
  // Radix 10: never rely on parseInt's default-radix heuristics.
  const prNumber = parseInt(prNumStr, 10);
  const config = loadConfig(opts.config);
  await checkDockerAvailable();
  if (!checkClaudeCredentials()) {
    throw new Error(
      "Claude Code credentials not found (~/.claude/.credentials.json)\nRun: claude (and complete login) to create credentials"
    );
  }
  const octokit = createOctokit(config);
  const result = await fixPR({ repoUrl, prNumber, config, octokit });
  if (!result.success) {
    throw new Error(result.error ?? "Fix failed");
  }
});
|
|
2471
|
+
// `autoclawd retry`: re-run previously failed tickets. Three selection modes:
//   retry RAH-123     — one explicit ticket (runs fresh even with no failure record)
//   retry --last-failed — the single most recent failure in run history
//   retry --all-failed  — every ticket whose latest outcome is failure, sequentially
program.command("retry [ticket]").description("Retry a failed ticket (e.g. autoclawd retry RAH-123)").option("-c, --config <path>", "Config file path").option("-v, --verbose", "Verbose logging").option("--last-failed", "Retry the most recent failed run").option("--all-failed", "Retry all failed runs (sequentially)").action(async (ticketArg, opts) => {
  if (opts.verbose) setLogLevel("debug");
  enableFileLogging();
  if (!ticketArg && !opts.lastFailed && !opts.allFailed) {
    throw new Error(
      "Provide a ticket ID, --last-failed, or --all-failed.\nUsage: autoclawd retry RAH-123 | --last-failed | --all-failed"
    );
  }
  const config = loadConfig(opts.config);
  await checkDockerAvailable();
  if (!checkClaudeCredentials()) {
    throw new Error(
      "Claude Code credentials not found (~/.claude/.credentials.json)\nRun: claude (and complete login) to create credentials"
    );
  }
  const linearClient = createLinearClient(config);
  const octokit = createOctokit(config);
  let ticketIds = [];
  if (ticketArg) {
    // Look up the prior failure purely for informative logging; the ticket is
    // retried regardless of whether a failure record exists.
    const history = readHistory({ failedOnly: true, limit: 1e3 });
    const prev = history.find(
      (r) => r.ticketId.toLowerCase() === ticketArg.toLowerCase()
    );
    if (prev) {
      log.info(`Retrying ${prev.ticketId} \u2014 last failure: ${prev.error ?? "unknown error"}`);
      log.info(` Failed at: ${prev.startedAt}`);
    } else {
      log.info(`No previous failure found for ${ticketArg}, running fresh`);
    }
    ticketIds = [ticketArg];
  } else if (opts.lastFailed) {
    const history = readHistory({ failedOnly: true, limit: 1 });
    if (history.length === 0) {
      throw new Error("No failed runs found in history");
    }
    const last = history[0];
    log.info(`Retrying last failed: ${last.ticketId} \u2014 ${last.error ?? "unknown error"}`);
    log.info(` Failed at: ${last.startedAt}`);
    ticketIds = [last.ticketId];
  } else if (opts.allFailed) {
    const allRecords = readHistory({ limit: 1e4 });
    // Tickets that EVER succeeded are excluded — only unresolved failures retry.
    const succeeded = new Set(
      allRecords.filter((r) => r.status === "success").map((r) => r.ticketId)
    );
    // Dedupe to the first (most recent, per readHistory ordering) failure per ticket.
    const failedUnique = /* @__PURE__ */ new Map();
    for (const r of allRecords) {
      if (r.status === "failure" && !succeeded.has(r.ticketId) && !failedUnique.has(r.ticketId)) {
        failedUnique.set(r.ticketId, r);
      }
    }
    if (failedUnique.size === 0) {
      throw new Error("No unresolved failed runs found in history");
    }
    log.info(`Found ${failedUnique.size} failed ticket(s) to retry:`);
    for (const [id, rec] of failedUnique) {
      log.info(` ${id} \u2014 ${rec.error ?? "unknown error"}`);
    }
    ticketIds = [...failedUnique.keys()];
  }
  // Signal handlers live only for the duration of the batch (removed in finally).
  const abort = new AbortController();
  const onSignal = () => {
    log.info("Received signal, cleaning up...");
    abort.abort();
  };
  process.on("SIGINT", onSignal);
  process.on("SIGTERM", onSignal);
  try {
    for (const id of ticketIds) {
      log.info(`
Retrying ticket ${id}...`);
      const ticket = await fetchTicket(linearClient, id);
      if (!ticket) {
        log.error(`Ticket ${id} not found in Linear, skipping`);
        continue;
      }
      if (!ticket.repoUrl) {
        log.error(`${id} has no repo: label, skipping`);
        continue;
      }
      const result = await executeTicket({ ticket, config, linearClient, octokit });
      if (result.success) {
        log.success(`${id} completed${result.prUrl ? ` \u2192 ${result.prUrl}` : ""}`);
      } else {
        log.error(`${id} failed again: ${result.error ?? "unknown"}`);
        // Single-ticket retries fail hard; batch retries keep going.
        if (ticketIds.length === 1) {
          throw new Error(result.error ?? "Retry failed");
        }
      }
    }
  } finally {
    process.off("SIGINT", onSignal);
    process.off("SIGTERM", onSignal);
  }
});
|
|
2565
|
+
// `autoclawd validate`: load the config (running all schema validation) and
// print a summary of the effective settings. Exits 1 with the parse/validation
// error message when the config is invalid.
program.command("validate").description("Validate config without running anything").option("-c, --config <path>", "Config file path").action(async (opts) => {
  try {
    const config = loadConfig(opts.config);
    log.success("Config is valid");
    const summary = [
      ` Linear team: ${config.linear.teamId}`,
      ` Trigger statuses: ${config.linear.statuses.join(", ")}`,
      ` Docker image: ${config.docker.image}`,
      ` Agent model: ${config.agent.model}`,
      ` Max iterations: ${config.agent.maxIterations}`,
      ` Max concurrent: ${config.maxConcurrent}`,
      ` Webhook port: ${config.webhook.port}`
    ];
    for (const line of summary) log.info(line);
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    log.error(message);
    process.exit(1);
  }
});
|
|
2581
|
+
// `autoclawd cleanup`: remove containers left behind by crashed sessions.
program.command("cleanup").description("Remove orphaned autoclawd Docker containers").action(async () => {
  await checkDockerAvailable();
  const removed = await cleanupOrphanedContainers();
  if (removed > 0) {
    log.success(`Cleaned up ${removed} orphaned container(s)`);
  } else {
    log.info("No orphaned containers found");
  }
});
|
|
2590
|
+
// `autoclawd history`: print recent run records, as a table by default or
// as JSON with --json; --failed narrows to failures only.
program.command("history").description("Show run history").option("-n, --limit <number>", "Number of records to show", "20").option("--json", "Output as JSON").option("--failed", "Show only failures").action((opts) => {
  // One-time migration of the legacy JSONL history into SQLite before reading.
  migrateJsonlToSqlite();
  const records = readHistory({
    // Radix 10: avoids parseInt's legacy behavior on prefixed strings.
    limit: parseInt(opts.limit, 10),
    failedOnly: opts.failed ?? false
  });
  if (opts.json) {
    console.log(JSON.stringify(records, null, 2));
  } else {
    printHistoryTable(records);
  }
});
|
|
2602
|
+
// True when the Claude Code OAuth credentials file exists in the user's home
// directory — the file Claude Code writes after an interactive login.
function checkClaudeCredentials() {
  const credentialsPath = join8(homedir6(), ".claude", ".credentials.json");
  return existsSync7(credentialsPath);
}
|
|
2605
|
+
// Verify a GitHub token by calling the authenticated-user endpoint.
// Resolves with the login name on success, or undefined when the token is
// invalid/expired or the request fails for any reason.
async function validateGitHubToken(token) {
  try {
    const client = new Octokit2({ auth: token });
    const response = await client.rest.users.getAuthenticated();
    return response.data.login;
  } catch {
    return void 0;
  }
}
|
|
2614
|
+
// `autoclawd init`: interactive first-run setup. Checks/installs system
// dependencies, collects Linear + GitHub credentials, picks a Docker image
// and model, then writes the YAML config (mode 0600).
program.command("init").description("Set up autoclawd interactively").action(async () => {
  console.log("\n autoclawd setup\n");
  console.log(" Checking dependencies...\n");
  const deps = checkDeps();
  // Names of dependencies still unusable after this pass (for a final reminder).
  const stillMissing = [];
  for (const dep of deps) {
    if (dep.installed && dep.running !== false) {
      log.success(`${dep.name}`);
      continue;
    }
    if (dep.installed && dep.running === false) {
      // Only Docker carries a `running` flag: installed but daemon down.
      log.warn(`${dep.name} \u2014 installed but not running`);
      stillMissing.push(`${dep.name} (needs restart)`);
      continue;
    }
    if (dep.installable && dep.installCommands.length > 0) {
      const answer = await ask(` ${dep.name} not found. Install? (Y/n)`, "Y");
      if (answer.toLowerCase() !== "n") {
        console.log(`
Installing ${dep.name} via system package manager...
`);
        const ok = installDep(dep);
        if (ok) {
          log.success(`${dep.name} installed`);
          if (dep.name === "Docker") addUserToDockerGroup();
          if (dep.name === "Claude Code") {
            // Login is interactive and can't be automated here.
            console.log('\n Run "claude" in your terminal to log in, then re-run: autoclawd init\n');
            stillMissing.push("Claude Code login");
          }
        } else {
          log.error(`${dep.name} install failed`);
          console.log(` Install manually: ${dep.manualInstructions}`);
          stillMissing.push(dep.name);
        }
      } else {
        console.log(` Install manually: ${dep.manualInstructions}`);
        stillMissing.push(dep.name);
      }
    } else if (dep.name === "Claude credentials") {
      log.warn(`${dep.name} \u2014 run "claude" to log in`);
      stillMissing.push(dep.name);
    } else {
      log.warn(`${dep.name} \u2014 ${dep.manualInstructions}`);
      stillMissing.push(dep.name);
    }
  }
  if (stillMissing.length > 0) {
    console.log(`
Continuing setup \u2014 install ${stillMissing.join(", ")} before running.
`);
  } else {
    console.log("");
  }
  if (!existsSync7(CONFIG_DIR)) {
    mkdirSync3(CONFIG_DIR, { recursive: true });
  }
  if (existsSync7(CONFIG_FILE)) {
    const overwrite = await ask("Config already exists. Overwrite? (y/N)", "N");
    if (overwrite.toLowerCase() !== "y") {
      log.info("Keeping existing config.");
      return;
    }
  }
  const linearKey = await ask("Linear API key", process.env.LINEAR_API_KEY);
  if (!linearKey) {
    log.error("Linear API key is required");
    process.exit(1);
  }
  let teamId = "";
  try {
    // Validate the key by listing teams; auto-pick when there is exactly one.
    const client = new LinearClient2({ apiKey: linearKey });
    const teams = await client.teams();
    if (teams.nodes.length === 0) {
      log.error("No teams found in your Linear workspace");
      process.exit(1);
    }
    if (teams.nodes.length === 1) {
      teamId = teams.nodes[0].key;
      log.success(`Linear team: ${teams.nodes[0].name} (${teamId})`);
    } else {
      console.log("\n Available teams:");
      for (const t of teams.nodes) {
        console.log(` ${t.key} \u2014 ${t.name}`);
      }
      teamId = await ask("\n Team key");
    }
  } catch (err) {
    log.error(`Invalid Linear API key: ${err instanceof Error ? err.message : err}`);
    process.exit(1);
  }
  let ghToken = "";
  try {
    // Prefer the token the user already has via the gh CLI.
    ghToken = execSync4("gh auth token", { encoding: "utf-8" }).trim();
    log.success(`GitHub token detected from gh CLI`);
  } catch {
    ghToken = await ask("GitHub personal access token");
  }
  if (!ghToken) {
    log.error("GitHub token is required");
    process.exit(1);
  }
  const ghUser = await validateGitHubToken(ghToken);
  if (ghUser) {
    log.success(`GitHub authenticated as ${ghUser}`);
  } else {
    log.error("GitHub token is invalid or expired");
    process.exit(1);
  }
  console.log("\n Docker image options:");
  console.log(" autoclawd-base \u2014 All runtimes: Node, Python, Go, Rust, Ruby (~1.5GB)");
  console.log(" Works with any repo, no per-repo config needed");
  console.log(" node:20 \u2014 Node.js only (fast, ~300MB)");
  const dockerImage = await ask("Docker image", "autoclawd-base");
  if (dockerImage === "autoclawd-base") {
    try {
      execSync4("docker image inspect autoclawd-base", { stdio: "pipe" });
      log.success("autoclawd-base image found");
    } catch {
      log.info("autoclawd-base will be built automatically on first run (~3 min)");
    }
  }
  const model = await ask("Claude model", "claude-sonnet-4-6");
  const maxIter = await ask("Max iterations per ticket", "10");
  // NOTE(review): secrets are written in plaintext to the config file; the
  // header documents the env:VAR_NAME alternative, and the file is chmod 0600.
  const config = `# autoclawd config \u2014 generated by autoclawd init
# Tip: use "env:VAR_NAME" to read secrets from environment variables
# e.g. apiKey: env:LINEAR_API_KEY

linear:
  apiKey: "${linearKey}"
  teamId: ${teamId}
  statuses: [Todo]
  assignToMe: false
  inProgressStatus: In Progress
  doneStatus: Done

github:
  token: "${ghToken}"

webhook:
  port: 3000

docker:
  image: ${dockerImage}
  memory: 4g

agent:
  model: ${model}
  maxIterations: ${parseInt(maxIter, 10) || 10}

maxConcurrent: 1
`;
  // mode 384 === 0o600: owner read/write only, since the file holds tokens.
  writeFileSync3(CONFIG_FILE, config, { mode: 384 });
  log.success(`Config saved to ${CONFIG_FILE}`);
  console.log(`
Setup complete! Next steps:

1. Add repo: labels to your Linear team
 (e.g. "repo:your-org/your-repo")

2. Tag tickets with a repo: label

3. Start autoclawd:
 $ autoclawd serve

autoclawd serve will auto-create a tunnel and
register the webhook with Linear. Just run it.
`);
});
|
|
2782
|
+
// CLI entry point: parse argv and dispatch to the matching command; any
// rejected action is logged and mapped to a non-zero exit code.
program.parseAsync().catch((err) => {
  log.error(err instanceof Error ? err.message : String(err));
  process.exit(1);
});
|
|
2786
|
+
//# sourceMappingURL=index.js.map
|