@autoclawd/autocode 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Dockerfile +57 -0
- package/README.md +169 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +1762 -0
- package/dist/index.js.map +1 -0
- package/package.json +57 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1762 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/index.ts
|
|
4
|
+
import { Command } from "commander";
|
|
5
|
+
|
|
6
|
+
// src/config.ts
|
|
7
|
+
import { z } from "zod";
|
|
8
|
+
import { readFileSync, existsSync } from "fs";
|
|
9
|
+
import { join } from "path";
|
|
10
|
+
import { homedir } from "os";
|
|
11
|
+
import yaml from "js-yaml";
|
|
12
|
+
// --- configuration schemas (src/config.ts) ---

// Agent behavior: which model to run and how many outer iterations to allow.
var AgentConfigSchema = z.object({
  model: z.string().default("claude-sonnet-4-6"),
  maxIterations: z.number().min(1).max(50).default(10),
  timeout: z.number().optional()
});

// Container settings for the sandboxed workspace.
var DockerConfigSchema = z.object({
  image: z.string().default("node:20"),
  memory: z.string().default("4g"),
  cpus: z.number().optional(),
  // Shell commands executed in the container before Claude Code starts.
  setup: z.array(z.string()).optional(),
  // Additional bind mounts, e.g. ["~/.ssh:/home/autocode/.ssh:ro"].
  volumes: z.array(z.string()).optional(),
  // Docker network mode for the container.
  network: z.enum(["bridge", "host", "none"]).default("bridge")
});

// Linear workspace access and the workflow state names autocode drives.
var LinearConfigSchema = z.object({
  apiKey: z.string(),
  teamId: z.string(),
  statuses: z.array(z.string()).default(["Todo"]),
  assignToMe: z.boolean().default(true),
  doneStatus: z.string().default("Done"),
  inProgressStatus: z.string().default("In Progress")
});

// GitHub API token used to open pull requests.
var GitHubConfigSchema = z.object({
  token: z.string()
});

// Optional webhook listener settings.
var WebhookConfigSchema = z.object({
  port: z.number().default(3000),
  url: z.string().optional(),
  signingSecret: z.string().optional(),
  webhookId: z.string().optional()
});

// Top-level host config (~/.autocode/config.yaml).
var ConfigSchema = z.object({
  linear: LinearConfigSchema,
  github: GitHubConfigSchema,
  docker: DockerConfigSchema.default({}),
  agent: AgentConfigSchema.default({}),
  webhook: WebhookConfigSchema.default({}),
  maxConcurrent: z.number().default(1)
});

// Per-repository overrides (.autocode.yaml in the repo root); agent and
// docker sections are partial so a repo only overrides what it needs.
var RepoLocalConfigSchema = z.object({
  prompt: z.string().optional(),
  base: z.string().default("main"),
  agent: AgentConfigSchema.partial().optional(),
  docker: DockerConfigSchema.partial().optional()
});
|
|
59
|
+
/**
 * Recursively resolve "env:NAME" placeholders in a parsed config value.
 * A string of the form "env:NAME" becomes process.env.NAME; arrays and
 * plain objects are walked recursively; all other values pass through.
 * Throws when a referenced environment variable is unset.
 */
function resolveEnvVars(obj) {
  if (typeof obj === "string") {
    if (!obj.startsWith("env:")) return obj;
    const envName = obj.slice(4);
    const val = process.env[envName];
    if (!val) throw new Error(`Environment variable ${envName} is not set`);
    return val;
  }
  if (Array.isArray(obj)) {
    return obj.map((item) => resolveEnvVars(item));
  }
  if (obj && typeof obj === "object") {
    const resolved = {};
    for (const [key, value] of Object.entries(obj)) {
      resolved[key] = resolveEnvVars(value);
    }
    return resolved;
  }
  return obj;
}
|
|
77
|
+
// Default location of the host-level config: ~/.autocode/config.yaml
var CONFIG_DIR = join(homedir(), ".autocode");
var CONFIG_FILE = join(CONFIG_DIR, "config.yaml");

/**
 * Load and validate the host config from `path` (default: CONFIG_FILE).
 * "env:NAME" placeholders are resolved before schema validation.
 * Throws with a readable message when the file is missing, empty,
 * or fails validation.
 */
function loadConfig(path) {
  const configPath = path ?? CONFIG_FILE;
  if (!existsSync(configPath)) {
    throw new Error(`Config not found: ${configPath}\nRun: autocode init`);
  }
  const raw = yaml.load(readFileSync(configPath, "utf-8"));
  if (!raw || typeof raw !== "object") {
    throw new Error(`Config file is empty or invalid: ${configPath}`);
  }
  const parsed = ConfigSchema.safeParse(resolveEnvVars(raw));
  if (!parsed.success) {
    const issues = parsed.error.issues
      .map((issue) => ` - ${issue.path.join(".")}: ${issue.message}`)
      .join("\n");
    throw new Error(`Invalid config (${configPath}):\n${issues}`);
  }
  return parsed.data;
}
|
|
101
|
+
/**
 * Load the optional repo-local config (.autocode.yaml preferred over
 * .autocode.yml) from the workspace root. Returns undefined when no
 * file exists or the file holds no mapping.
 */
function loadRepoLocalConfig(workspacePath) {
  const candidates = [
    join(workspacePath, ".autocode.yaml"),
    join(workspacePath, ".autocode.yml")
  ];
  const configPath = candidates.find((candidate) => existsSync(candidate));
  if (!configPath) return void 0;
  const raw = yaml.load(readFileSync(configPath, "utf-8"));
  if (!raw || typeof raw !== "object") return void 0;
  return RepoLocalConfigSchema.parse(raw);
}
|
|
110
|
+
/**
 * Merge the host config with optional repo-local overrides. Local
 * values win key-by-key for the agent and docker sections, and each
 * merged section is re-parsed so schema defaults still apply.
 */
function mergeConfigs(host, local) {
  const agent = AgentConfigSchema.parse({ ...host.agent, ...local?.agent });
  const dockerCfg = DockerConfigSchema.parse({ ...host.docker, ...local?.docker });
  return { agent, docker: dockerCfg, prompt: local?.prompt };
}
|
|
121
|
+
/**
 * Extract a GitHub repository URL from ticket labels of the form
 * "repo:<owner>/<name>" or "repo:https://..." (prefix match is
 * case-insensitive). Returns undefined when no label yields a URL.
 */
function parseRepoFromLabels(labels) {
  for (const label of labels) {
    const m = /^repo:(.+)/i.exec(label);
    if (!m) continue;
    const value = m[1].trim();
    if (value.startsWith("https://")) return value;
    if (value.includes("/")) return `https://github.com/${value}`;
  }
  return void 0;
}
|
|
132
|
+
/**
 * Extract a base branch name from a "base:<branch>" ticket label
 * (case-insensitive prefix). Returns the first match, trimmed, or
 * undefined when absent.
 */
function parseBaseFromLabels(labels) {
  for (const label of labels) {
    const m = /^base:(.+)/i.exec(label);
    if (m) return m[1].trim();
  }
  return void 0;
}
|
|
141
|
+
|
|
142
|
+
// src/linear.ts
|
|
143
|
+
import { LinearClient } from "@linear/sdk";
|
|
144
|
+
|
|
145
|
+
// src/logger.ts
|
|
146
|
+
import chalk from "chalk";
|
|
147
|
+
import { appendFileSync, mkdirSync, existsSync as existsSync2 } from "fs";
|
|
148
|
+
import { join as join2 } from "path";
|
|
149
|
+
import { homedir as homedir2 } from "os";
|
|
150
|
+
// --- logging (src/logger.ts) ---
var currentLevel = "info";
var logFilePath;
// Numeric severity ranking; messages below currentLevel are suppressed.
var levels = { debug: 0, info: 1, warn: 2, error: 3 };

/** Set the minimum console log level. */
function setLogLevel(level) {
  currentLevel = level;
}

/** Mirror all log output to ~/.autocode/logs/autocode-YYYY-MM-DD.log. */
function enableFileLogging() {
  const logDir = join2(homedir2(), ".autocode", "logs");
  if (!existsSync2(logDir)) mkdirSync(logDir, { recursive: true });
  const date = new Date().toISOString().slice(0, 10);
  logFilePath = join2(logDir, `autocode-${date}.log`);
}

function shouldLog(level) {
  return levels[level] >= levels[currentLevel];
}

// HH:MM:SS, for console lines.
function timestamp() {
  return new Date().toISOString().slice(11, 19);
}

// Full ISO timestamp, for the log file.
function fullTimestamp() {
  return new Date().toISOString();
}

// Append one line to the log file when file logging is enabled.
// Failures are deliberately swallowed: logging must never crash the app.
function writeToFile(level, msg) {
  if (!logFilePath) return;
  try {
    appendFileSync(logFilePath, `${fullTimestamp()} [${level.padEnd(5)}] ${msg}\n`);
  } catch {
  }
}

// Console + file logger. Console output is colorized and level-gated;
// the file always receives the message (when file logging is enabled).
var log = {
  debug(msg, ...args) {
    writeToFile("DEBUG", msg);
    if (shouldLog("debug")) console.log(chalk.gray(`[${timestamp()}] ${msg}`), ...args);
  },
  info(msg, ...args) {
    writeToFile("INFO", msg);
    if (shouldLog("info")) console.log(chalk.blue(`[${timestamp()}]`), msg, ...args);
  },
  warn(msg, ...args) {
    writeToFile("WARN", msg);
    if (shouldLog("warn")) console.log(chalk.yellow(`[${timestamp()}] WARN`), msg, ...args);
  },
  error(msg, ...args) {
    writeToFile("ERROR", msg);
    if (shouldLog("error")) console.log(chalk.red(`[${timestamp()}] ERROR`), msg, ...args);
  },
  success(msg, ...args) {
    writeToFile("INFO", `\u2713 ${msg}`);
    if (shouldLog("info")) console.log(chalk.green(`[${timestamp()}] \u2713`), msg, ...args);
  },
  ticket(id, msg, ...args) {
    writeToFile("INFO", `[${id}] ${msg}`);
    if (shouldLog("info")) console.log(chalk.cyan(`[${timestamp()}] [${id}]`), msg, ...args);
  }
};
|
|
212
|
+
|
|
213
|
+
// src/linear.ts
|
|
214
|
+
/** Construct a Linear SDK client from the host config. */
function createLinearClient(config) {
  return new LinearClient({ apiKey: config.linear.apiKey });
}

/**
 * Look up a Linear issue by its human identifier (e.g. "ENG-123").
 * Returns a plain ticket object (id, identifier, title, description,
 * labels, repo/base hints parsed from labels, url), or undefined when
 * the identifier is malformed or the issue does not exist.
 */
async function fetchTicket(client, identifier) {
  const m = /^([A-Za-z]+)-(\d+)$/.exec(identifier);
  if (!m) return void 0;
  const teamKey = m[1].toUpperCase();
  const issueNumber = parseInt(m[2], 10);
  const team = await client.team(teamKey);
  const result = await team.issues({
    filter: { number: { eq: issueNumber } },
    first: 1
  });
  const issue = result.nodes[0];
  if (!issue) return void 0;
  const labelConnection = await issue.labels();
  const labelNames = labelConnection.nodes.map((label) => label.name);
  return {
    id: issue.id,
    identifier: issue.identifier,
    title: issue.title,
    description: issue.description ?? "",
    labels: labelNames,
    repoUrl: parseRepoFromLabels(labelNames),
    baseBranch: parseBaseFromLabels(labelNames),
    url: issue.url
  };
}
|
|
244
|
+
/**
 * Move a ticket into the configured "in progress" state. When the state
 * name does not exist on the team, log a warning and leave the ticket
 * unchanged.
 */
async function claimTicket(client, config, ticketId) {
  log.info(`Claiming ticket, moving to "${config.linear.inProgressStatus}"`);
  const team = await client.team(config.linear.teamId);
  const states = await team.states();
  const wanted = config.linear.inProgressStatus.toLowerCase();
  const inProgress = states.nodes.find((state) => state.name.toLowerCase() === wanted);
  if (!inProgress) {
    log.warn(`State "${config.linear.inProgressStatus}" not found, skipping status update`);
    return;
  }
  await client.updateIssue(ticketId, { stateId: inProgress.id });
}

/**
 * Post a completion comment and move the ticket to the configured done
 * state. The state transition is silently skipped when the done state
 * name is not found on the team.
 */
async function completeTicket(client, config, ticketId, comment) {
  await client.createComment({ issueId: ticketId, body: comment });
  const team = await client.team(config.linear.teamId);
  const states = await team.states();
  const wanted = config.linear.doneStatus.toLowerCase();
  const done = states.nodes.find((state) => state.name.toLowerCase() === wanted);
  if (done) {
    await client.updateIssue(ticketId, { stateId: done.id });
  }
}
|
|
266
|
+
/**
 * Tag a ticket with a "base:<branch>" label so follow-up tickets can be
 * stacked on this branch; the team label is created on demand. Best
 * effort: any failure is logged at debug level and swallowed.
 */
async function addBranchLabel(client, ticketId, branchName) {
  try {
    const issue = await client.issue(ticketId);
    const team = await issue.team;
    if (!team) return;
    const labelName = `base:${branchName}`;
    const teamLabels = await team.labels();
    let labelId = teamLabels.nodes.find((label) => label.name === labelName)?.id;
    if (!labelId) {
      // Label does not exist yet on this team -- create it.
      const created = await client.createIssueLabel({
        name: labelName,
        teamId: team.id
      });
      labelId = (await created.issueLabel)?.id;
    }
    if (!labelId) return;
    const currentLabels = await issue.labels();
    const labelIds = currentLabels.nodes.map((label) => label.id);
    if (labelIds.includes(labelId)) return;
    await client.updateIssue(ticketId, { labelIds: [...labelIds, labelId] });
    log.info(`Added "base:${branchName}" label to ticket for stacked diffs`);
  } catch (err) {
    log.debug(`Could not add branch label: ${err instanceof Error ? err.message : err}`);
  }
}

/** Post a failure comment on the ticket explaining why autocode gave up. */
async function failTicket(client, ticketId, reason) {
  await client.createComment({
    issueId: ticketId,
    body: `**autocode failed:**\n\n${reason}`
  });
}
|
|
304
|
+
|
|
305
|
+
// src/github.ts
|
|
306
|
+
import { Octokit } from "@octokit/rest";
|
|
307
|
+
/** Construct an authenticated Octokit client from the host config. */
function createOctokit(config) {
  return new Octokit({ auth: config.github.token });
}
|
|
310
|
+
/**
 * Parse { owner, repo } out of a GitHub URL (https or ssh form).
 * Fix: the previous pattern excluded "." from the repo segment to drop
 * a ".git" suffix, which truncated repo names that legitimately contain
 * dots (e.g. "next.js" -> "next"). Now the full segment is captured and
 * only a trailing ".git" is stripped.
 * Throws when the string does not look like a GitHub repository URL.
 */
function parseRepoUrl(url) {
  const match = url.match(/github\.com[/:]([^/]+)\/([^/#?\s]+?)(?:\.git)?(?:[/#?]|$)/);
  if (!match) throw new Error(`Cannot parse GitHub URL: ${url}`);
  return { owner: match[1], repo: match[2] };
}
|
|
315
|
+
/**
 * Open a pull request for `opts.branch` against `opts.baseBranch`, or,
 * when an open PR for that head already exists, refresh its body
 * instead. Returns { url, number }.
 */
async function openPR(octokit, opts) {
  const { owner, repo } = parseRepoUrl(opts.repoUrl);
  const existing = await octokit.rest.pulls.list({
    owner,
    repo,
    head: `${owner}:${opts.branch}`,
    state: "open"
  });
  const [openPr] = existing.data;
  if (openPr) {
    log.info(`PR already exists: ${openPr.html_url}`);
    await octokit.rest.pulls.update({
      owner,
      repo,
      pull_number: openPr.number,
      body: opts.body
    });
    return { url: openPr.html_url, number: openPr.number };
  }
  const created = await octokit.rest.pulls.create({
    owner,
    repo,
    title: opts.title,
    body: opts.body,
    head: opts.branch,
    base: opts.baseBranch
  });
  log.success(`PR opened: ${created.data.html_url}`);
  return { url: created.data.html_url, number: created.data.number };
}
|
|
345
|
+
|
|
346
|
+
// src/webhook.ts
|
|
347
|
+
import { createServer } from "http";
|
|
348
|
+
import { createHmac, timingSafeEqual } from "crypto";
|
|
349
|
+
|
|
350
|
+
// src/docker.ts
|
|
351
|
+
import Docker from "dockerode";
|
|
352
|
+
import { PassThrough } from "stream";
|
|
353
|
+
import { homedir as homedir3, tmpdir } from "os";
|
|
354
|
+
import { join as join3 } from "path";
|
|
355
|
+
import { existsSync as existsSync3, readFileSync as readFileSync2, writeFileSync, mkdtempSync, rmSync } from "fs";
|
|
356
|
+
import { execSync } from "child_process";
|
|
357
|
+
// Shared Docker client (connects via the default socket).
var docker = new Docker();

/**
 * Fail fast with an actionable message when the Docker daemon is not
 * reachable.
 */
async function checkDockerAvailable() {
  try {
    await docker.ping();
  } catch (err) {
    throw new Error(
      "Cannot connect to Docker daemon. Is Docker running?\nInstall: https://docs.docker.com/get-docker/"
    );
  }
}
|
|
367
|
+
// Dockerfile for the batteries-included "autocode-base" image:
// node:20 plus common toolchains (C build tools, Python, Ruby, Go) and
// Claude Code preinstalled. NOTE: this is a template literal, so
// backslashes and ${...} are escaped (\\ and \${...}) to survive into
// the generated Dockerfile.
var AUTOCODE_BASE_DOCKERFILE = `FROM node:20-bookworm
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y --no-install-recommends \\
build-essential git curl wget ca-certificates openssh-client jq unzip \\
python3 python3-pip python3-venv python3-dev \\
ruby ruby-dev \\
&& rm -rf /var/lib/apt/lists/*
ARG GO_VERSION=1.22.2
RUN curl -fsSL "https://go.dev/dl/go\${GO_VERSION}.linux-$(dpkg --print-architecture).tar.gz" \\
| tar -xz -C /usr/local
ENV PATH="/usr/local/go/bin:\${PATH}"
RUN npm install -g @anthropic-ai/claude-code@latest
WORKDIR /workspace
`;
|
|
381
|
+
/**
 * Make sure `image` is usable locally: no-op when already present,
 * built from the embedded Dockerfile for "autocode-base", otherwise
 * pulled from the registry. Throws with guidance when the build or pull
 * fails.
 */
async function ensureImage(image) {
  try {
    await docker.getImage(image).inspect();
    log.debug(`Image ${image} found locally`);
    return;
  } catch {
    // Not present locally -- fall through to build or pull.
  }
  if (image === "autocode-base") {
    log.info("Building autocode-base image (first time only, takes a few minutes)...");
    const buildDir = mkdtempSync(join3(tmpdir(), "autocode-build-"));
    try {
      writeFileSync(join3(buildDir, "Dockerfile"), AUTOCODE_BASE_DOCKERFILE);
      // Cap the build at 10 minutes.
      execSync(`docker build -t autocode-base ${buildDir}`, {
        stdio: "inherit",
        timeout: 6e5
      });
      log.success("autocode-base image built");
      return;
    } catch (err) {
      throw new Error(
        `Failed to build autocode-base image.\nYou can build manually: docker build -t autocode-base .\nOr use a different image in your config (e.g. node:20).\nError: ${err instanceof Error ? err.message : err}`
      );
    } finally {
      rmSync(buildDir, { recursive: true, force: true });
    }
  }
  log.info(`Pulling image ${image} (this may take a moment)...`);
  await new Promise((resolve, reject) => {
    docker.pull(image, (err, stream) => {
      if (err) {
        const msg = err.message ?? "";
        if (msg.includes("not found") || msg.includes("404")) {
          return reject(new Error(`Docker image "${image}" not found. Check the image name in your config.`));
        }
        return reject(err);
      }
      docker.modem.followProgress(stream, (progressErr) => {
        if (progressErr) {
          reject(progressErr);
        } else {
          log.success(`Image ${image} pulled`);
          resolve();
        }
      });
    });
  });
}
|
|
429
|
+
/**
 * Create and start a workspace container, then prepare it for an agent
 * run: verifies a shell exists, ensures curl/git/Claude Code are
 * present (installing them when possible), creates an unprivileged
 * "autocode" user, auto-installs detected language runtimes, runs the
 * configured setup commands as root, and copies Claude credentials in.
 * Returns the wrapped handle { id, instance }.
 */
async function createContainer(opts) {
  const { dockerConfig, name } = opts;
  await ensureImage(dockerConfig.image);
  // Workspace bind mount first, then user-configured extra volumes
  // (a leading "~" before "/" or ":" is expanded to the host home dir).
  const binds = [`${opts.workspacePath}:/workspace`];
  if (dockerConfig.volumes?.length) {
    for (const vol of dockerConfig.volumes) {
      binds.push(vol.replace(/^~(?=\/|:)/, homedir3()));
    }
  }
  const container = await docker.createContainer({
    Image: dockerConfig.image,
    name: `autocode-${name}`,
    // Keep the container alive indefinitely; all work happens via exec.
    Cmd: ["sleep", "infinity"],
    WorkingDir: "/workspace",
    User: "root",
    HostConfig: {
      Binds: binds,
      Memory: parseMemory(dockerConfig.memory),
      NanoCpus: dockerConfig.cpus ? dockerConfig.cpus * 1e9 : void 0,
      NetworkMode: dockerConfig.network ?? "bridge"
    },
    Env: ["HOME=/root"]
  });
  await container.start();
  log.info(`Container ${name} started (${container.id.slice(0, 12)})`);
  const wrapped = { id: container.id, instance: container };
  // Everything below requires a POSIX shell; bail out early if absent.
  const shellCheck = await exec(wrapped, ["sh", "-c", "echo ok"]);
  if (shellCheck.exitCode !== 0 || !shellCheck.stdout.includes("ok")) {
    await destroyContainer(wrapped);
    throw new Error(
      `Docker image "${dockerConfig.image}" has no working shell (sh).
autocode requires a shell to set up the container. Use a standard base image
(e.g. node:20, python:3.12, ubuntu:24.04) or ensure /bin/sh is available.`
    );
  }
  // curl is best-effort (needed by the standalone Claude installer below).
  const curlCheck = await exec(wrapped, ["which", "curl"]);
  if (curlCheck.exitCode !== 0) {
    log.debug("curl not found, installing...");
    // Try apt, then apk, then yum; "|| true" keeps this non-fatal.
    await exec(wrapped, [
      "sh",
      "-c",
      "(apt-get update -qq && apt-get install -y -qq curl 2>/dev/null) || (apk add --no-cache curl 2>/dev/null) || (yum install -y curl 2>/dev/null) || true"
    ]);
  }
  // git is required (clone/commit happen inside the container).
  const gitCheck = await exec(wrapped, ["git", "--version"]);
  if (gitCheck.exitCode !== 0) {
    log.info("Git not found in image, installing...");
    const installGit = await exec(wrapped, [
      "sh",
      "-c",
      '(apt-get update -qq && apt-get install -y -qq git 2>/dev/null) || (apk add --no-cache git 2>/dev/null) || (yum install -y git 2>/dev/null) || (echo "Cannot install git" && exit 1)'
    ]);
    if (installGit.exitCode !== 0) {
      throw new Error(`Docker image "${dockerConfig.image}" has no git and autocode could not install it. Use an image with git pre-installed.`);
    }
  }
  // Claude Code CLI: prefer npm, fall back to the standalone installer.
  const claudeCheck = await exec(wrapped, ["which", "claude"]);
  if (claudeCheck.exitCode !== 0) {
    log.info("Installing Claude Code in container...");
    const npmCheck = await exec(wrapped, ["which", "npm"]);
    let installed = false;
    if (npmCheck.exitCode === 0) {
      const npmResult = await exec(wrapped, [
        "npm",
        "install",
        "-g",
        "@anthropic-ai/claude-code@latest"
      ]);
      installed = npmResult.exitCode === 0;
    }
    if (!installed) {
      log.info("npm not available, installing Claude Code standalone binary...");
      // 2 minute cap on the download+install.
      const standaloneResult = await exec(wrapped, [
        "sh",
        "-c",
        "curl -fsSL https://claude.ai/install.sh | sh"
      ], { timeout: 12e4 });
      if (standaloneResult.exitCode !== 0) {
        throw new Error(
          `Failed to install Claude Code. Tried npm and standalone installer.
Error: ${standaloneResult.stderr}
Use an image with Claude Code pre-installed, or ensure curl is available.`
        );
      }
    }
    log.info("Claude Code installed");
  } else {
    log.debug("Claude Code already installed in image");
  }
  // Create the unprivileged "autocode" user (uid 1001). The chained
  // fallbacks cover Debian adduser, BusyBox adduser, useradd, and a
  // last-resort manual /etc/passwd entry; then hand it the home dir
  // and workspace.
  await exec(wrapped, [
    "sh",
    "-c",
    '( getent passwd autocode >/dev/null 2>&1 ) || ( adduser --disabled-password --gecos "" --uid 1001 --home /home/autocode autocode 2>/dev/null ) || ( adduser -D -u 1001 -h /home/autocode autocode 2>/dev/null ) || ( useradd -m -u 1001 -d /home/autocode autocode 2>/dev/null ) || ( echo "autocode:x:1001:1001::/home/autocode:/bin/sh" >> /etc/passwd && mkdir -p /home/autocode ); mkdir -p /home/autocode && chown -R autocode /home/autocode /workspace'
  ]);
  await autoInstallRuntimes(wrapped);
  // User-configured setup commands run as root in /workspace; any
  // failure aborts container preparation.
  if (dockerConfig.setup?.length) {
    log.info(`Running ${dockerConfig.setup.length} setup command(s)...`);
    for (const cmd of dockerConfig.setup) {
      const result = await exec(wrapped, ["sh", "-c", cmd]);
      if (result.exitCode !== 0) {
        throw new Error(`Setup command failed: ${cmd}
${result.stderr}`);
      }
    }
  }
  await copyClaudeCredentials(wrapped);
  return wrapped;
}
|
|
537
|
+
/**
 * Detect the project's stack from manifest files in /workspace and
 * best-effort install any missing language runtimes (Python, Go, Ruby)
 * via apt or apk. Logs a warning when installation is not possible so
 * the agent can still proceed.
 */
async function autoInstallRuntimes(container) {
  const detect = await exec(container, [
    "sh",
    "-c",
    "ls /workspace/package.json /workspace/pyproject.toml /workspace/setup.py /workspace/requirements.txt /workspace/Pipfile /workspace/go.mod /workspace/Cargo.toml /workspace/Gemfile /workspace/pom.xml /workspace/build.gradle /workspace/build.gradle.kts /workspace/composer.json /workspace/mix.exs 2>/dev/null || true"
  ]);
  const files = detect.stdout.trim().split("\n").filter(Boolean);
  if (files.length === 0) return;
  // Probe which interpreters already exist in the image.
  const checks = await exec(container, [
    "sh",
    "-c",
    'echo "node:$(which node 2>/dev/null || echo missing)";echo "python:$(which python3 2>/dev/null || which python 2>/dev/null || echo missing)";echo "go:$(which go 2>/dev/null || echo missing)";echo "ruby:$(which ruby 2>/dev/null || echo missing)";'
  ]);
  const installed = new Map();
  for (const line of checks.stdout.trim().split("\n")) {
    const [lang, path] = line.split(":");
    installed.set(lang, path !== "missing");
  }
  const pythonMarkers = ["pyproject.toml", "setup.py", "requirements.txt", "Pipfile"];
  const hasPython = files.some((f) => pythonMarkers.some((marker) => f.includes(marker)));
  const hasGo = files.some((f) => f.includes("go.mod"));
  const hasRuby = files.some((f) => f.includes("Gemfile"));
  const needs = [];
  if (hasPython && !installed.get("python")) needs.push("python3 python3-pip python3-venv");
  if (hasGo && !installed.get("go")) needs.push("golang");
  if (hasRuby && !installed.get("ruby")) needs.push("ruby ruby-dev");
  if (needs.length === 0) return;
  const packages = needs.join(" ");
  log.info(`Detected stack requires: ${packages}`);
  // apt first, then apk (with Alpine package-name substitutions).
  const installResult = await exec(container, [
    "sh",
    "-c",
    `(apt-get update -qq && apt-get install -y -qq ${packages} 2>/dev/null) || (apk add --no-cache ${packages.replace("python3-pip", "py3-pip").replace("python3-venv", "").replace("ruby-dev", "ruby-dev build-base")} 2>/dev/null) || echo "Warning: could not auto-install ${packages}"`
  ]);
  if (installResult.exitCode === 0) {
    log.success("Runtime dependencies installed");
  } else {
    log.warn(`Could not auto-install ${packages} \u2014 Claude may not be able to run tests for this stack`);
  }
}
|
|
584
|
+
/**
 * Copy the host's Claude credentials (~/.claude/.credentials.json) and
 * settings (~/.claude/settings.json) into the container at
 * /home/autocode/.claude, owned by the "autocode" user. File contents
 * are shuttled via base64 to survive shell quoting. Improvement: the
 * two previously duplicated copy stanzas share one local helper.
 */
async function copyClaudeCredentials(container) {
  const home = homedir3();
  await exec(container, [
    "sh",
    "-c",
    "mkdir -p /home/autocode/.claude && chown autocode /home/autocode/.claude"
  ]);
  // Copy one host file into /home/autocode/.claude/<fileName> when it
  // exists on the host; returns whether a copy happened.
  const copyIfPresent = async (fileName) => {
    const srcPath = join3(home, ".claude", fileName);
    if (!existsSync3(srcPath)) return false;
    const b64 = Buffer.from(readFileSync2(srcPath, "utf-8")).toString("base64");
    await exec(container, [
      "sh",
      "-c",
      `echo '${b64}' | base64 -d > /home/autocode/.claude/${fileName} && chown autocode /home/autocode/.claude/${fileName}`
    ]);
    return true;
  };
  if (await copyIfPresent(".credentials.json")) {
    log.debug("Copied .credentials.json");
  }
  await copyIfPresent("settings.json");
  log.info("Claude credentials copied into container");
}
|
|
614
|
+
/**
 * Run a command inside the container via the Docker exec API and
 * collect its output. Resolves with { exitCode, stdout, stderr }.
 * When opts.timeout (ms) elapses first, the stream is destroyed and the
 * promise resolves with exit code 124 (mirroring the coreutils timeout
 * convention) plus a marker appended to stderr.
 * opts may also carry workdir (default "/workspace"), env, and user.
 */
async function exec(container, cmd, opts) {
  const execution = await container.instance.exec({
    Cmd: cmd,
    AttachStdout: true,
    AttachStderr: true,
    WorkingDir: opts?.workdir ?? "/workspace",
    Env: opts?.env,
    User: opts?.user
  });
  const stream = await execution.start({ hijack: true, stdin: false });
  let stdout = "";
  let stderr = "";
  return new Promise((resolve, reject) => {
    // Docker multiplexes stdout/stderr over one stream; demux into two.
    const stdoutStream = new PassThrough();
    const stderrStream = new PassThrough();
    docker.modem.demuxStream(stream, stdoutStream, stderrStream);
    stdoutStream.on("data", (chunk) => {
      stdout += chunk.toString();
    });
    stderrStream.on("data", (chunk) => {
      stderr += chunk.toString();
    });
    let timer;
    if (opts?.timeout) {
      timer = setTimeout(() => {
        // Destroying the stream triggers "end"/"error" later, but the
        // promise has already settled here, so those are no-ops.
        stream.destroy();
        resolve({ exitCode: 124, stdout, stderr: stderr + "\n[autocode] exec timed out" });
      }, opts.timeout);
    }
    stream.on("end", async () => {
      if (timer) clearTimeout(timer);
      try {
        // The real exit code is only available by inspecting the exec
        // instance after the stream closes.
        const info = await execution.inspect();
        resolve({ exitCode: info.ExitCode ?? 1, stdout, stderr });
      } catch (e) {
        reject(e);
      }
    });
    stream.on("error", (e) => {
      if (timer) clearTimeout(timer);
      reject(e);
    });
  });
}
|
|
658
|
+
/**
 * Stop and remove every container whose name starts with "autocode-"
 * (leftovers from interrupted runs). Errors on individual containers
 * are ignored. Returns the number of containers removed.
 */
async function cleanupOrphanedContainers() {
  const allContainers = await docker.listContainers({ all: true });
  const orphans = allContainers.filter(
    (c) => c.Names.some((containerName) => containerName.startsWith("/autocode-"))
  );
  let cleaned = 0;
  for (const c of orphans) {
    try {
      const container = docker.getContainer(c.Id);
      if (c.State === "running") {
        await container.stop({ t: 5 });
      }
      await container.remove({ force: true });
      log.info(`Cleaned up orphaned container ${c.Names[0]} (${c.Id.slice(0, 12)})`);
      cleaned++;
    } catch {
      // Already gone or busy -- skip it.
    }
  }
  return cleaned;
}
|
|
678
|
+
/**
 * Best-effort teardown: stop the container with a 5 second grace
 * period, then force-remove it. Errors from either step are
 * deliberately swallowed so removal is still attempted after a failed
 * stop and teardown never throws.
 */
async function destroyContainer(container) {
  try {
    await container.instance.stop({ t: 5 });
  } catch {
  }
  try {
    await container.instance.remove({ force: true });
  } catch {
  }
  log.info(`Container ${container.id.slice(0, 12)} destroyed`);
}
|
|
689
|
+
/**
 * Convert a human memory size ("4g", "512m", "1.5k") to bytes for the
 * Docker HostConfig. Accepts k/m/g/t suffixes, case-insensitive, with
 * an optional decimal component; throws on any other format or on a
 * non-positive value.
 */
function parseMemory(mem) {
  const match = /^(\d+(?:\.\d+)?)(k|m|g|t)$/i.exec(mem);
  if (!match) throw new Error(`Invalid memory format: "${mem}". Use a number followed by k, m, g, or t (e.g. "4g", "512m")`);
  const value = parseFloat(match[1]);
  if (value <= 0) throw new Error(`Memory must be greater than 0: "${mem}"`);
  const UNIT_BYTES = {
    k: 1024,
    m: 1024 ** 2,
    g: 1024 ** 3,
    t: 1024 ** 4
  };
  return Math.round(value * UNIT_BYTES[match[2].toLowerCase()]);
}
|
|
703
|
+
|
|
704
|
+
// src/agent.ts
|
|
705
|
+
var DEFAULT_ITERATION_TIMEOUT_MS = 30 * 60 * 1e3;
|
|
706
|
+
var RATE_LIMIT_PATTERNS = [
|
|
707
|
+
/rate.?limit/i,
|
|
708
|
+
/too many requests/i,
|
|
709
|
+
/\b429\b.*(?:error|too many|rate|limit|retry)/i,
|
|
710
|
+
/(?:error|status|code|http)[:\s]*429\b/i,
|
|
711
|
+
/(?:api|server|model)\s+(?:is\s+)?overloaded/i,
|
|
712
|
+
/over\s*capacity/i
|
|
713
|
+
];
|
|
714
|
+
var COMPLETION_PATTERNS = [
|
|
715
|
+
/\[autocode:done\]/i,
|
|
716
|
+
/all tasks? completed/i
|
|
717
|
+
];
|
|
718
|
+
function isRateLimited(output) {
|
|
719
|
+
return RATE_LIMIT_PATTERNS.some((p) => p.test(output));
|
|
720
|
+
}
|
|
721
|
+
function isCompleted(output) {
|
|
722
|
+
return COMPLETION_PATTERNS.some((p) => p.test(output));
|
|
723
|
+
}
|
|
724
|
+
// Estimate how long to pause after a rate-limit, in milliseconds.
// Looks for a "retry-after: <seconds>" hint in the agent output; falls back
// to 60s when none is present. The parsed value is clamped to [1s, 1h] so a
// garbled or hostile header can't stall the loop for hours (fix: previously
// unclamped, and parseInt was called without an explicit radix).
function estimateResetMs(output) {
  const match = output.match(/retry.?after[:\s]*(\d+)/i);
  if (match) {
    const seconds = Number.parseInt(match[1], 10);
    return Math.min(Math.max(seconds * 1e3, 1e3), 36e5);
  }
  return 6e4;
}
|
|
729
|
+
// Give up after this many consecutive-or-cumulative rate-limit pauses.
var MAX_RATE_LIMIT_RETRIES = 10;
// Drive Claude Code inside the container until it reports completion, the
// iteration budget runs out, or rate-limit retries are exhausted.
// Returns { iterations, success, lastOutput }.
async function runAgentLoop(opts) {
  const { container, agentConfig, prompt, ticketId, onIteration } = opts;
  let lastOutput = "";
  let rateLimitRetries = 0;
  // Ship the prompt into the container via base64 so shell metacharacters in
  // the prompt text can't break (or inject into) the sh -c command line.
  const promptB64 = Buffer.from(prompt).toString("base64");
  await exec(container, [
    "sh",
    "-c",
    `mkdir -p /tmp/autocode && echo '${promptB64}' | base64 -d > /tmp/autocode/prompt.md && chmod 644 /tmp/autocode/prompt.md`
  ]);
  for (let i = 1; i <= agentConfig.maxIterations; i++) {
    log.ticket(ticketId, `Iteration ${i}/${agentConfig.maxIterations}`);
    // The prompt is re-read from the file on every iteration; the command is
    // a single sh -c string, so the claude flags are joined with spaces.
    const cmd = [
      "sh",
      "-c",
      [
        "claude",
        "-p",
        '"$(cat /tmp/autocode/prompt.md)"',
        "--model",
        agentConfig.model,
        "--output-format",
        "text",
        "--max-turns",
        "50",
        "--dangerously-skip-permissions",
        "--verbose"
      ].join(" ")
    ];
    // config.timeout is in seconds; default is 30 minutes per iteration.
    const timeoutMs = agentConfig.timeout ? agentConfig.timeout * 1e3 : DEFAULT_ITERATION_TIMEOUT_MS;
    const result = await exec(container, cmd, {
      user: "autocode",
      env: ["HOME=/home/autocode", "CLAUDE_CODE_DISABLE_NONINTERACTIVE_TUTORIAL=1"],
      timeout: timeoutMs
    });
    const combined = result.stdout + result.stderr;
    lastOutput = combined;
    const snippet = combined.slice(-500).trim();
    if (snippet) {
      log.debug(`Agent output (last 500 chars): ${snippet}`);
    }
    const rateLimited = isRateLimited(combined);
    // A clean exit on any iteration after the first also counts as done,
    // even without an explicit [autocode:done] marker. (&& binds tighter
    // than ||, so this is: marker OR (exitCode===0 AND i>1).)
    const completed = isCompleted(combined) || result.exitCode === 0 && i > 1;
    const agentResult = {
      iteration: i,
      exitCode: result.exitCode,
      stdout: result.stdout,
      stderr: result.stderr,
      rateLimited,
      completed
    };
    // Optional observer hook; failures in it are not caught here.
    onIteration?.(agentResult);
    if (completed) {
      log.ticket(ticketId, `Completed after ${i} iterations`);
      return { iterations: i, success: true, lastOutput };
    }
    if (rateLimited) {
      rateLimitRetries++;
      if (rateLimitRetries > MAX_RATE_LIMIT_RETRIES) {
        log.ticket(ticketId, `Rate limited ${rateLimitRetries} times, giving up`);
        return { iterations: i, success: false, lastOutput };
      }
      const waitMs = estimateResetMs(combined);
      log.ticket(ticketId, `Rate limited (${rateLimitRetries}/${MAX_RATE_LIMIT_RETRIES}) \u2014 pausing ${Math.round(waitMs / 1e3)}s`);
      await sleep(waitMs);
      // Rate-limited iterations don't consume the iteration budget: rewind
      // the counter so the retry reuses the same iteration number.
      i--;
      continue;
    }
    if (result.exitCode !== 0) {
      log.ticket(ticketId, `Non-zero exit (${result.exitCode}), continuing...`);
    }
  }
  log.ticket(ticketId, `Max iterations (${agentConfig.maxIterations}) reached`);
  return { iterations: agentConfig.maxIterations, success: false, lastOutput };
}
|
|
805
|
+
// Promise-based delay: resolves (with undefined) after ms milliseconds.
function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
|
808
|
+
|
|
809
|
+
// src/git.ts
|
|
810
|
+
// Configure git identity inside the container and mark /workspace as a safe
// directory (the bind-mounted repo is owned by a different uid than the
// in-container "autocode" user).
async function setupRepo(container, opts) {
  const asUser = { user: "autocode" };
  const configCommands = [
    ["git", "config", "user.name", "autocode"],
    ["git", "config", "user.email", "autocode@localhost"],
    ["git", "config", "--global", "safe.directory", "/workspace"]
  ];
  for (const command of configCommands) {
    await exec(container, command, asUser);
  }
  log.info(`Git configured, on branch ${opts.branchName}`);
}
|
|
817
|
+
// Commit any uncommitted work in the container's /workspace and push the
// ticket branch to origin using an ephemeral credential helper.
// Returns { pushed, commitCount }. Does not throw on push failure; it logs
// a redacted error and reports pushed:false instead.
// Fix: parse the rev-list count with an explicit radix via Number.parseInt.
async function commitAndPush(container, opts) {
  const { branchName, ticketId, title, githubToken } = opts;
  const u = { user: "autocode" };
  // Stage and commit only if the working tree is dirty — the agent may have
  // already committed everything itself.
  const status = await exec(container, ["git", "status", "--porcelain"], u);
  if (status.stdout.trim()) {
    await exec(container, ["git", "add", "-A"], u);
    const message = `${ticketId}: ${title}`;
    await exec(container, ["git", "commit", "-m", message], u);
  }
  // Count commits that exist locally but on no remote ref.
  const countResult = await exec(container, [
    "git",
    "rev-list",
    "--count",
    `HEAD`,
    "--not",
    "--remotes"
  ], u);
  const commitCount = Number.parseInt(countResult.stdout.trim(), 10) || 0;
  if (commitCount === 0) {
    log.warn("No commits to push");
    return { pushed: false, commitCount: 0 };
  }
  // Inline credential helper so the token never lands on disk in the repo.
  // NOTE(review): the token is interpolated into a single-quoted shell
  // string — assumes GitHub tokens never contain quotes; verify upstream.
  await exec(container, [
    "sh",
    "-c",
    `git config credential.helper '!f() { echo "password=${githubToken}"; echo "username=x-access-token"; }; f'`
  ], u);
  const pushResult = await exec(container, ["git", "push", "-u", "origin", branchName], u);
  if (pushResult.exitCode !== 0) {
    // Redact any token that git echoed back in a remote URL.
    const safeStderr = pushResult.stderr.replace(/x-access-token:[^\s@]+/g, "x-access-token:***");
    log.error(`Push failed: ${safeStderr}`);
    return { pushed: false, commitCount };
  }
  log.success(`Pushed ${commitCount} commit(s) to ${branchName}`);
  return { pushed: true, commitCount };
}
|
|
853
|
+
|
|
854
|
+
// src/worker.ts
|
|
855
|
+
import { mkdtempSync as mkdtempSync2, rmSync as rmSync2, writeFileSync as writeFileSync2 } from "fs";
|
|
856
|
+
import { join as join4 } from "path";
|
|
857
|
+
import { tmpdir as tmpdir2 } from "os";
|
|
858
|
+
import { execSync as execSync2 } from "child_process";
|
|
859
|
+
// Assemble the agent prompt: optional repo-level preamble, the ticket's
// title/description, and a fixed block of working instructions (including
// the [autocode:done] completion marker the agent loop watches for).
function buildPrompt(ticket, repoPrompt) {
  const lines = [];
  if (repoPrompt) {
    lines.push(repoPrompt, "");
  }
  lines.push(`## Task: ${ticket.identifier} \u2014 ${ticket.title}`, "");
  if (ticket.description) {
    lines.push(ticket.description);
  }
  lines.push(
    "",
    "## Instructions",
    "- Start by reading CLAUDE.md if it exists \u2014 it has project-specific guidance",
    "- Read the codebase to understand the context before making changes",
    "- Work through the task step by step",
    '- After making changes, run: git add -A && git commit -m "description of changes"',
    "- Commit after each logical step (do not batch all changes into one commit)",
    "- If the task has a checklist, work through items in order and commit after each one",
    "- Run any existing tests before finishing to verify your changes",
    "- IMPORTANT: You MUST commit your changes with git before finishing",
    "- When all work is complete and committed, include [autocode:done] in your final output"
  );
  return lines.join("\n");
}
|
|
883
|
+
// Turn arbitrary text into a branch-name-safe slug: lowercase, runs of
// non-alphanumerics collapsed to "-", edge dashes trimmed, capped at 50 chars.
function slugify(s) {
  const dashed = s.toLowerCase().replace(/[^a-z0-9]+/g, "-");
  const trimmed = dashed.replace(/^-|-$/g, "");
  return trimmed.slice(0, 50);
}
|
|
886
|
+
// Reject git ref names that are empty, contain whitespace or git-special
// characters (~ ^ : ? * [ \), contain "..", or begin with "-" (which git
// would parse as an option). Throws with the offending label/value.
function assertSafeRef(name, label) {
  const unsafe = !name || /[\s~^:?*\[\\]|\.\./.test(name) || name.startsWith("-");
  if (unsafe) {
    throw new Error(`Invalid ${label}: "${name}"`);
  }
}
|
|
891
|
+
// Write a throwaway GIT_ASKPASS script that prints the GitHub token, inside
// a fresh private temp dir (mode 0700). Caller is responsible for removing
// the returned dir when the git operation finishes.
function createAskpass(githubToken) {
  const dir = mkdtempSync2(join4(tmpdir2(), "autocode-cred-"));
  const path = join4(dir, "askpass.sh");
  const script = ["#!/bin/sh", `echo "${githubToken}"`, ""].join("\n");
  writeFileSync2(path, script, { mode: 0o700 });
  return { dir, path };
}
|
|
899
|
+
// Environment for non-interactive git: inherit the current process env,
// point GIT_ASKPASS at our credential script, and forbid terminal prompts.
function gitEnv(askpassPath) {
  const env = Object.assign({}, process.env);
  env.GIT_ASKPASS = askpassPath;
  env.GIT_TERMINAL_PROMPT = "0";
  return env;
}
|
|
902
|
+
// Ask the remote which branch HEAD points at (via `git ls-remote --symref`).
// Falls back to "main" on any failure. The askpass temp dir is always
// cleaned up, success or not.
function detectDefaultBranch(repoUrl, githubToken) {
  const askpass = createAskpass(githubToken);
  const authedUrl = repoUrl.replace("https://", "https://x-access-token@");
  try {
    const output = execSync2(
      `git ls-remote --symref -- ${authedUrl} HEAD`,
      { stdio: "pipe", timeout: 3e4, env: gitEnv(askpass.path), encoding: "utf-8" }
    );
    // Expected shape: "ref: refs/heads/<branch>\tHEAD"
    const symref = output.match(/ref:\s+refs\/heads\/(\S+)\s+HEAD/);
    if (symref) {
      return symref[1];
    }
  } catch {
    // network/auth failure — fall through to the default below
  } finally {
    rmSync2(askpass.dir, { recursive: true, force: true });
  }
  return "main";
}
|
|
918
|
+
// Shallow-clone (depth 50) the given branch of repoUrl into a fresh temp
// dir and return its path. The branch name is validated against git-option
// injection first. Fix: previously a failed clone left the empty temp
// checkout dir behind; it is now removed before the error propagates.
// The askpass credential dir is always removed.
function cloneToTemp(repoUrl, baseBranch, githubToken) {
  assertSafeRef(baseBranch, "base branch");
  const workDir = mkdtempSync2(join4(tmpdir2(), "autocode-"));
  const askpass = createAskpass(githubToken);
  const authedUrl = repoUrl.replace("https://", "https://x-access-token@");
  try {
    execSync2(`git clone --depth=50 -b ${baseBranch} -- ${authedUrl} ${workDir}`, {
      stdio: "pipe",
      timeout: 12e4,
      env: gitEnv(askpass.path)
    });
  } catch (err) {
    // Don't leak the temp checkout dir when the clone fails.
    rmSync2(workDir, { recursive: true, force: true });
    throw err;
  } finally {
    rmSync2(askpass.dir, { recursive: true, force: true });
  }
  return workDir;
}
|
|
934
|
+
// End-to-end pipeline for one Linear ticket: claim it, clone the repo,
// run the agent loop in a container, push a branch, open a PR, and update
// the ticket. Always returns a result object (never throws); the container
// and temp checkout are torn down in the finally block regardless of outcome.
async function executeTicket(opts) {
  const { ticket, config, linearClient, octokit } = opts;
  // A repo must be attached to the ticket via a "repo:owner/name" label.
  if (!ticket.repoUrl) {
    return {
      ticketId: ticket.identifier,
      success: false,
      error: "No repo:owner/name label on ticket",
      iterations: 0
    };
  }
  // HTTPS-only: the credential flow below injects an x-access-token user
  // into an https:// URL.
  if (!ticket.repoUrl.startsWith("https://")) {
    return {
      ticketId: ticket.identifier,
      success: false,
      error: `Invalid repo URL: "${ticket.repoUrl}". Only HTTPS URLs are supported.`,
      iterations: 0
    };
  }
  const branchName = `autocode/${ticket.identifier}-${slugify(ticket.title)}`;
  // Docker container names only allow [a-z0-9_.-]; timestamp keeps it unique.
  const containerName = `${ticket.identifier}-${Date.now()}`.toLowerCase().replace(/[^a-z0-9_.-]/g, "-");
  let workDir;
  let container;
  try {
    // Mark the ticket in-progress in Linear before doing any work.
    await claimTicket(linearClient, config, ticket.id);
    log.ticket(ticket.identifier, `Claimed \u2014 ${ticket.repoUrl}`);
    const detectedBase = detectDefaultBranch(ticket.repoUrl, config.github.token);
    log.ticket(ticket.identifier, `Default branch: ${detectedBase}`);
    workDir = cloneToTemp(ticket.repoUrl, detectedBase, config.github.token);
    log.ticket(ticket.identifier, "Cloned repo");
    // Repo-local .autocode.yaml can override base branch, agent and docker
    // settings; a ticket-level base label wins over the repo-local one.
    const repoLocal = loadRepoLocalConfig(workDir);
    const actualBase = ticket.baseBranch ?? repoLocal?.base ?? detectedBase;
    const { agent, docker: docker2, prompt } = mergeConfigs(config, repoLocal);
    if (repoLocal) {
      log.ticket(ticket.identifier, "Loaded .autocode.yaml from repo");
    }
    if (ticket.baseBranch) {
      log.ticket(ticket.identifier, `Stacked on branch: ${ticket.baseBranch}`);
    }
    // We had to clone once to read .autocode.yaml; re-clone if that file (or
    // a label) selected a different base branch than the remote default.
    if (actualBase !== detectedBase) {
      rmSync2(workDir, { recursive: true, force: true });
      workDir = cloneToTemp(ticket.repoUrl, actualBase, config.github.token);
    }
    // branchName is built from slugify output, so it is shell-safe here.
    execSync2(`git checkout -B ${branchName} --`, { cwd: workDir, stdio: "pipe" });
    container = await createContainer({
      dockerConfig: docker2,
      workspacePath: workDir,
      name: containerName
    });
    await setupRepo(container, { branchName });
    const agentPrompt = buildPrompt(ticket, prompt);
    const agentResult = await runAgentLoop({
      container,
      agentConfig: agent,
      prompt: agentPrompt,
      ticketId: ticket.identifier
    });
    const gitResult = await commitAndPush(container, {
      branchName,
      ticketId: ticket.identifier,
      title: ticket.title,
      repoUrl: ticket.repoUrl,
      githubToken: config.github.token
    });
    // Nothing pushed means the agent produced no commits — treat as failure.
    if (!gitResult.pushed) {
      await failTicket(linearClient, ticket.id, "No changes produced");
      return {
        ticketId: ticket.identifier,
        success: false,
        error: "No changes produced",
        iterations: agentResult.iterations
      };
    }
    // Build the PR body: link back to the ticket, optional (truncated)
    // description, and a small metrics table.
    const prBody = [
      `Resolves [${ticket.identifier}](${ticket.url})`,
      ""
    ];
    if (ticket.description) {
      const desc = ticket.description.length > 2e3 ? ticket.description.slice(0, 2e3) + "\n\n*[description truncated]*" : ticket.description;
      prBody.push("## Description", "", desc, "");
    }
    prBody.push(
      "## Details",
      "",
      `| Metric | Value |`,
      `| --- | --- |`,
      `| Iterations | ${agentResult.iterations} |`,
      `| Commits | ${gitResult.commitCount} |`,
      `| Model | ${agent.model} |`,
      "",
      "---",
      "*Generated by [autocode](https://github.com/rahulrai/autocode)*"
    );
    const pr = await openPR(octokit, {
      repoUrl: ticket.repoUrl,
      branch: branchName,
      baseBranch: actualBase,
      title: `${ticket.identifier}: ${ticket.title}`,
      body: prBody.join("\n")
    });
    await completeTicket(linearClient, config, ticket.id, `PR opened: ${pr.url}`);
    await addBranchLabel(linearClient, ticket.id, branchName);
    log.success(`${ticket.identifier} done \u2014 ${pr.url}`);
    return {
      ticketId: ticket.identifier,
      success: true,
      prUrl: pr.url,
      iterations: agentResult.iterations
    };
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    log.error(`${ticket.identifier} failed: ${msg}`);
    // Best effort: reporting the failure to Linear must not mask the result.
    try {
      await failTicket(linearClient, ticket.id, msg);
    } catch {
    }
    return {
      ticketId: ticket.identifier,
      success: false,
      error: msg,
      iterations: 0
    };
  } finally {
    // Teardown runs on every path; destroyContainer and rmSync are both
    // tolerant of already-removed resources.
    if (container) {
      await destroyContainer(container);
    }
    if (workDir) {
      try {
        rmSync2(workDir, { recursive: true, force: true });
      } catch {
      }
    }
  }
}
|
|
1067
|
+
|
|
1068
|
+
// src/webhook.ts
|
|
1069
|
+
// Constant-time check of a Linear webhook signature: HMAC-SHA256 the raw
// body with the signing secret and compare the hex digest to the header
// value. timingSafeEqual throws when buffer lengths differ; that case is
// treated as a mismatch.
function verifySignature(body, signature, secret) {
  const expectedHex = createHmac("sha256", secret).update(body).digest("hex");
  const a = Buffer.from(expectedHex);
  const b = Buffer.from(signature);
  try {
    return timingSafeEqual(a, b);
  } catch {
    return false;
  }
}
|
|
1077
|
+
// HTTP server that receives Linear issue webhooks and dispatches tickets to
// executeTicket, bounded by config.maxConcurrent with a FIFO overflow queue.
var WebhookServer = class {
  constructor(config, linearClient, octokit) {
    this.config = config;
    this.linearClient = linearClient;
    this.octokit = octokit;
  }
  config;
  linearClient;
  octokit;
  // Ticket IDs currently being executed.
  active = /* @__PURE__ */ new Set();
  // Tickets waiting for a concurrency slot (FIFO).
  queue = [];
  processed = /* @__PURE__ */ new Set();
  // Track processed ticket IDs to prevent duplicate dispatch
  // NOTE(review): `processed` grows unboundedly for the lifetime of the
  // process — a long-running server accumulates one entry per ticket.
  server;
  // Start listening; resolves once the socket is bound.
  start() {
    const port = this.config.webhook?.port ?? 3e3;
    // Reject request bodies over 1 MiB to bound memory use.
    const MAX_BODY_BYTES = 1024 * 1024;
    this.server = createServer((req, res) => {
      // Unauthenticated liveness endpoint with basic capacity stats.
      if (req.method === "GET" && (req.url === "/health" || req.url === "/healthz")) {
        res.writeHead(200, { "Content-Type": "application/json" });
        res.end(JSON.stringify({ status: "ok", active: this.active.size, queued: this.queue.length, capacity: this.config.maxConcurrent }));
        return;
      }
      // Only POST /webhook or POST /api/v1/linear/webhook are accepted.
      if (req.method !== "POST" || req.url !== "/webhook" && req.url !== "/api/v1/linear/webhook") {
        res.writeHead(404);
        res.end();
        return;
      }
      let body = "";
      let bodyBytes = 0;
      req.on("data", (chunk) => {
        bodyBytes += chunk.length;
        if (bodyBytes > MAX_BODY_BYTES) {
          res.writeHead(413);
          res.end("Payload too large");
          // Abort the connection; "end" will not fire after destroy().
          req.destroy();
          return;
        }
        body += chunk;
      });
      req.on("end", () => {
        // Verify the HMAC signature when a signing secret is configured;
        // the raw (pre-parse) body is what Linear signs.
        if (this.config.webhook?.signingSecret) {
          const sig = req.headers["linear-signature"];
          if (!sig || !verifySignature(body, sig, this.config.webhook.signingSecret)) {
            log.warn("Invalid webhook signature, rejecting");
            res.writeHead(401);
            res.end("Invalid signature");
            return;
          }
        }
        // Ack immediately; ticket processing continues in the background.
        res.writeHead(200);
        res.end("ok");
        void this.handleWebhook(body);
      });
    });
    return new Promise((resolve) => {
      this.server.listen(port, () => {
        log.info(`Webhook server listening on port ${port}`);
        resolve();
      });
    });
  }
  // Stop accepting new connections. In-flight ticket work is not cancelled.
  stop() {
    this.server?.close();
    log.info("Webhook server stopped");
  }
  // Parse one webhook payload and decide: ignore, queue, or dispatch.
  async handleWebhook(body) {
    let payload;
    try {
      payload = JSON.parse(body);
    } catch {
      log.warn("Invalid webhook JSON");
      return;
    }
    if (payload.type !== "Issue") return;
    const { action, data } = payload;
    const triggerStatuses = this.config.linear.statuses.map((s) => s.toLowerCase());
    const currentStatus = data.state.name.toLowerCase();
    if (action === "create" || action === "update") {
      // Only react when the issue sits in one of the configured statuses.
      if (!triggerStatuses.includes(currentStatus)) {
        log.debug(`${data.identifier}: status "${data.state.name}" not in trigger list, ignoring`);
        return;
      }
      // For updates, only a *state transition* counts — Linear includes
      // updatedFrom.stateId when the workflow state changed.
      if (action === "update" && !payload.updatedFrom?.stateId) {
        log.debug(`${data.identifier}: update but status didn't change, ignoring`);
        return;
      }
    } else {
      return;
    }
    if (this.active.has(data.id)) {
      log.debug(`${data.identifier}: already active, ignoring`);
      return;
    }
    if (this.processed.has(data.id)) {
      log.debug(`${data.identifier}: already processed in this session, ignoring`);
      return;
    }
    // At capacity: snapshot the ticket and queue it for drainQueue().
    // NOTE(review): queued tickets are not checked for a repo: label here;
    // the check below only runs on the immediate-dispatch path.
    if (this.active.size >= this.config.maxConcurrent) {
      log.info(`${data.identifier}: at capacity (${this.active.size}/${this.config.maxConcurrent}), queued`);
      const queueLabels = data.labels.map((l) => l.name);
      const ticket2 = {
        id: data.id,
        identifier: data.identifier,
        title: data.title,
        description: data.description ?? "",
        labels: queueLabels,
        repoUrl: parseRepoFromLabels(queueLabels),
        baseBranch: parseBaseFromLabels(queueLabels),
        url: data.url
      };
      this.queue.push(ticket2);
      return;
    }
    const labelNames = data.labels.map((l) => l.name);
    const repoUrl = parseRepoFromLabels(labelNames);
    if (!repoUrl) {
      log.warn(`${data.identifier}: no repo: label, ignoring`);
      return;
    }
    const ticket = {
      id: data.id,
      identifier: data.identifier,
      title: data.title,
      description: data.description ?? "",
      labels: labelNames,
      repoUrl,
      baseBranch: parseBaseFromLabels(labelNames),
      url: data.url
    };
    this.dispatch(ticket);
  }
  // Fire-and-forget execution of one ticket; on settle, free the slot,
  // remember the ticket as processed, and pull the next queued ticket.
  dispatch(ticket) {
    this.active.add(ticket.id);
    log.ticket(ticket.identifier, `Dispatching \u2192 ${ticket.repoUrl}`);
    void executeTicket({
      ticket,
      config: this.config,
      linearClient: this.linearClient,
      octokit: this.octokit
    }).finally(() => {
      this.active.delete(ticket.id);
      this.processed.add(ticket.id);
      this.drainQueue();
    });
  }
  // Dispatch queued tickets while free slots remain. A queued ticket whose
  // id became active in the meantime is skipped (dropped, not re-queued).
  drainQueue() {
    while (this.queue.length > 0 && this.active.size < this.config.maxConcurrent) {
      const next = this.queue.shift();
      if (this.active.has(next.id)) continue;
      log.ticket(next.identifier, `Dequeued (${this.queue.length} remaining)`);
      this.dispatch(next);
    }
  }
};
|
|
1232
|
+
|
|
1233
|
+
// src/tunnel.ts
|
|
1234
|
+
import { spawn } from "child_process";
|
|
1235
|
+
// Spawn a cloudflared quick tunnel for the given local port and resolve with
// { url, process } once the *.trycloudflare.com URL appears on stdout/stderr.
// Rejects if cloudflared is missing, exits early, or prints no URL in 30s.
async function startQuickTunnel(port) {
  return new Promise((resolve, reject) => {
    const child = spawn("cloudflared", ["tunnel", "--url", `http://127.0.0.1:${port}`], {
      stdio: ["ignore", "pipe", "pipe"]
    });
    let settled = false;
    // The startup deadline rejects but deliberately leaves `settled` false,
    // matching the original behavior (a late URL match is then a no-op on
    // the already-rejected promise).
    const timer = setTimeout(() => {
      if (!settled) {
        reject(new Error("Tunnel startup timed out after 30s"));
      }
    }, 3e4);
    // cloudflared prints the assigned URL on either stream.
    const scanForUrl = (data) => {
      const text = data.toString();
      const found = text.match(/(https:\/\/[a-z0-9-]+\.trycloudflare\.com)/);
      if (found && !settled) {
        settled = true;
        clearTimeout(timer);
        resolve({ url: found[1], process: child });
      }
    };
    child.stdout.on("data", scanForUrl);
    child.stderr.on("data", scanForUrl);
    child.on("error", (err) => {
      if (settled) return;
      clearTimeout(timer);
      if (err.code === "ENOENT") {
        reject(new Error("cloudflared not found. Install it: https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/downloads/"));
      } else {
        reject(err);
      }
    });
    child.on("exit", (code) => {
      if (settled) return;
      clearTimeout(timer);
      reject(new Error(`cloudflared exited with code ${code}`));
    });
  });
}
|
|
1275
|
+
// Terminate the cloudflared child process with SIGTERM and log the shutdown.
function stopTunnel(tunnel) {
  const { process: child } = tunnel;
  child.kill("SIGTERM");
  log.info("Tunnel stopped");
}
|
|
1279
|
+
|
|
1280
|
+
// src/deps.ts
|
|
1281
|
+
import { execSync as execSync3 } from "child_process";
|
|
1282
|
+
import { existsSync as existsSync4 } from "fs";
|
|
1283
|
+
import { join as join5 } from "path";
|
|
1284
|
+
import { homedir as homedir4 } from "os";
|
|
1285
|
+
// Detect the host's package manager by probing for each manager's binary
// with `which`, in priority order. Returns the manager key used to index
// the install tables, or "unknown" when none is found.
function detectPackageManager() {
  const candidates = [
    ["apt", "apt-get"],
    ["dnf", "dnf"],
    ["yum", "yum"],
    ["pacman", "pacman"],
    ["apk", "apk"],
    ["brew", "brew"]
  ];
  const hit = candidates.find(([, probe]) => {
    try {
      execSync3(`which ${probe}`, { stdio: "pipe" });
      return true;
    } catch {
      return false;
    }
  });
  return hit ? hit[0] : "unknown";
}
|
|
1303
|
+
// Shell commands to install Docker, keyed by the package manager returned by
// detectPackageManager(). Linux entries also enable/start the daemon.
// "unknown" is an empty list — installDep() then reports not-installable.
var DOCKER_INSTALL = {
  apt: [
    "apt-get update -qq",
    "apt-get install -y docker.io",
    "systemctl enable docker",
    "systemctl start docker"
  ],
  dnf: [
    "dnf install -y docker",
    "systemctl enable docker",
    "systemctl start docker"
  ],
  yum: [
    "yum install -y docker",
    "systemctl enable docker",
    "systemctl start docker"
  ],
  pacman: [
    "pacman -Sy --noconfirm docker",
    "systemctl enable docker",
    "systemctl start docker"
  ],
  apk: [
    "apk add docker",
    "rc-update add docker boot",
    "service docker start"
  ],
  brew: [
    "brew install --cask docker"
  ],
  unknown: []
};
// Install commands for cloudflared, keyed the same way. The apt entry falls
// back to adding Cloudflare's apt repo when the distro doesn't package it;
// dnf/yum fall back to the npm-distributed build.
var CLOUDFLARED_INSTALL = {
  apt: [
    "apt-get update -qq",
    'apt-get install -y cloudflared || (curl -fsSL https://pkg.cloudflare.com/cloudflare-main.gpg | tee /usr/share/keyrings/cloudflare-main.gpg >/dev/null && echo "deb [signed-by=/usr/share/keyrings/cloudflare-main.gpg] https://pkg.cloudflare.com/cloudflared $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/cloudflared.list && apt-get update -qq && apt-get install -y cloudflared)'
  ],
  dnf: ["dnf install -y cloudflared || npm install -g cloudflared"],
  yum: ["yum install -y cloudflared || npm install -g cloudflared"],
  pacman: ["pacman -Sy --noconfirm cloudflared"],
  apk: ["apk add cloudflared"],
  brew: ["brew install cloudflared"],
  unknown: []
};
|
|
1347
|
+
// Report the status of every external dependency autocode needs: Docker,
// the Claude Code CLI, Claude credentials, and cloudflared. Each entry says
// whether it is installed, whether we can auto-install it, and the commands
// (prefixed with sudo when needed) or manual instructions to do so.
function checkDeps() {
  const pm = detectPackageManager();
  // brew never needs sudo; "unknown" has no commands; root needs no sudo.
  const needsSudo = pm !== "brew" && pm !== "unknown" && process.getuid?.() !== 0;
  const sudo = needsSudo ? "sudo " : "";
  const withSudo = (commands) => commands.map((c) => `${sudo}${c}`);
  const docker = {
    name: "Docker",
    installed: commandExists("docker"),
    running: isDockerRunning(),
    installable: pm !== "unknown",
    installCommands: withSudo(DOCKER_INSTALL[pm]),
    manualInstructions: "https://docs.docker.com/get-docker/"
  };
  const claudeCli = {
    name: "Claude Code",
    installed: commandExists("claude"),
    installable: commandExists("npm"),
    installCommands: ["npm install -g @anthropic-ai/claude-code@latest"],
    manualInstructions: "npm install -g @anthropic-ai/claude-code"
  };
  const claudeCreds = {
    name: "Claude credentials",
    installed: existsSync4(join5(homedir4(), ".claude", ".credentials.json")),
    installable: false,
    installCommands: [],
    manualInstructions: 'Run "claude" and complete the login flow'
  };
  const cloudflared = {
    name: "cloudflared",
    installed: commandExists("cloudflared"),
    installable: pm !== "unknown",
    installCommands: withSudo(CLOUDFLARED_INSTALL[pm]),
    manualInstructions: "https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/downloads/"
  };
  return [docker, claudeCli, claudeCreds, cloudflared];
}
|
|
1383
|
+
// Run a dependency's install commands in order, streaming their output to
// the terminal. Returns false when there is nothing to run or any command
// fails (each command gets a 5-minute timeout); true only if all succeed.
function installDep(dep) {
  if (dep.installCommands.length === 0) return false;
  return dep.installCommands.every((command) => {
    try {
      execSync3(command, { stdio: "inherit", timeout: 3e5 });
      return true;
    } catch {
      return false;
    }
  });
}
|
|
1394
|
+
// Linux-only convenience: add the current user to the "docker" group so the
// daemon can be used without sudo after re-login. Silently a no-op on other
// platforms or when the sudo call fails.
function addUserToDockerGroup() {
  if (process.platform !== "linux") return;
  try {
    const currentUser = execSync3("whoami", { encoding: "utf-8" }).trim();
    execSync3(`sudo usermod -aG docker ${currentUser}`, { stdio: "pipe" });
    log.info(`Added ${currentUser} to docker group \u2014 log out and back in to apply`);
  } catch {
    // best effort — the user can run the usermod command manually
  }
}
|
|
1403
|
+
// True when `cmd` resolves on PATH (probed via `which`; a non-zero exit
// from the probe means the command is absent).
function commandExists(cmd) {
  let found = true;
  try {
    execSync3(`which ${cmd}`, { stdio: "pipe" });
  } catch {
    found = false;
  }
  return found;
}
|
|
1411
|
+
// True when the Docker daemon answers `docker info` within 10 seconds;
// false when the CLI is missing, the daemon is down, or the probe times out.
function isDockerRunning() {
  let reachable = true;
  try {
    execSync3("docker info", { stdio: "pipe", timeout: 1e4 });
  } catch {
    reachable = false;
  }
  return reachable;
}
|
|
1419
|
+
|
|
1420
|
+
// src/index.ts
|
|
1421
|
+
import { existsSync as existsSync5, mkdirSync as mkdirSync2, writeFileSync as writeFileSync3 } from "fs";
|
|
1422
|
+
import { execSync as execSync4 } from "child_process";
|
|
1423
|
+
import { createInterface } from "readline";
|
|
1424
|
+
import { join as join6 } from "path";
|
|
1425
|
+
import { homedir as homedir5 } from "os";
|
|
1426
|
+
import { LinearClient as LinearClient2 } from "@linear/sdk";
|
|
1427
|
+
import { Octokit as Octokit2 } from "@octokit/rest";
|
|
1428
|
+
import { createRequire } from "module";
|
|
1429
|
+
// CLI bootstrap: load package.json via a CJS require shim (this bundle is
// ESM, so require must be recreated from import.meta.url) and set up the
// top-level commander program with name/description/version.
var require2 = createRequire(import.meta.url);
var pkg = require2("../package.json");
var program = new Command();
program.name("autocode").description("Linear webhooks \u2192 Claude Code in Docker \u2192 PRs").version(pkg.version);
|
|
1433
|
+
// Prompt the user on stdin/stdout and resolve with their trimmed answer.
// An optional default is shown as "(default)" in the prompt and returned
// when the answer is blank; with no default, a blank answer yields "".
function ask(question, defaultVal) {
  const rl = createInterface({ input: process.stdin, output: process.stdout });
  const hint = defaultVal ? ` (${defaultVal})` : "";
  return new Promise((resolve) => {
    const onAnswer = (answer) => {
      rl.close();
      resolve(answer.trim() || defaultVal || "");
    };
    rl.question(`${question}${hint}: `, onAnswer);
  });
}
|
|
1443
|
+
// `autocode serve`: preflight checks, then start the webhook server, by
// default fronted by a cloudflared quick tunnel that is auto-registered as
// a Linear webhook. Ctrl-C / SIGTERM shuts everything down.
program.command("serve").description("Start webhook server with auto-tunnel").option("-c, --config <path>", "Config file path").option("-p, --port <number>", "Override webhook port").option("--no-tunnel", "Skip auto-tunnel (use if you have your own reverse proxy)").option("-v, --verbose", "Verbose logging").action(async (opts) => {
  if (opts.verbose) setLogLevel("debug");
  enableFileLogging();
  log.info(`autocode v${pkg.version}`);
  // Hard requirements: a reachable Docker daemon and Claude credentials.
  await checkDockerAvailable();
  if (!checkClaudeCredentials()) {
    throw new Error(
      "Claude Code credentials not found (~/.claude/.credentials.json)\nRun: claude (and complete login) to create credentials"
    );
  }
  // Remove containers left behind by a previous crashed session.
  const orphans = await cleanupOrphanedContainers();
  if (orphans > 0) {
    log.info(`Cleaned up ${orphans} orphaned container(s) from previous session`);
  }
  const config = loadConfig(opts.config);
  // NOTE(review): config.webhook is accessed without optional chaining here,
  // unlike WebhookServer.start() — assumes loadConfig always populates it.
  if (opts.port) config.webhook.port = parseInt(opts.port);
  log.info(`Team: ${config.linear.teamId} | Statuses: ${config.linear.statuses.join(", ")} | Model: ${config.agent.model} | Max concurrent: ${config.maxConcurrent}`);
  const linearClient = createLinearClient(config);
  const octokit = createOctokit(config);
  const server = new WebhookServer(config, linearClient, octokit);
  let tunnel;
  // --no-tunnel sets opts.tunnel to false; anything else starts the tunnel.
  if (opts.tunnel !== false) {
    log.info("Starting tunnel...");
    try {
      tunnel = await startQuickTunnel(config.webhook.port);
      const webhookUrl = `${tunnel.url}/api/v1/linear/webhook`;
      log.success(`Tunnel: ${tunnel.url}`);
      log.info(`Webhook URL: ${webhookUrl}`);
      // Quick tunnels get a fresh URL each run: delete the webhook we
      // registered last time (best effort) before creating a new one.
      if (config.webhook.webhookId) {
        try {
          await linearClient.deleteWebhook(config.webhook.webhookId);
        } catch {
        }
      }
      const team = await linearClient.team(config.linear.teamId);
      const result = await linearClient.createWebhook({
        url: webhookUrl,
        resourceTypes: ["Issue"],
        label: "autocode",
        teamId: team.id
      });
      const webhook = await result.webhook;
      if (webhook) {
        log.success(`Linear webhook created \u2192 ${webhookUrl}`);
        // Remember the new webhook id in the in-memory config; the signing
        // secret is cleared since the auto-created webhook doesn't use ours.
        config.webhook.webhookId = webhook.id;
        config.webhook.signingSecret = void 0;
      }
    } catch (err) {
      // Tunnel failure is non-fatal: keep serving locally.
      log.warn(`Tunnel failed: ${err instanceof Error ? err.message : err}`);
      log.info("Falling back to local-only mode. Set up your own reverse proxy.");
    }
  }
  await server.start();
  const shutdown = () => {
    log.info("Shutting down...");
    server.stop();
    if (tunnel) stopTunnel(tunnel);
    process.exit(0);
  };
  process.on("SIGINT", shutdown);
  process.on("SIGTERM", shutdown);
});
|
|
1505
|
+
// `autocode run <ticket>` — one-shot mode. Fetches a single Linear ticket,
// validates it is routable (has a repo: label), then executes it in a
// container. --dry-run prints the plan without touching anything.
program.command("run <ticket>").description("Run a single ticket (e.g. autocode run RAH-123)").option("-c, --config <path>", "Config file path").option("-v, --verbose", "Verbose logging").option("--dry-run", "Show what would happen without executing").action(async (ticketId, opts) => {
  if (opts.verbose) setLogLevel("debug");
  enableFileLogging();
  const config = loadConfig(opts.config);
  const linearClient = createLinearClient(config);
  const ticket = await fetchTicket(linearClient, ticketId);
  if (!ticket) throw new Error(`Ticket ${ticketId} not found`);
  // The repo: label is how a ticket is mapped to a GitHub repository; without
  // it there is nothing to clone.
  if (!ticket.repoUrl) {
    throw new Error(`${ticketId} has no repo: label. Add "repo:owner/name" label to the ticket.`);
  }
  // Dry run: print the resolved plan and return before any prerequisite
  // checks or side effects.
  if (opts.dryRun) {
    log.info("Dry run \u2014 no changes will be made\n");
    log.info(` Ticket: ${ticket.identifier} \u2014 ${ticket.title}`);
    log.info(` Repo: ${ticket.repoUrl}`);
    log.info(` Base: ${ticket.baseBranch ?? "(from .autocode.yaml or main)"}`);
    log.info(` Branch: autocode/${ticket.identifier}-...`);
    log.info(` Image: ${config.docker.image}`);
    log.info(` Model: ${config.agent.model}`);
    log.info(` Max iter: ${config.agent.maxIterations}`);
    if (ticket.description) {
      // Indent each description line for readability in the console.
      log.info(`
Description:
  ${ticket.description.split("\n").join("\n ")}`);
    }
    return;
  }
  // Same prerequisites as `serve`: Docker daemon up, Claude Code logged in.
  await checkDockerAvailable();
  if (!checkClaudeCredentials()) {
    throw new Error(
      "Claude Code credentials not found (~/.claude/.credentials.json)\nRun: claude (and complete login) to create credentials"
    );
  }
  const octokit = createOctokit(config);
  // Ctrl-C / SIGTERM aborts the in-flight execution instead of killing the
  // process outright, so container cleanup can run.
  // NOTE(review): `abort` is not visibly passed to executeTicket here —
  // presumably it is picked up elsewhere; confirm the signal actually
  // propagates.
  const abort = new AbortController();
  const onSignal = () => {
    log.info("Received signal, cleaning up...");
    abort.abort();
  };
  process.on("SIGINT", onSignal);
  process.on("SIGTERM", onSignal);
  try {
    const result = await executeTicket({ ticket, config, linearClient, octokit });
    if (!result.success) throw new Error(result.error ?? "Unknown failure");
  } finally {
    // Always detach the signal handlers, success or failure.
    process.off("SIGINT", onSignal);
    process.off("SIGTERM", onSignal);
  }
});
|
|
1553
|
+
// `autocode validate` — parse and schema-check the config file, print a
// summary of the effective settings, and exit non-zero if it is invalid.
program
  .command("validate")
  .description("Validate config without running anything")
  .option("-c, --config <path>", "Config file path")
  .action(async (opts) => {
    try {
      const config = loadConfig(opts.config);
      log.success("Config is valid");
      const summary = [
        ` Linear team: ${config.linear.teamId}`,
        ` Trigger statuses: ${config.linear.statuses.join(", ")}`,
        ` Docker image: ${config.docker.image}`,
        ` Agent model: ${config.agent.model}`,
        ` Max iterations: ${config.agent.maxIterations}`,
        ` Max concurrent: ${config.maxConcurrent}`,
        ` Webhook port: ${config.webhook.port}`
      ];
      for (const line of summary) {
        log.info(line);
      }
    } catch (err) {
      log.error(err instanceof Error ? err.message : String(err));
      process.exit(1);
    }
  });
|
|
1569
|
+
// `autocode cleanup` — remove autocode-managed Docker containers left behind
// by a previous (crashed or killed) session.
program
  .command("cleanup")
  .description("Remove orphaned autocode Docker containers")
  .action(async () => {
    await checkDockerAvailable();
    const removed = await cleanupOrphanedContainers();
    if (removed > 0) {
      log.success(`Cleaned up ${removed} orphaned container(s)`);
    } else {
      log.info("No orphaned containers found");
    }
  });
|
|
1578
|
+
// True when Claude Code has completed login on this machine — i.e. the
// credentials file it writes (~/.claude/.credentials.json) exists.
function checkClaudeCredentials() {
  const credentialsPath = join6(homedir5(), ".claude", ".credentials.json");
  return existsSync5(credentialsPath);
}
|
|
1581
|
+
// Validate a GitHub token by asking the API who it belongs to.
// Returns the authenticated user's login on success; returns undefined for
// any failure (invalid/expired token, network error) — callers treat this
// as a best-effort check, so errors are intentionally swallowed.
async function validateGitHubToken(token) {
  try {
    const client = new Octokit2({ auth: token });
    const response = await client.rest.users.getAuthenticated();
    return response.data.login;
  } catch {
    return undefined;
  }
}
|
|
1590
|
+
// `autocode init` — interactive first-run setup. Checks/installs local
// dependencies, collects Linear + GitHub credentials, and writes
// ~/.autocode config with owner-only permissions.
//
// Fixes vs. previous version:
//  - Secrets (Linear API key, GitHub token) are embedded in the generated
//    YAML via JSON.stringify. A YAML double-quoted scalar accepts any JSON
//    string, so keys containing `"` or `\` no longer corrupt the file
//    (previously they were spliced into `"${...}"` verbatim).
//  - parseInt is called with an explicit radix.
//  - File mode written as 0o600 instead of the magic decimal 384 (same value).
program.command("init").description("Set up autocode interactively").action(async () => {
  console.log("\n autocode setup\n");
  console.log(" Checking dependencies...\n");
  const deps = checkDeps();
  // Names of dependencies that remain unusable after this pass; reported at
  // the end so setup can still continue.
  const stillMissing = [];
  for (const dep of deps) {
    // Installed and running (or has no "running" concept): nothing to do.
    if (dep.installed && dep.running !== false) {
      log.success(`${dep.name}`);
      continue;
    }
    // Installed but the daemon/service is down (e.g. Docker not started).
    if (dep.installed && dep.running === false) {
      log.warn(`${dep.name} \u2014 installed but not running`);
      stillMissing.push(`${dep.name} (needs restart)`);
      continue;
    }
    if (dep.installable && dep.installCommands.length > 0) {
      // Offer to install via the system package manager; default is yes.
      const answer = await ask(` ${dep.name} not found. Install? (Y/n)`, "Y");
      if (answer.toLowerCase() !== "n") {
        console.log(`
 Installing ${dep.name} via system package manager...
`);
        const ok = installDep(dep);
        if (ok) {
          log.success(`${dep.name} installed`);
          // Docker needs the current user in the docker group to talk to
          // the daemon without sudo.
          if (dep.name === "Docker") addUserToDockerGroup();
          // Claude Code still needs an interactive login we cannot script.
          if (dep.name === "Claude Code") {
            console.log('\n Run "claude" in your terminal to log in, then re-run: autocode init\n');
            stillMissing.push("Claude Code login");
          }
        } else {
          log.error(`${dep.name} install failed`);
          console.log(` Install manually: ${dep.manualInstructions}`);
          stillMissing.push(dep.name);
        }
      } else {
        console.log(` Install manually: ${dep.manualInstructions}`);
        stillMissing.push(dep.name);
      }
    } else if (dep.name === "Claude credentials") {
      log.warn(`${dep.name} \u2014 run "claude" to log in`);
      stillMissing.push(dep.name);
    } else {
      log.warn(`${dep.name} \u2014 ${dep.manualInstructions}`);
      stillMissing.push(dep.name);
    }
  }
  if (stillMissing.length > 0) {
    console.log(`
 Continuing setup \u2014 install ${stillMissing.join(", ")} before running.
`);
  } else {
    console.log("");
  }
  if (!existsSync5(CONFIG_DIR)) {
    mkdirSync2(CONFIG_DIR, { recursive: true });
  }
  // Never clobber an existing config without confirmation; default is keep.
  if (existsSync5(CONFIG_FILE)) {
    const overwrite = await ask("Config already exists. Overwrite? (y/N)", "N");
    if (overwrite.toLowerCase() !== "y") {
      log.info("Keeping existing config.");
      return;
    }
  }
  const linearKey = await ask("Linear API key", process.env.LINEAR_API_KEY);
  if (!linearKey) {
    log.error("Linear API key is required");
    process.exit(1);
  }
  // Resolve the Linear team: auto-pick when the workspace has exactly one,
  // otherwise list teams and prompt for a key.
  let teamId = "";
  try {
    const client = new LinearClient2({ apiKey: linearKey });
    const teams = await client.teams();
    if (teams.nodes.length === 0) {
      log.error("No teams found in your Linear workspace");
      process.exit(1);
    }
    if (teams.nodes.length === 1) {
      teamId = teams.nodes[0].key;
      log.success(`Linear team: ${teams.nodes[0].name} (${teamId})`);
    } else {
      console.log("\n Available teams:");
      for (const t of teams.nodes) {
        console.log(` ${t.key} \u2014 ${t.name}`);
      }
      teamId = await ask("\n Team key");
    }
  } catch (err) {
    log.error(`Invalid Linear API key: ${err instanceof Error ? err.message : err}`);
    process.exit(1);
  }
  // Prefer the token the gh CLI already holds; fall back to prompting.
  let ghToken = "";
  try {
    ghToken = execSync4("gh auth token", { encoding: "utf-8" }).trim();
    log.success(`GitHub token detected from gh CLI`);
  } catch {
    ghToken = await ask("GitHub personal access token");
  }
  if (!ghToken) {
    log.error("GitHub token is required");
    process.exit(1);
  }
  const ghUser = await validateGitHubToken(ghToken);
  if (ghUser) {
    log.success(`GitHub authenticated as ${ghUser}`);
  } else {
    log.error("GitHub token is invalid or expired");
    process.exit(1);
  }
  console.log("\n Docker image options:");
  console.log(" autocode-base \u2014 All runtimes: Node, Python, Go, Rust, Ruby (~1.5GB)");
  console.log(" Works with any repo, no per-repo config needed");
  console.log(" node:20 \u2014 Node.js only (fast, ~300MB)");
  const dockerImage = await ask("Docker image", "autocode-base");
  if (dockerImage === "autocode-base") {
    // Inspect succeeds only if the image already exists locally; otherwise
    // it is built lazily on first run.
    try {
      execSync4("docker image inspect autocode-base", { stdio: "pipe" });
      log.success("autocode-base image found");
    } catch {
      log.info("autocode-base will be built automatically on first run (~3 min)");
    }
  }
  const model = await ask("Claude model", "claude-sonnet-4-6");
  const maxIter = await ask("Max iterations per ticket", "10");
  // Secrets go through JSON.stringify: YAML double-quoted scalars are a
  // superset of JSON strings, so this is injection-safe for any input.
  const config = `# autocode config \u2014 generated by autocode init
# Tip: use "env:VAR_NAME" to read secrets from environment variables
# e.g. apiKey: env:LINEAR_API_KEY

linear:
  apiKey: ${JSON.stringify(linearKey)}
  teamId: ${teamId}
  statuses: [Todo]
  assignToMe: false
  inProgressStatus: In Progress
  doneStatus: Done

github:
  token: ${JSON.stringify(ghToken)}

webhook:
  port: 3000

docker:
  image: ${dockerImage}
  memory: 4g

agent:
  model: ${model}
  maxIterations: ${parseInt(maxIter, 10) || 10}

maxConcurrent: 1
`;
  // 0o600: config holds secrets, so restrict it to the owner.
  writeFileSync3(CONFIG_FILE, config, { mode: 0o600 });
  log.success(`Config saved to ${CONFIG_FILE}`);
  console.log(`
 Setup complete! Next steps:

 1. Add repo: labels to your Linear team
 (e.g. "repo:your-org/your-repo")

 2. Tag tickets with a repo: label

 3. Start autocode:
 $ autocode serve

 autocode serve will auto-create a tunnel and
 register the webhook with Linear. Just run it.
`);
});
|
|
1758
|
+
// Entry point: parse argv and dispatch to the selected subcommand. Any
// unhandled rejection from a command action is reported and mapped to a
// non-zero exit code.
program.parseAsync().catch((err) => {
  const message = err instanceof Error ? err.message : String(err);
  log.error(message);
  process.exit(1);
});
|
|
1762
|
+
//# sourceMappingURL=index.js.map
|