@tarcisiopgs/lisa 0.9.2 → 0.9.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +1519 -318
- package/package.json +23 -5
package/dist/index.js
CHANGED
|
@@ -2,16 +2,21 @@
|
|
|
2
2
|
|
|
3
3
|
// src/cli.ts
|
|
4
4
|
import { execSync as execSync4 } from "child_process";
|
|
5
|
-
import { existsSync as
|
|
6
|
-
import { join as
|
|
7
|
-
import { defineCommand, runMain } from "citty";
|
|
5
|
+
import { existsSync as existsSync7, readdirSync, readFileSync as readFileSync6 } from "fs";
|
|
6
|
+
import { join as join8, resolve as resolvePath } from "path";
|
|
8
7
|
import * as clack from "@clack/prompts";
|
|
8
|
+
import { defineCommand, runMain } from "citty";
|
|
9
9
|
import pc2 from "picocolors";
|
|
10
10
|
|
|
11
11
|
// src/config.ts
|
|
12
12
|
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
|
|
13
13
|
import { resolve } from "path";
|
|
14
14
|
import { parse, stringify } from "yaml";
|
|
15
|
+
var DEFAULT_OVERSEER_CONFIG = {
|
|
16
|
+
enabled: false,
|
|
17
|
+
check_interval: 30,
|
|
18
|
+
stuck_threshold: 300
|
|
19
|
+
};
|
|
15
20
|
var CONFIG_DIR = ".lisa";
|
|
16
21
|
var CONFIG_FILE = "config.yaml";
|
|
17
22
|
var DEFAULT_CONFIG = {
|
|
@@ -37,7 +42,8 @@ var DEFAULT_CONFIG = {
|
|
|
37
42
|
logs: {
|
|
38
43
|
dir: "",
|
|
39
44
|
format: ""
|
|
40
|
-
}
|
|
45
|
+
},
|
|
46
|
+
overseer: { ...DEFAULT_OVERSEER_CONFIG }
|
|
41
47
|
};
|
|
42
48
|
function getConfigPath(cwd = process.cwd()) {
|
|
43
49
|
return resolve(cwd, CONFIG_DIR, CONFIG_FILE);
|
|
@@ -69,12 +75,19 @@ function loadConfig(cwd = process.cwd()) {
|
|
|
69
75
|
...parsed,
|
|
70
76
|
source_config: sourceConfig,
|
|
71
77
|
loop: { ...DEFAULT_CONFIG.loop, ...parsed.loop ?? {} },
|
|
72
|
-
logs: { ...DEFAULT_CONFIG.logs, ...parsed.logs ?? {} }
|
|
78
|
+
logs: { ...DEFAULT_CONFIG.logs, ...parsed.logs ?? {} },
|
|
79
|
+
overseer: {
|
|
80
|
+
...DEFAULT_OVERSEER_CONFIG,
|
|
81
|
+
...parsed.overseer ?? {}
|
|
82
|
+
}
|
|
73
83
|
};
|
|
74
84
|
if (!config2.base_branch) config2.base_branch = "main";
|
|
75
85
|
for (const repo of config2.repos) {
|
|
76
86
|
if (!repo.base_branch) repo.base_branch = config2.base_branch;
|
|
77
87
|
}
|
|
88
|
+
if (!config2.models && config2.provider) {
|
|
89
|
+
config2.models = [config2.provider];
|
|
90
|
+
}
|
|
78
91
|
return config2;
|
|
79
92
|
}
|
|
80
93
|
function saveConfig(config2, cwd = process.cwd()) {
|
|
@@ -84,7 +97,20 @@ function saveConfig(config2, cwd = process.cwd()) {
|
|
|
84
97
|
mkdirSync(dir, { recursive: true });
|
|
85
98
|
}
|
|
86
99
|
const sc = config2.source_config;
|
|
87
|
-
const sourceYaml = config2.source === "trello" ? {
|
|
100
|
+
const sourceYaml = config2.source === "trello" ? {
|
|
101
|
+
board: sc.team,
|
|
102
|
+
pick_from: sc.pick_from || sc.project,
|
|
103
|
+
label: sc.label,
|
|
104
|
+
in_progress: sc.in_progress,
|
|
105
|
+
done: sc.done
|
|
106
|
+
} : {
|
|
107
|
+
team: sc.team,
|
|
108
|
+
project: sc.project,
|
|
109
|
+
label: sc.label,
|
|
110
|
+
pick_from: sc.pick_from,
|
|
111
|
+
in_progress: sc.in_progress,
|
|
112
|
+
done: sc.done
|
|
113
|
+
};
|
|
88
114
|
const output = { ...config2, source_config: sourceYaml };
|
|
89
115
|
writeFileSync(configPath, stringify(output), "utf-8");
|
|
90
116
|
}
|
|
@@ -97,6 +123,101 @@ function mergeWithFlags(config2, flags) {
|
|
|
97
123
|
return merged;
|
|
98
124
|
}
|
|
99
125
|
|
|
126
|
+
// src/github.ts
|
|
127
|
+
import { execa } from "execa";
|
|
128
|
+
var API_URL = "https://api.github.com";
|
|
129
|
+
var REQUEST_TIMEOUT_MS = 3e4;
|
|
130
|
+
async function isGhCliAvailable() {
|
|
131
|
+
try {
|
|
132
|
+
await execa("gh", ["auth", "status"]);
|
|
133
|
+
return true;
|
|
134
|
+
} catch {
|
|
135
|
+
return false;
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
function getToken() {
|
|
139
|
+
const token = process.env.GITHUB_TOKEN;
|
|
140
|
+
if (!token) throw new Error("GITHUB_TOKEN is not set");
|
|
141
|
+
return token;
|
|
142
|
+
}
|
|
143
|
+
async function createPullRequest(opts, method = "cli") {
|
|
144
|
+
if (method === "cli" && await isGhCliAvailable()) {
|
|
145
|
+
return createPullRequestWithGhCli(opts);
|
|
146
|
+
}
|
|
147
|
+
const res = await fetch(`${API_URL}/repos/${opts.owner}/${opts.repo}/pulls`, {
|
|
148
|
+
method: "POST",
|
|
149
|
+
headers: {
|
|
150
|
+
Authorization: `Bearer ${getToken()}`,
|
|
151
|
+
Accept: "application/vnd.github+json",
|
|
152
|
+
"Content-Type": "application/json"
|
|
153
|
+
},
|
|
154
|
+
body: JSON.stringify({
|
|
155
|
+
title: opts.title,
|
|
156
|
+
body: opts.body,
|
|
157
|
+
head: opts.head,
|
|
158
|
+
base: opts.base
|
|
159
|
+
}),
|
|
160
|
+
signal: AbortSignal.timeout(REQUEST_TIMEOUT_MS)
|
|
161
|
+
});
|
|
162
|
+
if (!res.ok) {
|
|
163
|
+
const text2 = await res.text();
|
|
164
|
+
throw new Error(`GitHub API error (${res.status}): ${text2}`);
|
|
165
|
+
}
|
|
166
|
+
const data = await res.json();
|
|
167
|
+
return { number: data.number, html_url: data.html_url };
|
|
168
|
+
}
|
|
169
|
+
async function createPullRequestWithGhCli(opts) {
|
|
170
|
+
const result = await execa("gh", [
|
|
171
|
+
"pr",
|
|
172
|
+
"create",
|
|
173
|
+
"--repo",
|
|
174
|
+
`${opts.owner}/${opts.repo}`,
|
|
175
|
+
"--head",
|
|
176
|
+
opts.head,
|
|
177
|
+
"--base",
|
|
178
|
+
opts.base,
|
|
179
|
+
"--title",
|
|
180
|
+
opts.title,
|
|
181
|
+
"--body",
|
|
182
|
+
opts.body
|
|
183
|
+
]);
|
|
184
|
+
const url = result.stdout.trim();
|
|
185
|
+
const prNumberMatch = url.match(/\/pull\/(\d+)/);
|
|
186
|
+
const number = prNumberMatch ? Number.parseInt(prNumberMatch[1] ?? "0", 10) : 0;
|
|
187
|
+
return { number, html_url: url };
|
|
188
|
+
}
|
|
189
|
+
async function getRepoInfo(cwd) {
|
|
190
|
+
const { stdout: remoteUrl } = await execa("git", ["remote", "get-url", "origin"], { cwd });
|
|
191
|
+
let owner;
|
|
192
|
+
let repo;
|
|
193
|
+
const sshMatch = remoteUrl.match(/git@github\.com:(.+?)\/(.+?)(?:\.git)?$/);
|
|
194
|
+
const httpsMatch = remoteUrl.match(/github\.com\/(.+?)\/(.+?)(?:\.git)?$/);
|
|
195
|
+
if (sshMatch) {
|
|
196
|
+
owner = sshMatch[1] ?? "";
|
|
197
|
+
repo = sshMatch[2] ?? "";
|
|
198
|
+
} else if (httpsMatch) {
|
|
199
|
+
owner = httpsMatch[1] ?? "";
|
|
200
|
+
repo = httpsMatch[2] ?? "";
|
|
201
|
+
} else {
|
|
202
|
+
throw new Error(`Cannot parse GitHub owner/repo from remote URL: ${remoteUrl}`);
|
|
203
|
+
}
|
|
204
|
+
const { stdout: branch } = await execa("git", ["branch", "--show-current"], { cwd });
|
|
205
|
+
const { stdout: defaultBranch } = await execa(
|
|
206
|
+
"git",
|
|
207
|
+
["symbolic-ref", "refs/remotes/origin/HEAD", "--short"],
|
|
208
|
+
{ cwd, reject: false }
|
|
209
|
+
).then(
|
|
210
|
+
(r) => r,
|
|
211
|
+
() => ({ stdout: "origin/main" })
|
|
212
|
+
);
|
|
213
|
+
return {
|
|
214
|
+
owner,
|
|
215
|
+
repo,
|
|
216
|
+
branch: branch.trim(),
|
|
217
|
+
defaultBranch: defaultBranch.replace("origin/", "").trim()
|
|
218
|
+
};
|
|
219
|
+
}
|
|
220
|
+
|
|
100
221
|
// src/logger.ts
|
|
101
222
|
import { appendFileSync, existsSync as existsSync2, mkdirSync as mkdirSync2, writeFileSync as writeFileSync2 } from "fs";
|
|
102
223
|
import { dirname } from "path";
|
|
@@ -131,26 +252,38 @@ function emitJson(level, message) {
|
|
|
131
252
|
console.log(JSON.stringify(event));
|
|
132
253
|
}
|
|
133
254
|
function log(message) {
|
|
134
|
-
if (outputMode === "json")
|
|
255
|
+
if (outputMode === "json") {
|
|
256
|
+
emitJson("info", message);
|
|
257
|
+
return;
|
|
258
|
+
}
|
|
135
259
|
if (outputMode !== "quiet") {
|
|
136
260
|
console.log(`${pc.cyan("[lisa]")} ${pc.dim(timestamp())} ${message}`);
|
|
137
261
|
}
|
|
138
262
|
writeToFile("info", message);
|
|
139
263
|
}
|
|
140
264
|
function warn(message) {
|
|
141
|
-
if (outputMode === "json")
|
|
265
|
+
if (outputMode === "json") {
|
|
266
|
+
emitJson("warn", message);
|
|
267
|
+
return;
|
|
268
|
+
}
|
|
142
269
|
if (outputMode !== "quiet") {
|
|
143
270
|
console.error(`${pc.yellow("[lisa]")} ${pc.dim(timestamp())} ${message}`);
|
|
144
271
|
}
|
|
145
272
|
writeToFile("warn", message);
|
|
146
273
|
}
|
|
147
274
|
function error(message) {
|
|
148
|
-
if (outputMode === "json")
|
|
275
|
+
if (outputMode === "json") {
|
|
276
|
+
emitJson("error", message);
|
|
277
|
+
return;
|
|
278
|
+
}
|
|
149
279
|
console.error(`${pc.red("[lisa]")} ${pc.dim(timestamp())} ${message}`);
|
|
150
280
|
writeToFile("error", message);
|
|
151
281
|
}
|
|
152
282
|
function ok(message) {
|
|
153
|
-
if (outputMode === "json")
|
|
283
|
+
if (outputMode === "json") {
|
|
284
|
+
emitJson("ok", message);
|
|
285
|
+
return;
|
|
286
|
+
}
|
|
154
287
|
if (outputMode !== "quiet") {
|
|
155
288
|
console.log(`${pc.green("[lisa]")} ${pc.dim(timestamp())} ${message}`);
|
|
156
289
|
}
|
|
@@ -188,7 +321,7 @@ function banner() {
|
|
|
188
321
|
@%@+=#@==@#:+@##@
|
|
189
322
|
@@@@%%@##%
|
|
190
323
|
`;
|
|
191
|
-
const title = "
|
|
324
|
+
const title = " Lisa \u2014 deterministic autonomous issue resolver ";
|
|
192
325
|
const border = "\u2500".repeat(title.length);
|
|
193
326
|
console.log(pc.yellow(art));
|
|
194
327
|
console.log(pc.cyan(` \u250C${border}\u2510`));
|
|
@@ -198,18 +331,322 @@ function banner() {
|
|
|
198
331
|
}
|
|
199
332
|
|
|
200
333
|
// src/loop.ts
|
|
201
|
-
import {
|
|
202
|
-
import {
|
|
334
|
+
import { appendFileSync as appendFileSync6, existsSync as existsSync6, readFileSync as readFileSync5, unlinkSync as unlinkSync4 } from "fs";
|
|
335
|
+
import { join as join7, resolve as resolve5 } from "path";
|
|
336
|
+
import { execa as execa3 } from "execa";
|
|
203
337
|
|
|
204
|
-
// src/
|
|
338
|
+
// src/lifecycle.ts
|
|
339
|
+
import { spawn } from "child_process";
|
|
340
|
+
import { createConnection } from "net";
|
|
205
341
|
import { resolve as resolve2 } from "path";
|
|
206
|
-
|
|
342
|
+
var managedResources = [];
|
|
343
|
+
var cleanupRegistered = false;
|
|
344
|
+
function isPortInUse(port) {
|
|
345
|
+
return new Promise((resolve6) => {
|
|
346
|
+
const socket = createConnection({ port }, () => {
|
|
347
|
+
socket.destroy();
|
|
348
|
+
resolve6(true);
|
|
349
|
+
});
|
|
350
|
+
socket.on("error", () => {
|
|
351
|
+
socket.destroy();
|
|
352
|
+
resolve6(false);
|
|
353
|
+
});
|
|
354
|
+
});
|
|
355
|
+
}
|
|
356
|
+
function waitForPort(port, timeoutMs) {
|
|
357
|
+
return new Promise((resolve6) => {
|
|
358
|
+
const deadline = Date.now() + timeoutMs;
|
|
359
|
+
const check = () => {
|
|
360
|
+
if (Date.now() > deadline) {
|
|
361
|
+
resolve6(false);
|
|
362
|
+
return;
|
|
363
|
+
}
|
|
364
|
+
isPortInUse(port).then((inUse) => {
|
|
365
|
+
if (inUse) {
|
|
366
|
+
resolve6(true);
|
|
367
|
+
} else {
|
|
368
|
+
setTimeout(check, 500);
|
|
369
|
+
}
|
|
370
|
+
});
|
|
371
|
+
};
|
|
372
|
+
check();
|
|
373
|
+
});
|
|
374
|
+
}
|
|
375
|
+
function spawnResource(config2, baseCwd) {
|
|
376
|
+
const cwd = config2.cwd ? resolve2(baseCwd, config2.cwd) : baseCwd;
|
|
377
|
+
const child = spawn("sh", ["-c", config2.up], {
|
|
378
|
+
cwd,
|
|
379
|
+
stdio: "ignore",
|
|
380
|
+
detached: true
|
|
381
|
+
});
|
|
382
|
+
child.unref();
|
|
383
|
+
return child;
|
|
384
|
+
}
|
|
385
|
+
function runSetupCommand(command, cwd) {
|
|
386
|
+
return new Promise((resolve6, reject) => {
|
|
387
|
+
const child = spawn("sh", ["-c", command], {
|
|
388
|
+
cwd,
|
|
389
|
+
stdio: "inherit"
|
|
390
|
+
});
|
|
391
|
+
child.on("close", (code) => {
|
|
392
|
+
if (code === 0) {
|
|
393
|
+
resolve6();
|
|
394
|
+
} else {
|
|
395
|
+
reject(new Error(`Setup command failed with exit code ${code}: ${command}`));
|
|
396
|
+
}
|
|
397
|
+
});
|
|
398
|
+
child.on("error", (err) => {
|
|
399
|
+
reject(new Error(`Setup command error: ${err.message}`));
|
|
400
|
+
});
|
|
401
|
+
});
|
|
402
|
+
}
|
|
403
|
+
async function startResources(repo, baseCwd) {
|
|
404
|
+
const lifecycle = repo.lifecycle;
|
|
405
|
+
if (!lifecycle) return true;
|
|
406
|
+
registerCleanup();
|
|
407
|
+
for (const resource of lifecycle.resources) {
|
|
408
|
+
const alreadyRunning = await isPortInUse(resource.check_port);
|
|
409
|
+
if (alreadyRunning) {
|
|
410
|
+
ok(`Resource "${resource.name}" already running on port ${resource.check_port}`);
|
|
411
|
+
continue;
|
|
412
|
+
}
|
|
413
|
+
log(`Starting resource "${resource.name}" on port ${resource.check_port}...`);
|
|
414
|
+
const child = spawnResource(resource, baseCwd);
|
|
415
|
+
managedResources.push({
|
|
416
|
+
name: resource.name,
|
|
417
|
+
config: resource,
|
|
418
|
+
process: child
|
|
419
|
+
});
|
|
420
|
+
const timeoutMs = (resource.startup_timeout || 30) * 1e3;
|
|
421
|
+
const ready = await waitForPort(resource.check_port, timeoutMs);
|
|
422
|
+
if (!ready) {
|
|
423
|
+
error(
|
|
424
|
+
`Resource "${resource.name}" failed to start within ${resource.startup_timeout}s`
|
|
425
|
+
);
|
|
426
|
+
await stopResources();
|
|
427
|
+
return false;
|
|
428
|
+
}
|
|
429
|
+
ok(`Resource "${resource.name}" is ready on port ${resource.check_port}`);
|
|
430
|
+
}
|
|
431
|
+
for (const command of lifecycle.setup) {
|
|
432
|
+
log(`Running setup: ${command}`);
|
|
433
|
+
try {
|
|
434
|
+
await runSetupCommand(command, baseCwd);
|
|
435
|
+
ok(`Setup complete: ${command}`);
|
|
436
|
+
} catch (err) {
|
|
437
|
+
error(`Setup failed: ${err instanceof Error ? err.message : String(err)}`);
|
|
438
|
+
await stopResources();
|
|
439
|
+
return false;
|
|
440
|
+
}
|
|
441
|
+
}
|
|
442
|
+
return true;
|
|
443
|
+
}
|
|
444
|
+
async function stopResources() {
|
|
445
|
+
for (const managed of managedResources) {
|
|
446
|
+
const { name, config: config2, process: child } = managed;
|
|
447
|
+
log(`Stopping resource "${name}"...`);
|
|
448
|
+
try {
|
|
449
|
+
if (config2.down === "auto") {
|
|
450
|
+
if (child?.pid) {
|
|
451
|
+
try {
|
|
452
|
+
process.kill(-child.pid, "SIGTERM");
|
|
453
|
+
} catch {
|
|
454
|
+
}
|
|
455
|
+
}
|
|
456
|
+
} else {
|
|
457
|
+
await new Promise((resolve6) => {
|
|
458
|
+
const down = spawn("sh", ["-c", config2.down], {
|
|
459
|
+
stdio: "ignore"
|
|
460
|
+
});
|
|
461
|
+
down.on("close", () => resolve6());
|
|
462
|
+
down.on("error", () => resolve6());
|
|
463
|
+
});
|
|
464
|
+
}
|
|
465
|
+
ok(`Resource "${name}" stopped`);
|
|
466
|
+
} catch (err) {
|
|
467
|
+
warn(
|
|
468
|
+
`Failed to stop resource "${name}": ${err instanceof Error ? err.message : String(err)}`
|
|
469
|
+
);
|
|
470
|
+
}
|
|
471
|
+
}
|
|
472
|
+
managedResources.length = 0;
|
|
473
|
+
}
|
|
474
|
+
function registerCleanup() {
|
|
475
|
+
if (cleanupRegistered) return;
|
|
476
|
+
cleanupRegistered = true;
|
|
477
|
+
const cleanup = () => {
|
|
478
|
+
for (const managed of managedResources) {
|
|
479
|
+
const { config: config2, process: child } = managed;
|
|
480
|
+
try {
|
|
481
|
+
if (config2.down === "auto") {
|
|
482
|
+
if (child?.pid) {
|
|
483
|
+
process.kill(-child.pid, "SIGTERM");
|
|
484
|
+
}
|
|
485
|
+
}
|
|
486
|
+
} catch {
|
|
487
|
+
}
|
|
488
|
+
}
|
|
489
|
+
};
|
|
490
|
+
process.on("exit", cleanup);
|
|
491
|
+
process.on("SIGINT", () => {
|
|
492
|
+
cleanup();
|
|
493
|
+
process.exit(130);
|
|
494
|
+
});
|
|
495
|
+
process.on("SIGTERM", () => {
|
|
496
|
+
cleanup();
|
|
497
|
+
process.exit(143);
|
|
498
|
+
});
|
|
499
|
+
}
|
|
500
|
+
|
|
501
|
+
// src/prompt.ts
|
|
502
|
+
import { existsSync as existsSync3, readFileSync as readFileSync2 } from "fs";
|
|
503
|
+
import { join, resolve as resolve3 } from "path";
|
|
504
|
+
function detectTestRunner(cwd) {
|
|
505
|
+
const packageJsonPath = join(cwd, "package.json");
|
|
506
|
+
if (!existsSync3(packageJsonPath)) return null;
|
|
507
|
+
try {
|
|
508
|
+
const content = JSON.parse(readFileSync2(packageJsonPath, "utf-8"));
|
|
509
|
+
const deps = { ...content.dependencies, ...content.devDependencies };
|
|
510
|
+
if ("vitest" in deps) return "vitest";
|
|
511
|
+
if ("jest" in deps) return "jest";
|
|
512
|
+
return null;
|
|
513
|
+
} catch {
|
|
514
|
+
return null;
|
|
515
|
+
}
|
|
516
|
+
}
|
|
517
|
+
function buildImplementPrompt(issue, config2, testRunner) {
|
|
207
518
|
if (config2.workflow === "worktree") {
|
|
208
|
-
return buildWorktreePrompt(issue);
|
|
519
|
+
return buildWorktreePrompt(issue, testRunner);
|
|
209
520
|
}
|
|
210
|
-
return buildBranchPrompt(issue, config2);
|
|
521
|
+
return buildBranchPrompt(issue, config2, testRunner);
|
|
211
522
|
}
|
|
212
|
-
function
|
|
523
|
+
function buildTestInstructions(testRunner) {
|
|
524
|
+
if (!testRunner) return "";
|
|
525
|
+
return `
|
|
526
|
+
**MANDATORY \u2014 Unit Tests:**
|
|
527
|
+
This project uses **${testRunner}** as its test runner.
|
|
528
|
+
- You MUST write unit tests (\`*.test.ts\`) for every new file or module you create.
|
|
529
|
+
- Tests should cover the main functionality, edge cases, and error scenarios.
|
|
530
|
+
- Run \`npm run test\` and ensure ALL tests pass before committing.
|
|
531
|
+
- Do NOT skip writing tests \u2014 the PR will be blocked if tests are missing or failing.
|
|
532
|
+
`;
|
|
533
|
+
}
|
|
534
|
+
function buildReadmeInstructions() {
|
|
535
|
+
return `
|
|
536
|
+
**README.md Evaluation:**
|
|
537
|
+
After implementing, review the diff of all changed files and check if README.md needs updating.
|
|
538
|
+
|
|
539
|
+
Update README.md if the changes include:
|
|
540
|
+
- New or removed CLI commands or flags
|
|
541
|
+
- New or removed providers or sources
|
|
542
|
+
- Configuration schema changes (new fields, renamed fields, removed fields)
|
|
543
|
+
- Pipeline or workflow stage changes
|
|
544
|
+
- New or removed environment variables
|
|
545
|
+
- Architectural changes
|
|
546
|
+
|
|
547
|
+
Do NOT update README.md for:
|
|
548
|
+
- Internal refactors that don't change documented behavior
|
|
549
|
+
- Bug fixes that don't change documented behavior
|
|
550
|
+
- Test-only changes
|
|
551
|
+
- Logging or formatting changes
|
|
552
|
+
- Dependency updates
|
|
553
|
+
|
|
554
|
+
If an update is needed, keep the existing README style and structure. Include the README change in the same commit as the implementation.
|
|
555
|
+
`;
|
|
556
|
+
}
|
|
557
|
+
function buildWorktreeMultiRepoPrompt(issue, config2) {
|
|
558
|
+
const workspace = resolve3(config2.workspace);
|
|
559
|
+
const repoBlock = config2.repos.map((r) => {
|
|
560
|
+
const absPath = resolve3(workspace, r.path);
|
|
561
|
+
return [
|
|
562
|
+
`- **${r.name}**: \`${absPath}\``,
|
|
563
|
+
` - Base branch: \`${r.base_branch}\``,
|
|
564
|
+
` - Worktrees dir: \`${join(absPath, ".worktrees")}\``
|
|
565
|
+
].join("\n");
|
|
566
|
+
}).join("\n\n");
|
|
567
|
+
const readmeBlock = buildReadmeInstructions();
|
|
568
|
+
const manifestPath = join(workspace, ".lisa-manifest.json");
|
|
569
|
+
return `You are an autonomous implementation agent working in a multi-repository workspace.
|
|
570
|
+
Your job is to determine the correct repository, create an English-named branch, implement the issue, commit, and write a manifest file.
|
|
571
|
+
|
|
572
|
+
You are in the workspace: \`${workspace}\`
|
|
573
|
+
|
|
574
|
+
## Issue
|
|
575
|
+
|
|
576
|
+
- **ID:** ${issue.id}
|
|
577
|
+
- **Title:** ${issue.title}
|
|
578
|
+
- **URL:** ${issue.url}
|
|
579
|
+
|
|
580
|
+
### Description
|
|
581
|
+
|
|
582
|
+
${issue.description}
|
|
583
|
+
|
|
584
|
+
## Available Repositories
|
|
585
|
+
|
|
586
|
+
${repoBlock}
|
|
587
|
+
|
|
588
|
+
## Instructions
|
|
589
|
+
|
|
590
|
+
1. **Identify the correct repository**: Read the issue title and description carefully.
|
|
591
|
+
Determine which single repository above is the right target. Consider:
|
|
592
|
+
- File paths or module names mentioned in the description
|
|
593
|
+
- Technologies and frameworks referenced
|
|
594
|
+
- The nature of the change (e.g., API endpoint \u2192 api repo, UI component \u2192 frontend repo)
|
|
595
|
+
|
|
596
|
+
2. **Choose an English branch name**: Create a slug in English following:
|
|
597
|
+
\`feat/${issue.id.toLowerCase()}-short-english-description\`
|
|
598
|
+
The description part MUST be in English regardless of the issue title language.
|
|
599
|
+
Example: for "${issue.id} Implementar rate limiting na API" \u2192 \`feat/${issue.id.toLowerCase()}-add-rate-limiting-to-api\`
|
|
600
|
+
|
|
601
|
+
3. **Set up the worktree**: In the chosen repo, run:
|
|
602
|
+
\`\`\`
|
|
603
|
+
git fetch origin <base_branch>
|
|
604
|
+
git worktree add -b <your-english-branch> <repoPath>/.worktrees/<your-english-branch> origin/<base_branch>
|
|
605
|
+
cd <repoPath>/.worktrees/<your-english-branch>
|
|
606
|
+
\`\`\`
|
|
607
|
+
|
|
608
|
+
4. **Implement**: Work inside the worktree. Follow the issue description exactly:
|
|
609
|
+
- Read all relevant files listed in the description first (if present)
|
|
610
|
+
- Follow the implementation instructions exactly
|
|
611
|
+
- Verify each acceptance criteria (if present)
|
|
612
|
+
- Respect any stack or technical constraints (if present)
|
|
613
|
+
${readmeBlock}
|
|
614
|
+
5. **Validate**: Run the project's linter/typecheck/tests if available:
|
|
615
|
+
- Check \`package.json\` for lint, typecheck, check, or test scripts.
|
|
616
|
+
- Run whichever validation scripts exist (e.g., \`npm run lint\`, \`npm run typecheck\`, \`npm run test\`).
|
|
617
|
+
- Fix any errors before proceeding.
|
|
618
|
+
|
|
619
|
+
6. **Commit (do NOT push)**: Make atomic commits with conventional commit messages.
|
|
620
|
+
Do NOT run \`git push\` \u2014 the caller handles pushing.
|
|
621
|
+
**IMPORTANT \u2014 Language rules:**
|
|
622
|
+
- All commit messages MUST be in English.
|
|
623
|
+
- Use conventional commits format: \`feat: ...\`, \`fix: ...\`, \`refactor: ...\`, \`chore: ...\`
|
|
624
|
+
|
|
625
|
+
7. **Write the manifest**: After committing, create \`${manifestPath}\` with JSON:
|
|
626
|
+
\`\`\`json
|
|
627
|
+
{
|
|
628
|
+
"repoPath": "<absolute path to the chosen repo>",
|
|
629
|
+
"branch": "<your English branch name>",
|
|
630
|
+
"prTitle": "<PR title in English, conventional commit format>"
|
|
631
|
+
}
|
|
632
|
+
\`\`\`
|
|
633
|
+
Do NOT commit this file.
|
|
634
|
+
|
|
635
|
+
## Rules
|
|
636
|
+
|
|
637
|
+
- **ALL git commits, branch names, PR titles, and PR descriptions MUST be in English.**
|
|
638
|
+
- The issue description may be in any language \u2014 read it for context but write all code artifacts in English.
|
|
639
|
+
- Do NOT push \u2014 the caller handles that.
|
|
640
|
+
- Do NOT create pull requests \u2014 the caller handles that.
|
|
641
|
+
- Do NOT update the issue tracker \u2014 the caller handles that.
|
|
642
|
+
- Do NOT install new dependencies unless the issue explicitly requires it.
|
|
643
|
+
- If you get stuck or the issue is unclear, STOP and explain why.
|
|
644
|
+
- One issue only. Do not pick up additional issues.
|
|
645
|
+
- If the repo has a CLAUDE.md, read it first and follow its conventions.`;
|
|
646
|
+
}
|
|
647
|
+
function buildWorktreePrompt(issue, testRunner) {
|
|
648
|
+
const testBlock = buildTestInstructions(testRunner ?? null);
|
|
649
|
+
const readmeBlock = buildReadmeInstructions();
|
|
213
650
|
return `You are an autonomous implementation agent. Your job is to implement a single
|
|
214
651
|
issue, validate it, commit, and push the branch.
|
|
215
652
|
|
|
@@ -233,22 +670,32 @@ ${issue.description}
|
|
|
233
670
|
- Follow the implementation instructions exactly
|
|
234
671
|
- Verify each acceptance criteria (if present)
|
|
235
672
|
- Respect any stack or technical constraints (if present)
|
|
236
|
-
|
|
673
|
+
${testBlock}${readmeBlock}
|
|
237
674
|
2. **Validate**: Run the project's linter/typecheck/tests if available:
|
|
238
675
|
- Check \`package.json\` (or equivalent) for lint, typecheck, check, or test scripts.
|
|
239
676
|
- Run whichever validation scripts exist (e.g., \`npm run lint\`, \`npm run typecheck\`).
|
|
240
677
|
- Fix any errors before proceeding.
|
|
241
678
|
|
|
242
|
-
3. **Commit
|
|
243
|
-
|
|
679
|
+
3. **Commit**: Make atomic commits with conventional commit messages.
|
|
680
|
+
**Branch name must be in English.** The branch was pre-created with an auto-generated name.
|
|
681
|
+
If that name contains non-English words, rename it before committing:
|
|
682
|
+
\`git branch -m <current-name> feat/${issue.id.toLowerCase()}-short-english-slug\`
|
|
683
|
+
Do NOT push \u2014 the caller handles pushing.
|
|
244
684
|
**IMPORTANT \u2014 Language rules:**
|
|
245
685
|
- All commit messages MUST be in English.
|
|
246
686
|
- Use conventional commits format: \`feat: ...\`, \`fix: ...\`, \`refactor: ...\`, \`chore: ...\`
|
|
247
687
|
|
|
688
|
+
4. **Write manifest**: Create \`.lisa-manifest.json\` in the **current directory** with JSON:
|
|
689
|
+
\`\`\`json
|
|
690
|
+
{"branch": "<final English branch name>", "prTitle": "<English PR title, conventional commit format>"}
|
|
691
|
+
\`\`\`
|
|
692
|
+
Do NOT commit this file.
|
|
693
|
+
|
|
248
694
|
## Rules
|
|
249
695
|
|
|
250
|
-
- **ALL git commits MUST be in English.**
|
|
696
|
+
- **ALL git commits, branch names, PR titles, and PR descriptions MUST be in English.**
|
|
251
697
|
- The issue description may be in any language \u2014 read it for context but write all code artifacts in English.
|
|
698
|
+
- Do NOT push \u2014 the caller handles that.
|
|
252
699
|
- Do NOT install new dependencies unless the issue explicitly requires it.
|
|
253
700
|
- If you get stuck or the issue is unclear, STOP and explain why.
|
|
254
701
|
- One issue only. Do not pick up additional issues.
|
|
@@ -256,10 +703,15 @@ ${issue.description}
|
|
|
256
703
|
- Do NOT create pull requests \u2014 the caller handles that.
|
|
257
704
|
- Do NOT update the issue tracker \u2014 the caller handles that.`;
|
|
258
705
|
}
|
|
259
|
-
function buildBranchPrompt(issue, config2) {
|
|
260
|
-
const workspace =
|
|
261
|
-
const repoEntries = config2.repos.map(
|
|
706
|
+
function buildBranchPrompt(issue, config2, testRunner) {
|
|
707
|
+
const workspace = resolve3(config2.workspace);
|
|
708
|
+
const repoEntries = config2.repos.map(
|
|
709
|
+
(r) => ` - If it says "Repo: ${r.name}" or title starts with "${r.match}" \u2192 \`${resolve3(workspace, r.path)}\` (base branch: \`${r.base_branch}\`)`
|
|
710
|
+
).join("\n");
|
|
262
711
|
const baseBranchInstruction = config2.repos.length > 0 ? "From the repo's base branch (listed above)" : `From \`${config2.base_branch}\``;
|
|
712
|
+
const testBlock = buildTestInstructions(testRunner ?? null);
|
|
713
|
+
const readmeBlock = buildReadmeInstructions();
|
|
714
|
+
const manifestPath = join(workspace, ".lisa-manifest.json");
|
|
263
715
|
return `You are an autonomous implementation agent. Your job is to implement a single
|
|
264
716
|
issue, validate it, commit, and push the branch.
|
|
265
717
|
|
|
@@ -279,15 +731,17 @@ ${issue.description}
|
|
|
279
731
|
${repoEntries}
|
|
280
732
|
- If it references multiple repos, pick the PRIMARY one (the one with the most files listed).
|
|
281
733
|
|
|
282
|
-
2. **Create a branch**: ${baseBranchInstruction}, create a branch
|
|
283
|
-
|
|
734
|
+
2. **Create a branch**: ${baseBranchInstruction}, create a branch with an **English** slug:
|
|
735
|
+
\`feat/${issue.id.toLowerCase()}-short-english-description\`
|
|
736
|
+
The description MUST be in English \u2014 translate or summarize the issue title if it's in another language.
|
|
737
|
+
Example: "Implementar rate limiting na API" \u2192 \`feat/${issue.id.toLowerCase()}-add-rate-limiting-to-api\`
|
|
284
738
|
|
|
285
739
|
3. **Implement**: Follow the issue description exactly:
|
|
286
740
|
- Read all relevant files listed in the description first (if present)
|
|
287
741
|
- Follow the implementation instructions exactly
|
|
288
742
|
- Verify each acceptance criteria (if present)
|
|
289
743
|
- Respect any stack or technical constraints (if present)
|
|
290
|
-
|
|
744
|
+
${testBlock}${readmeBlock}
|
|
291
745
|
4. **Validate**: Run the project's linter/typecheck/tests if available:
|
|
292
746
|
- Check \`package.json\` (or equivalent) for lint, typecheck, check, or test scripts.
|
|
293
747
|
- Run whichever validation scripts exist (e.g., \`npm run lint\`, \`npm run typecheck\`).
|
|
@@ -299,9 +753,15 @@ ${repoEntries}
|
|
|
299
753
|
- All commit messages MUST be in English.
|
|
300
754
|
- Use conventional commits format: \`feat: ...\`, \`fix: ...\`, \`refactor: ...\`, \`chore: ...\`
|
|
301
755
|
|
|
756
|
+
6. **Write manifest**: Before finishing, create \`${manifestPath}\` with JSON:
|
|
757
|
+
\`\`\`json
|
|
758
|
+
{"repoPath": "<absolute path to this repo>", "branch": "<branch name>", "prTitle": "<English PR title, conventional commit format>"}
|
|
759
|
+
\`\`\`
|
|
760
|
+
Do NOT commit this file.
|
|
761
|
+
|
|
302
762
|
## Rules
|
|
303
763
|
|
|
304
|
-
- **ALL git commits, branch names MUST be in English.**
|
|
764
|
+
- **ALL git commits, branch names, PR titles, and PR descriptions MUST be in English.**
|
|
305
765
|
- The issue description may be in any language \u2014 read it for context but write all code artifacts in English.
|
|
306
766
|
- Do NOT modify files outside the target repo.
|
|
307
767
|
- Do NOT install new dependencies unless the issue explicitly requires it.
|
|
@@ -312,11 +772,175 @@ ${repoEntries}
|
|
|
312
772
|
- Do NOT update the issue tracker \u2014 the caller handles that.`;
|
|
313
773
|
}
|
|
314
774
|
|
|
775
|
+
// src/guardrails.ts
|
|
776
|
+
import { existsSync as existsSync4, mkdirSync as mkdirSync3, readFileSync as readFileSync3, writeFileSync as writeFileSync3 } from "fs";
|
|
777
|
+
import { dirname as dirname2, join as join2 } from "path";
|
|
778
|
+
// src/guardrails.ts — persistent "lessons learned" log appended to prompts.
var GUARDRAILS_FILE = ".lisa/guardrails.md";
// Oldest entries are rotated out once this many are stored.
var MAX_ENTRIES = 20;
// How many trailing output lines are captured as failure context.
var CONTEXT_LINES = 20;

// Absolute path of the guardrails file inside a project directory.
function guardrailsPath(dir) {
  return join2(dir, GUARDRAILS_FILE);
}

// Read the guardrails file; missing or unreadable files yield "".
function readGuardrails(dir) {
  const filePath = guardrailsPath(dir);
  if (existsSync4(filePath)) {
    try {
      return readFileSync3(filePath, "utf-8");
    } catch {
      // Unreadable file is treated the same as an absent one.
    }
  }
  return "";
}

// Render the guardrails content as a markdown section for prompt injection.
// Returns "" when there is nothing to inject.
function buildGuardrailsSection(dir) {
  const text = readGuardrails(dir);
  if (text.trim() === "") return "";
  return `\n## Guardrails \u2014 Avoid these known pitfalls\n\n${text}\n`;
}

// Keep only the last CONTEXT_LINES lines of provider output.
function extractContext(output) {
  return output.trim().split("\n").slice(-CONTEXT_LINES).join("\n");
}

// Classify a failure from raw provider output. Order matters: more
// specific signals (rate limit, network) are checked before generic ones.
function extractErrorType(output) {
  if (/429|rate.?limit|quota/i.test(output)) return "Rate limit / quota exceeded";
  if (/ETIMEDOUT|ECONNREFUSED|ECONNRESET|ENOTFOUND/.test(output)) return "Network error";
  if (/timeout|timed?\s*out/i.test(output)) return "Timeout";
  const exitMatch = output.match(/exit code[:\s]+(\d+)/i);
  if (exitMatch) return `Exit code ${exitMatch[1]}`;
  if (/exit(?:ed)? with/i.test(output)) return "Non-zero exit code";
  return "Unknown error";
}

// Append one entry to the guardrails file, creating it (and its
// directory) on first use and rotating out the oldest entries.
function appendEntry(dir, entry) {
  const filePath = guardrailsPath(dir);
  const parentDir = dirname2(filePath);
  if (!existsSync4(parentDir)) {
    mkdirSync3(parentDir, { recursive: true });
  }
  const previous = existsSync4(filePath) ? readFileSync3(filePath, "utf-8") : "";
  const formatted = formatEntry(entry);
  let next;
  if (previous.trim() === "") {
    next = `# Guardrails \u2014 Li\xE7\xF5es aprendidas\n\n${formatted}`;
  } else {
    const header = extractHeader(previous);
    const all = [...splitEntries(previous), formatted];
    const kept = all.length > MAX_ENTRIES ? all.slice(-MAX_ENTRIES) : all;
    next = `${header}\n\n${kept.join("\n\n")}`;
  }
  writeFileSync3(filePath, next, "utf-8");
}

// Render a single guardrail entry as a markdown block.
function formatEntry(entry) {
  const parts = [];
  parts.push(`## Issue ${entry.issueId} (${entry.date})`);
  parts.push(`- Provider: ${entry.provider}`);
  parts.push(`- Erro: ${entry.errorType}`);
  parts.push(`- Contexto:`);
  parts.push("```");
  parts.push(entry.context);
  parts.push("```");
  return parts.join("\n");
}

// Everything before the first "## " heading is the file header.
function extractHeader(content) {
  const firstEntry = content.search(/^## /m);
  return (firstEntry === -1 ? content : content.slice(0, firstEntry)).trim();
}

// Split the file into individual entries, each starting at a "## " heading.
function splitEntries(content) {
  const starts = [...content.matchAll(/^## /gm)].map((m) => m.index);
  return starts.map((start, i) => {
    const end = starts[i + 1] ?? content.length;
    return content.slice(start, end).trim();
  });
}
|
|
866
|
+
|
|
315
867
|
// src/providers/claude.ts
|
|
316
|
-
import {
|
|
317
|
-
import { appendFileSync as appendFileSync2,
|
|
318
|
-
import { join } from "path";
|
|
868
|
+
import { execSync, spawn as spawn2 } from "child_process";
|
|
869
|
+
import { appendFileSync as appendFileSync2, mkdtempSync, unlinkSync, writeFileSync as writeFileSync4 } from "fs";
|
|
319
870
|
import { tmpdir } from "os";
|
|
871
|
+
import { join as join3 } from "path";
|
|
872
|
+
|
|
873
|
+
// src/overseer.ts
|
|
874
|
+
import { execFile } from "child_process";
|
|
875
|
+
import { promisify } from "util";
|
|
876
|
+
// src/overseer.ts — watchdog helpers.
var execFileAsync = promisify(execFile);
// Appended to provider output when the overseer kills a stuck process.
var STUCK_MESSAGE = "\n[lisa-overseer] Provider killed: no git changes detected within the stuck threshold. Eligible for fallback.\n";

// Snapshot of working-tree state via `git status --porcelain`.
// Any failure (not a repo, git missing, timeout) yields "" so the
// overseer degrades gracefully instead of crashing.
async function getGitSnapshot(cwd) {
  try {
    const result = await execFileAsync("git", ["status", "--porcelain"], {
      cwd,
      timeout: 1e4
    });
    return result.stdout;
  } catch {
    return "";
  }
}
|
|
889
|
+
function startOverseer(proc, cwd, config2, getSnapshot = getGitSnapshot) {
|
|
890
|
+
if (!config2.enabled) {
|
|
891
|
+
return {
|
|
892
|
+
stop() {
|
|
893
|
+
},
|
|
894
|
+
wasKilled() {
|
|
895
|
+
return false;
|
|
896
|
+
}
|
|
897
|
+
};
|
|
898
|
+
}
|
|
899
|
+
let killed = false;
|
|
900
|
+
let lastSnapshot;
|
|
901
|
+
let lastChangeTime = Date.now();
|
|
902
|
+
let timer = null;
|
|
903
|
+
const check = async () => {
|
|
904
|
+
if (killed) return;
|
|
905
|
+
try {
|
|
906
|
+
const snapshot = await getSnapshot(cwd);
|
|
907
|
+
if (lastSnapshot === void 0) {
|
|
908
|
+
lastSnapshot = snapshot;
|
|
909
|
+
lastChangeTime = Date.now();
|
|
910
|
+
return;
|
|
911
|
+
}
|
|
912
|
+
if (snapshot !== lastSnapshot) {
|
|
913
|
+
lastSnapshot = snapshot;
|
|
914
|
+
lastChangeTime = Date.now();
|
|
915
|
+
return;
|
|
916
|
+
}
|
|
917
|
+
const idleMs = Date.now() - lastChangeTime;
|
|
918
|
+
if (idleMs >= config2.stuck_threshold * 1e3) {
|
|
919
|
+
killed = true;
|
|
920
|
+
if (timer) {
|
|
921
|
+
clearInterval(timer);
|
|
922
|
+
timer = null;
|
|
923
|
+
}
|
|
924
|
+
proc.kill("SIGTERM");
|
|
925
|
+
}
|
|
926
|
+
} catch {
|
|
927
|
+
}
|
|
928
|
+
};
|
|
929
|
+
timer = setInterval(check, config2.check_interval * 1e3);
|
|
930
|
+
return {
|
|
931
|
+
stop() {
|
|
932
|
+
if (timer) {
|
|
933
|
+
clearInterval(timer);
|
|
934
|
+
timer = null;
|
|
935
|
+
}
|
|
936
|
+
},
|
|
937
|
+
wasKilled() {
|
|
938
|
+
return killed;
|
|
939
|
+
}
|
|
940
|
+
};
|
|
941
|
+
}
|
|
942
|
+
|
|
943
|
+
// src/providers/claude.ts
|
|
320
944
|
var ClaudeProvider = class {
|
|
321
945
|
name = "claude";
|
|
322
946
|
async isAvailable() {
|
|
@@ -329,11 +953,11 @@ var ClaudeProvider = class {
|
|
|
329
953
|
}
|
|
330
954
|
async run(prompt, opts) {
|
|
331
955
|
const start = Date.now();
|
|
332
|
-
const tmpDir = mkdtempSync(
|
|
333
|
-
const promptFile =
|
|
334
|
-
|
|
956
|
+
const tmpDir = mkdtempSync(join3(tmpdir(), "lisa-"));
|
|
957
|
+
const promptFile = join3(tmpDir, "prompt.md");
|
|
958
|
+
writeFileSync4(promptFile, prompt, "utf-8");
|
|
335
959
|
try {
|
|
336
|
-
const proc =
|
|
960
|
+
const proc = spawn2(
|
|
337
961
|
"sh",
|
|
338
962
|
["-c", `claude -p --dangerously-skip-permissions "$(cat '${promptFile}')"`],
|
|
339
963
|
{
|
|
@@ -342,6 +966,7 @@ var ClaudeProvider = class {
|
|
|
342
966
|
env: { ...process.env, CLAUDECODE: void 0 }
|
|
343
967
|
}
|
|
344
968
|
);
|
|
969
|
+
const overseer = opts.overseer?.enabled ? startOverseer(proc, opts.cwd, opts.overseer) : null;
|
|
345
970
|
const chunks = [];
|
|
346
971
|
proc.stdout.on("data", (chunk) => {
|
|
347
972
|
const text2 = chunk.toString();
|
|
@@ -360,11 +985,17 @@ var ClaudeProvider = class {
|
|
|
360
985
|
} catch {
|
|
361
986
|
}
|
|
362
987
|
});
|
|
363
|
-
const exitCode = await new Promise((
|
|
364
|
-
proc.on("close", (code) =>
|
|
988
|
+
const exitCode = await new Promise((resolve6) => {
|
|
989
|
+
proc.on("close", (code) => {
|
|
990
|
+
overseer?.stop();
|
|
991
|
+
resolve6(code ?? 1);
|
|
992
|
+
});
|
|
365
993
|
});
|
|
994
|
+
if (overseer?.wasKilled()) {
|
|
995
|
+
chunks.push(STUCK_MESSAGE);
|
|
996
|
+
}
|
|
366
997
|
return {
|
|
367
|
-
success: exitCode === 0,
|
|
998
|
+
success: exitCode === 0 && !overseer?.wasKilled(),
|
|
368
999
|
output: chunks.join(""),
|
|
369
1000
|
duration: Date.now() - start
|
|
370
1001
|
};
|
|
@@ -384,10 +1015,10 @@ var ClaudeProvider = class {
|
|
|
384
1015
|
};
|
|
385
1016
|
|
|
386
1017
|
// src/providers/gemini.ts
|
|
387
|
-
import {
|
|
388
|
-
import { appendFileSync as appendFileSync3,
|
|
389
|
-
import { join as join2 } from "path";
|
|
1018
|
+
import { execSync as execSync2, spawn as spawn3 } from "child_process";
|
|
1019
|
+
import { appendFileSync as appendFileSync3, mkdtempSync as mkdtempSync2, unlinkSync as unlinkSync2, writeFileSync as writeFileSync5 } from "fs";
|
|
390
1020
|
import { tmpdir as tmpdir2 } from "os";
|
|
1021
|
+
import { join as join4 } from "path";
|
|
391
1022
|
var GeminiProvider = class {
|
|
392
1023
|
name = "gemini";
|
|
393
1024
|
async isAvailable() {
|
|
@@ -400,18 +1031,15 @@ var GeminiProvider = class {
|
|
|
400
1031
|
}
|
|
401
1032
|
async run(prompt, opts) {
|
|
402
1033
|
const start = Date.now();
|
|
403
|
-
const tmpDir = mkdtempSync2(
|
|
404
|
-
const promptFile =
|
|
405
|
-
|
|
1034
|
+
const tmpDir = mkdtempSync2(join4(tmpdir2(), "lisa-"));
|
|
1035
|
+
const promptFile = join4(tmpDir, "prompt.md");
|
|
1036
|
+
writeFileSync5(promptFile, prompt, "utf-8");
|
|
406
1037
|
try {
|
|
407
|
-
const proc =
|
|
408
|
-
|
|
409
|
-
["
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
stdio: ["ignore", "pipe", "pipe"]
|
|
413
|
-
}
|
|
414
|
-
);
|
|
1038
|
+
const proc = spawn3("sh", ["-c", `gemini --yolo -p "$(cat '${promptFile}')"`], {
|
|
1039
|
+
cwd: opts.cwd,
|
|
1040
|
+
stdio: ["ignore", "pipe", "pipe"]
|
|
1041
|
+
});
|
|
1042
|
+
const overseer = opts.overseer?.enabled ? startOverseer(proc, opts.cwd, opts.overseer) : null;
|
|
415
1043
|
const chunks = [];
|
|
416
1044
|
proc.stdout.on("data", (chunk) => {
|
|
417
1045
|
const text2 = chunk.toString();
|
|
@@ -430,11 +1058,17 @@ var GeminiProvider = class {
|
|
|
430
1058
|
} catch {
|
|
431
1059
|
}
|
|
432
1060
|
});
|
|
433
|
-
const exitCode = await new Promise((
|
|
434
|
-
proc.on("close", (code) =>
|
|
1061
|
+
const exitCode = await new Promise((resolve6) => {
|
|
1062
|
+
proc.on("close", (code) => {
|
|
1063
|
+
overseer?.stop();
|
|
1064
|
+
resolve6(code ?? 1);
|
|
1065
|
+
});
|
|
435
1066
|
});
|
|
1067
|
+
if (overseer?.wasKilled()) {
|
|
1068
|
+
chunks.push(STUCK_MESSAGE);
|
|
1069
|
+
}
|
|
436
1070
|
return {
|
|
437
|
-
success: exitCode === 0,
|
|
1071
|
+
success: exitCode === 0 && !overseer?.wasKilled(),
|
|
438
1072
|
output: chunks.join(""),
|
|
439
1073
|
duration: Date.now() - start
|
|
440
1074
|
};
|
|
@@ -454,10 +1088,10 @@ var GeminiProvider = class {
|
|
|
454
1088
|
};
|
|
455
1089
|
|
|
456
1090
|
// src/providers/opencode.ts
|
|
457
|
-
import {
|
|
458
|
-
import { appendFileSync as appendFileSync4,
|
|
459
|
-
import { join as join3 } from "path";
|
|
1091
|
+
import { execSync as execSync3, spawn as spawn4 } from "child_process";
|
|
1092
|
+
import { appendFileSync as appendFileSync4, mkdtempSync as mkdtempSync3, unlinkSync as unlinkSync3, writeFileSync as writeFileSync6 } from "fs";
|
|
460
1093
|
import { tmpdir as tmpdir3 } from "os";
|
|
1094
|
+
import { join as join5 } from "path";
|
|
461
1095
|
var OpenCodeProvider = class {
|
|
462
1096
|
name = "opencode";
|
|
463
1097
|
async isAvailable() {
|
|
@@ -470,18 +1104,15 @@ var OpenCodeProvider = class {
|
|
|
470
1104
|
}
|
|
471
1105
|
async run(prompt, opts) {
|
|
472
1106
|
const start = Date.now();
|
|
473
|
-
const tmpDir = mkdtempSync3(
|
|
474
|
-
const promptFile =
|
|
475
|
-
|
|
1107
|
+
const tmpDir = mkdtempSync3(join5(tmpdir3(), "lisa-"));
|
|
1108
|
+
const promptFile = join5(tmpDir, "prompt.md");
|
|
1109
|
+
writeFileSync6(promptFile, prompt, "utf-8");
|
|
476
1110
|
try {
|
|
477
|
-
const proc =
|
|
478
|
-
|
|
479
|
-
["
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
stdio: ["ignore", "pipe", "pipe"]
|
|
483
|
-
}
|
|
484
|
-
);
|
|
1111
|
+
const proc = spawn4("sh", ["-c", `opencode run "$(cat '${promptFile}')"`], {
|
|
1112
|
+
cwd: opts.cwd,
|
|
1113
|
+
stdio: ["ignore", "pipe", "pipe"]
|
|
1114
|
+
});
|
|
1115
|
+
const overseer = opts.overseer?.enabled ? startOverseer(proc, opts.cwd, opts.overseer) : null;
|
|
485
1116
|
const chunks = [];
|
|
486
1117
|
proc.stdout.on("data", (chunk) => {
|
|
487
1118
|
const text2 = chunk.toString();
|
|
@@ -500,11 +1131,17 @@ var OpenCodeProvider = class {
|
|
|
500
1131
|
} catch {
|
|
501
1132
|
}
|
|
502
1133
|
});
|
|
503
|
-
const exitCode = await new Promise((
|
|
504
|
-
proc.on("close", (code) =>
|
|
1134
|
+
const exitCode = await new Promise((resolve6) => {
|
|
1135
|
+
proc.on("close", (code) => {
|
|
1136
|
+
overseer?.stop();
|
|
1137
|
+
resolve6(code ?? 1);
|
|
1138
|
+
});
|
|
505
1139
|
});
|
|
1140
|
+
if (overseer?.wasKilled()) {
|
|
1141
|
+
chunks.push(STUCK_MESSAGE);
|
|
1142
|
+
}
|
|
506
1143
|
return {
|
|
507
|
-
success: exitCode === 0,
|
|
1144
|
+
success: exitCode === 0 && !overseer?.wasKilled(),
|
|
508
1145
|
output: chunks.join(""),
|
|
509
1146
|
duration: Date.now() - start
|
|
510
1147
|
};
|
|
@@ -543,24 +1180,126 @@ function createProvider(name) {
|
|
|
543
1180
|
}
|
|
544
1181
|
return factory();
|
|
545
1182
|
}
|
|
1183
|
+
// Failure signatures that justify retrying with the next provider:
// transient (quota/throttle/network/availability) or environmental
// (missing model, CLI not installed, overseer kill) problems.
var ELIGIBLE_ERROR_PATTERNS = [
  // Quota / throttling
  /429/i,
  /quota/i,
  /rate.?limit/i,
  /too many requests/i,
  /resource.?exhausted/i,
  // Service availability
  /overloaded/i,
  /unavailable/i,
  // Model resolution
  /not.?found.*model/i,
  /model.*not.?found/i,
  /does not exist/i,
  // Network / timeout
  /ETIMEDOUT/,
  /ECONNREFUSED/,
  /ECONNRESET/,
  /ENOTFOUND/,
  /timeout/i,
  /timed?\s*out/i,
  /network.?error/i,
  // Provider installation
  /not installed/i,
  /not in PATH/i,
  /command not found/i,
  // Overseer kill marker
  /lisa-overseer/i
];
// True when the provider output matches any eligible-failure signature.
function isEligibleForFallback(output) {
  for (const pattern of ELIGIBLE_ERROR_PATTERNS) {
    if (pattern.test(output)) return true;
  }
  return false;
}
|
|
1209
|
+
// Run `prompt` against each model in order until one succeeds.
// Per model: skip if the CLI is unavailable; otherwise run with the
// guardrails section appended. On failure, record a guardrail entry
// (when configured), and fall through to the next model only for
// eligible (transient/environmental) errors. Returns the first success,
// the first non-eligible failure, or an exhausted-attempts report.
async function runWithFallback(models, prompt, opts) {
  const attempts = [];
  for (const model of models) {
    const provider = createProvider(model);
    const available = await provider.isAvailable();
    if (!available) {
      attempts.push({
        provider: model,
        success: false,
        error: `Provider "${model}" is not installed or not in PATH`,
        duration: 0
      });
      continue;
    }
    // Inject known pitfalls into the prompt when a guardrails dir is set.
    const guardrails = opts.guardrailsDir ? buildGuardrailsSection(opts.guardrailsDir) : "";
    const fullPrompt = guardrails === "" ? prompt : `${prompt}${guardrails}`;
    const result = await provider.run(fullPrompt, opts);
    if (result.success) {
      attempts.push({ provider: model, success: true, duration: result.duration });
      return {
        success: true,
        output: result.output,
        duration: result.duration,
        providerUsed: model,
        attempts
      };
    }
    // Persist a lesson-learned entry before deciding on fallback.
    if (opts.guardrailsDir && opts.issueId) {
      appendEntry(opts.guardrailsDir, {
        issueId: opts.issueId,
        date: new Date().toISOString().slice(0, 10),
        provider: model,
        errorType: extractErrorType(result.output),
        context: extractContext(result.output)
      });
    }
    const eligible = isEligibleForFallback(result.output);
    attempts.push({
      provider: model,
      success: false,
      error: eligible ? "Eligible error (quota/unavailable/timeout)" : "Non-eligible error",
      duration: result.duration
    });
    if (!eligible) {
      // A hard failure (e.g. the task itself failed) stops the chain.
      return {
        success: false,
        output: result.output,
        duration: result.duration,
        providerUsed: model,
        attempts
      };
    }
  }
  // Every model was skipped or failed with an eligible error.
  const totalDuration = attempts.reduce((sum, attempt) => sum + attempt.duration, 0);
  return {
    success: false,
    output: formatAttemptsReport(attempts),
    duration: totalDuration,
    providerUsed: attempts.at(-1)?.provider ?? models[0] ?? "claude",
    attempts
  };
}
|
|
1275
|
+
// Human-readable summary of a full fallback chain, one numbered line
// per attempt with status, error (if any) and duration in seconds.
function formatAttemptsReport(attempts) {
  const report = ["All models exhausted. Attempt history:"];
  attempts.forEach((attempt, index) => {
    const status2 = attempt.success ? "OK" : "FAILED";
    const errorSuffix = attempt.error ? ` \u2014 ${attempt.error}` : "";
    const durationSuffix = attempt.duration > 0 ? ` (${Math.round(attempt.duration / 1e3)}s)` : "";
    report.push(` ${index + 1}. ${attempt.provider}: ${status2}${errorSuffix}${durationSuffix}`);
  });
  return report.join("\n");
}
|
|
546
1285
|
|
|
547
1286
|
// src/sources/linear.ts
|
|
548
|
-
var
|
|
549
|
-
var
|
|
1287
|
+
// src/sources/linear.ts — endpoint constants and credentials.
var API_URL2 = "https://api.linear.app/graphql";
var REQUEST_TIMEOUT_MS2 = 3e4;

// Linear API key from the environment; throws when unset or empty.
function getApiKey() {
  const key = process.env.LINEAR_API_KEY;
  if (!key) {
    throw new Error("LINEAR_API_KEY is not set");
  }
  return key;
}
|
|
555
1294
|
async function gql(query, variables) {
|
|
556
|
-
const res = await fetch(
|
|
1295
|
+
const res = await fetch(API_URL2, {
|
|
557
1296
|
method: "POST",
|
|
558
1297
|
headers: {
|
|
559
1298
|
"Content-Type": "application/json",
|
|
560
1299
|
Authorization: getApiKey()
|
|
561
1300
|
},
|
|
562
1301
|
body: JSON.stringify({ query, variables }),
|
|
563
|
-
signal: AbortSignal.timeout(
|
|
1302
|
+
signal: AbortSignal.timeout(REQUEST_TIMEOUT_MS2)
|
|
564
1303
|
});
|
|
565
1304
|
if (!res.ok) {
|
|
566
1305
|
const text2 = await res.text();
|
|
@@ -584,7 +1323,7 @@ var LinearSource = class {
|
|
|
584
1323
|
labels: { name: { eq: $labelName } }
|
|
585
1324
|
state: { name: { eq: $statusName } }
|
|
586
1325
|
}
|
|
587
|
-
first:
|
|
1326
|
+
first: 50
|
|
588
1327
|
) {
|
|
589
1328
|
nodes {
|
|
590
1329
|
id
|
|
@@ -593,6 +1332,15 @@ var LinearSource = class {
|
|
|
593
1332
|
description
|
|
594
1333
|
url
|
|
595
1334
|
priority
|
|
1335
|
+
inverseRelations(first: 50) {
|
|
1336
|
+
nodes {
|
|
1337
|
+
type
|
|
1338
|
+
issue {
|
|
1339
|
+
identifier
|
|
1340
|
+
state { type }
|
|
1341
|
+
}
|
|
1342
|
+
}
|
|
1343
|
+
}
|
|
596
1344
|
}
|
|
597
1345
|
}
|
|
598
1346
|
}`,
|
|
@@ -605,12 +1353,32 @@ var LinearSource = class {
|
|
|
605
1353
|
);
|
|
606
1354
|
const issues = data.issues.nodes;
|
|
607
1355
|
if (issues.length === 0) return null;
|
|
608
|
-
|
|
1356
|
+
const unblocked = [];
|
|
1357
|
+
const blocked = [];
|
|
1358
|
+
for (const issue2 of issues) {
|
|
1359
|
+
const activeBlockers = issue2.inverseRelations.nodes.filter((r) => r.type === "blocks").filter((r) => r.issue.state.type !== "completed" && r.issue.state.type !== "canceled").map((r) => r.issue.identifier);
|
|
1360
|
+
if (activeBlockers.length === 0) {
|
|
1361
|
+
unblocked.push(issue2);
|
|
1362
|
+
} else {
|
|
1363
|
+
blocked.push({ identifier: issue2.identifier, blockers: activeBlockers });
|
|
1364
|
+
}
|
|
1365
|
+
}
|
|
1366
|
+
if (unblocked.length === 0) {
|
|
1367
|
+
if (blocked.length > 0) {
|
|
1368
|
+
warn("No unblocked issues found. Blocked issues:");
|
|
1369
|
+
for (const entry of blocked) {
|
|
1370
|
+
warn(` ${entry.identifier} \u2014 blocked by: ${entry.blockers.join(", ")}`);
|
|
1371
|
+
}
|
|
1372
|
+
}
|
|
1373
|
+
return null;
|
|
1374
|
+
}
|
|
1375
|
+
unblocked.sort((a, b) => {
|
|
609
1376
|
const pa = a.priority === 0 ? 5 : a.priority;
|
|
610
1377
|
const pb = b.priority === 0 ? 5 : b.priority;
|
|
611
1378
|
return pa - pb;
|
|
612
1379
|
});
|
|
613
|
-
const issue =
|
|
1380
|
+
const issue = unblocked[0];
|
|
1381
|
+
if (!issue) return null;
|
|
614
1382
|
return {
|
|
615
1383
|
id: issue.identifier,
|
|
616
1384
|
title: issue.title,
|
|
@@ -618,6 +1386,28 @@ var LinearSource = class {
|
|
|
618
1386
|
url: issue.url
|
|
619
1387
|
};
|
|
620
1388
|
}
|
|
1389
|
+
async fetchIssueById(id) {
|
|
1390
|
+
const identifier = parseLinearIdentifier(id);
|
|
1391
|
+
const data = await gql(
|
|
1392
|
+
`query($identifier: String!) {
|
|
1393
|
+
issue(id: $identifier) {
|
|
1394
|
+
id
|
|
1395
|
+
identifier
|
|
1396
|
+
title
|
|
1397
|
+
description
|
|
1398
|
+
url
|
|
1399
|
+
}
|
|
1400
|
+
}`,
|
|
1401
|
+
{ identifier }
|
|
1402
|
+
);
|
|
1403
|
+
if (!data.issue) return null;
|
|
1404
|
+
return {
|
|
1405
|
+
id: data.issue.identifier,
|
|
1406
|
+
title: data.issue.title,
|
|
1407
|
+
description: data.issue.description || "",
|
|
1408
|
+
url: data.issue.url
|
|
1409
|
+
};
|
|
1410
|
+
}
|
|
621
1411
|
async updateStatus(issueId, statusName) {
|
|
622
1412
|
const issueData = await gql(
|
|
623
1413
|
`query($identifier: String!) {
|
|
@@ -643,7 +1433,7 @@ var LinearSource = class {
|
|
|
643
1433
|
const available = statesData.workflowStates.nodes.map((s) => s.name).join(", ");
|
|
644
1434
|
throw new Error(`Status "${statusName}" not found. Available: ${available}`);
|
|
645
1435
|
}
|
|
646
|
-
await gql(
|
|
1436
|
+
const mutationResult = await gql(
|
|
647
1437
|
`mutation($issueId: String!, $stateId: String!) {
|
|
648
1438
|
issueUpdate(id: $issueId, input: { stateId: $stateId }) {
|
|
649
1439
|
success
|
|
@@ -651,6 +1441,11 @@ var LinearSource = class {
|
|
|
651
1441
|
}`,
|
|
652
1442
|
{ issueId: issueData.issue.id, stateId: state.id }
|
|
653
1443
|
);
|
|
1444
|
+
if (!mutationResult.issueUpdate.success) {
|
|
1445
|
+
throw new Error(
|
|
1446
|
+
`issueUpdate returned success=false for ${issueId} (stateId: ${state.id}, stateName: ${state.name})`
|
|
1447
|
+
);
|
|
1448
|
+
}
|
|
654
1449
|
}
|
|
655
1450
|
async attachPullRequest(_issueId, _prUrl) {
|
|
656
1451
|
}
|
|
@@ -665,11 +1460,9 @@ var LinearSource = class {
|
|
|
665
1460
|
{ identifier: issueId }
|
|
666
1461
|
);
|
|
667
1462
|
const currentLabels = issueData.issue.labels.nodes;
|
|
668
|
-
const filtered = currentLabels.filter(
|
|
669
|
-
(l) => l.name.toLowerCase() !== labelName.toLowerCase()
|
|
670
|
-
);
|
|
1463
|
+
const filtered = currentLabels.filter((l) => l.name.toLowerCase() !== labelName.toLowerCase());
|
|
671
1464
|
if (filtered.length === currentLabels.length) return;
|
|
672
|
-
await gql(
|
|
1465
|
+
const mutationResult = await gql(
|
|
673
1466
|
`mutation($issueId: String!, $labelIds: [String!]!) {
|
|
674
1467
|
issueUpdate(id: $issueId, input: { labelIds: $labelIds }) {
|
|
675
1468
|
success
|
|
@@ -680,12 +1473,22 @@ var LinearSource = class {
|
|
|
680
1473
|
labelIds: filtered.map((l) => l.id)
|
|
681
1474
|
}
|
|
682
1475
|
);
|
|
1476
|
+
if (!mutationResult.issueUpdate.success) {
|
|
1477
|
+
throw new Error(
|
|
1478
|
+
`issueUpdate returned success=false when removing label "${labelName}" from ${issueId}`
|
|
1479
|
+
);
|
|
1480
|
+
}
|
|
683
1481
|
}
|
|
684
1482
|
};
|
|
1483
|
+
// Extract a Linear issue identifier (e.g. "ABC-123") from an issue URL;
// anything that is not a URL with an /issue/ segment is returned as-is.
function parseLinearIdentifier(input) {
  const match = /\/issue\/([A-Z]+-\d+)/.exec(input);
  return match?.[1] ?? input;
}
|
|
685
1488
|
|
|
686
1489
|
// src/sources/trello.ts
|
|
687
|
-
var
|
|
688
|
-
var
|
|
1490
|
+
var API_URL3 = "https://api.trello.com/1";
|
|
1491
|
+
var REQUEST_TIMEOUT_MS3 = 3e4;
|
|
689
1492
|
function getAuthHeaders() {
|
|
690
1493
|
const key = process.env.TRELLO_API_KEY;
|
|
691
1494
|
const token = process.env.TRELLO_TOKEN;
|
|
@@ -696,11 +1499,11 @@ function getAuthHeaders() {
|
|
|
696
1499
|
}
|
|
697
1500
|
async function trelloFetch(method, path, params = "") {
|
|
698
1501
|
const sep = params ? "?" : "";
|
|
699
|
-
const url = `${
|
|
1502
|
+
const url = `${API_URL3}${path}${sep}${params}`;
|
|
700
1503
|
const res = await fetch(url, {
|
|
701
1504
|
method,
|
|
702
1505
|
headers: getAuthHeaders(),
|
|
703
|
-
signal: AbortSignal.timeout(
|
|
1506
|
+
signal: AbortSignal.timeout(REQUEST_TIMEOUT_MS3)
|
|
704
1507
|
});
|
|
705
1508
|
if (!res.ok) {
|
|
706
1509
|
const text2 = await res.text();
|
|
@@ -755,6 +1558,7 @@ var TrelloSource = class {
|
|
|
755
1558
|
const matching = cards.filter((c) => c.idLabels.includes(label.id));
|
|
756
1559
|
if (matching.length === 0) return null;
|
|
757
1560
|
const card = matching[0];
|
|
1561
|
+
if (!card) return null;
|
|
758
1562
|
return {
|
|
759
1563
|
id: card.id,
|
|
760
1564
|
title: card.name,
|
|
@@ -762,150 +1566,96 @@ var TrelloSource = class {
|
|
|
762
1566
|
url: card.url
|
|
763
1567
|
};
|
|
764
1568
|
}
|
|
1569
|
+
async fetchIssueById(id) {
|
|
1570
|
+
const shortLink = parseTrelloIdentifier(id);
|
|
1571
|
+
try {
|
|
1572
|
+
const card = await trelloGet(
|
|
1573
|
+
`/cards/${shortLink}`,
|
|
1574
|
+
"fields=name,desc,url,idLabels,idList"
|
|
1575
|
+
);
|
|
1576
|
+
return {
|
|
1577
|
+
id: card.id,
|
|
1578
|
+
title: card.name,
|
|
1579
|
+
description: card.desc || "",
|
|
1580
|
+
url: card.url
|
|
1581
|
+
};
|
|
1582
|
+
} catch {
|
|
1583
|
+
return null;
|
|
1584
|
+
}
|
|
1585
|
+
}
|
|
765
1586
|
async updateStatus(cardId, listName) {
|
|
766
1587
|
const card = await trelloGet(`/cards/${cardId}`, "fields=idBoard");
|
|
767
1588
|
const list = await findListByName(card.idBoard, listName);
|
|
768
|
-
await trelloPut(`/cards/${cardId}`, `idList=${list.id}`);
|
|
769
|
-
}
|
|
770
|
-
async attachPullRequest(cardId, prUrl) {
|
|
771
|
-
await trelloPost(`/cards/${cardId}/attachments`, `url=${encodeURIComponent(prUrl)}`);
|
|
772
|
-
}
|
|
773
|
-
async removeLabel(cardId, labelName) {
|
|
774
|
-
const card = await trelloGet(
|
|
775
|
-
`/cards/${cardId}`,
|
|
776
|
-
"fields=idBoard,idLabels"
|
|
777
|
-
);
|
|
778
|
-
const label = await findLabelByName(card.idBoard, labelName);
|
|
779
|
-
if (!card.idLabels.includes(label.id)) return;
|
|
780
|
-
await trelloDelete(`/cards/${cardId}/idLabels/${label.id}`);
|
|
781
|
-
}
|
|
782
|
-
};
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
trello: () => new TrelloSource()
|
|
788
|
-
};
|
|
789
|
-
function createSource(name) {
|
|
790
|
-
const factory = sources[name];
|
|
791
|
-
if (!factory) {
|
|
792
|
-
throw new Error(`Unknown source: ${name}. Available: ${Object.keys(sources).join(", ")}`);
|
|
793
|
-
}
|
|
794
|
-
return factory();
|
|
795
|
-
}
|
|
796
|
-
|
|
797
|
-
// src/github.ts
|
|
798
|
-
import { execa } from "execa";
|
|
799
|
-
var API_URL3 = "https://api.github.com";
|
|
800
|
-
var REQUEST_TIMEOUT_MS3 = 3e4;
|
|
801
|
-
async function isGhCliAvailable() {
|
|
802
|
-
try {
|
|
803
|
-
await execa("gh", ["auth", "status"]);
|
|
804
|
-
return true;
|
|
805
|
-
} catch {
|
|
806
|
-
return false;
|
|
807
|
-
}
|
|
808
|
-
}
|
|
809
|
-
function getToken() {
|
|
810
|
-
const token = process.env.GITHUB_TOKEN;
|
|
811
|
-
if (!token) throw new Error("GITHUB_TOKEN is not set");
|
|
812
|
-
return token;
|
|
813
|
-
}
|
|
814
|
-
async function createPullRequest(opts, method = "cli") {
|
|
815
|
-
if (method === "cli" && await isGhCliAvailable()) {
|
|
816
|
-
return createPullRequestWithGhCli(opts);
|
|
817
|
-
}
|
|
818
|
-
const res = await fetch(`${API_URL3}/repos/${opts.owner}/${opts.repo}/pulls`, {
|
|
819
|
-
method: "POST",
|
|
820
|
-
headers: {
|
|
821
|
-
Authorization: `Bearer ${getToken()}`,
|
|
822
|
-
Accept: "application/vnd.github+json",
|
|
823
|
-
"Content-Type": "application/json"
|
|
824
|
-
},
|
|
825
|
-
body: JSON.stringify({
|
|
826
|
-
title: opts.title,
|
|
827
|
-
body: opts.body,
|
|
828
|
-
head: opts.head,
|
|
829
|
-
base: opts.base
|
|
830
|
-
}),
|
|
831
|
-
signal: AbortSignal.timeout(REQUEST_TIMEOUT_MS3)
|
|
832
|
-
});
|
|
833
|
-
if (!res.ok) {
|
|
834
|
-
const text2 = await res.text();
|
|
835
|
-
throw new Error(`GitHub API error (${res.status}): ${text2}`);
|
|
836
|
-
}
|
|
837
|
-
const data = await res.json();
|
|
838
|
-
return { number: data.number, html_url: data.html_url };
|
|
839
|
-
}
|
|
840
|
-
async function createPullRequestWithGhCli(opts) {
|
|
841
|
-
const result = await execa("gh", [
|
|
842
|
-
"pr",
|
|
843
|
-
"create",
|
|
844
|
-
"--repo",
|
|
845
|
-
`${opts.owner}/${opts.repo}`,
|
|
846
|
-
"--head",
|
|
847
|
-
opts.head,
|
|
848
|
-
"--base",
|
|
849
|
-
opts.base,
|
|
850
|
-
"--title",
|
|
851
|
-
opts.title,
|
|
852
|
-
"--body",
|
|
853
|
-
opts.body
|
|
854
|
-
]);
|
|
855
|
-
const url = result.stdout.trim();
|
|
856
|
-
const prNumberMatch = url.match(/\/pull\/(\d+)/);
|
|
857
|
-
const number = prNumberMatch ? Number.parseInt(prNumberMatch[1], 10) : 0;
|
|
858
|
-
return { number, html_url: url };
|
|
1589
|
+
await trelloPut(`/cards/${cardId}`, `idList=${list.id}`);
|
|
1590
|
+
}
|
|
1591
|
+
async attachPullRequest(cardId, prUrl) {
|
|
1592
|
+
await trelloPost(`/cards/${cardId}/attachments`, `url=${encodeURIComponent(prUrl)}`);
|
|
1593
|
+
}
|
|
1594
|
+
async removeLabel(cardId, labelName) {
|
|
1595
|
+
const card = await trelloGet(
|
|
1596
|
+
`/cards/${cardId}`,
|
|
1597
|
+
"fields=idBoard,idLabels"
|
|
1598
|
+
);
|
|
1599
|
+
const label = await findLabelByName(card.idBoard, labelName);
|
|
1600
|
+
if (!card.idLabels.includes(label.id)) return;
|
|
1601
|
+
await trelloDelete(`/cards/${cardId}/idLabels/${label.id}`);
|
|
1602
|
+
}
|
|
1603
|
+
};
|
|
1604
|
+
// Extract a Trello card short link from a card URL ("/c/<shortLink>");
// anything else is assumed to already be an identifier and returned as-is.
function parseTrelloIdentifier(input) {
  const match = /\/c\/([a-zA-Z0-9]+)/.exec(input);
  return match?.[1] ?? input;
}
|
|
860
|
-
|
|
861
|
-
|
|
862
|
-
|
|
863
|
-
|
|
864
|
-
|
|
865
|
-
|
|
866
|
-
|
|
867
|
-
|
|
868
|
-
|
|
869
|
-
|
|
870
|
-
owner = httpsMatch[1];
|
|
871
|
-
repo = httpsMatch[2];
|
|
872
|
-
} else {
|
|
873
|
-
throw new Error(`Cannot parse GitHub owner/repo from remote URL: ${remoteUrl}`);
|
|
1609
|
+
|
|
1610
|
+
// src/sources/index.ts — registry of issue-source backends.
// Factories are lazy so a backend class is only instantiated on demand.
var sources = {
  linear: () => new LinearSource(),
  trello: () => new TrelloSource()
};
// Instantiate a source by name; throws with the list of known names
// when the requested source is not registered.
function createSource(name) {
  const factory = sources[name];
  if (factory === void 0) {
    throw new Error(`Unknown source: ${name}. Available: ${Object.keys(sources).join(", ")}`);
  }
  return factory();
}
|
|
891
1622
|
|
|
892
1623
|
// src/worktree.ts
|
|
893
|
-
import {
|
|
894
|
-
import { join as
|
|
1624
|
+
import { appendFileSync as appendFileSync5, existsSync as existsSync5, readFileSync as readFileSync4 } from "fs";
|
|
1625
|
+
import { join as join6, resolve as resolve4 } from "path";
|
|
895
1626
|
import { execa as execa2 } from "execa";
|
|
896
1627
|
var WORKTREES_DIR = ".worktrees";
|
|
897
1628
|
// Build a feature-branch name from an issue id and title:
// "feat/<id>-<slug>", slug = lowercase alphanumeric runs joined by "-",
// one leading/trailing "-" stripped, capped at 40 characters.
function generateBranchName(issueId, title) {
  const normalized = title.toLowerCase().replace(/[^a-z0-9]+/g, "-");
  const slug = normalized.replace(/^-|-$/g, "").slice(0, 40);
  return `feat/${issueId.toLowerCase()}-${slug}`;
}
|
|
1632
|
+
// Remove a leftover branch (and its worktree, if still registered)
// from a previous failed run. Returns true when something was cleaned.
async function cleanupOrphanedWorktree(repoRoot, branchName) {
  const branchQuery = await execa2("git", ["branch", "--list", branchName], {
    cwd: repoRoot,
    reject: false
  });
  // Branch absent: nothing to clean.
  if (branchQuery.stdout.trim() === "") {
    return false;
  }
  const worktreePath = join6(repoRoot, WORKTREES_DIR, branchName);
  const worktreeQuery = await execa2("git", ["worktree", "list", "--porcelain"], {
    cwd: repoRoot,
    reject: false
  });
  if (worktreeQuery.stdout.includes(worktreePath)) {
    await execa2("git", ["worktree", "remove", worktreePath, "--force"], { cwd: repoRoot });
    await execa2("git", ["worktree", "prune"], { cwd: repoRoot });
  }
  await execa2("git", ["branch", "-D", branchName], { cwd: repoRoot });
  return true;
}
|
|
901
1652
|
// Create a fresh worktree for `branchName` rooted at origin/<baseBranch>,
// clearing any stale branch/worktree of the same name first.
// Returns the worktree's path.
async function createWorktree(repoRoot, branchName, baseBranch) {
  const worktreePath = join6(repoRoot, WORKTREES_DIR, branchName);
  await cleanupOrphanedWorktree(repoRoot, branchName);
  await execa2("git", ["fetch", "origin", baseBranch], { cwd: repoRoot });
  await execa2(
    "git",
    ["worktree", "add", "-b", branchName, worktreePath, `origin/${baseBranch}`],
    { cwd: repoRoot }
  );
  return worktreePath;
}
|
|
911
1661
|
async function removeWorktree(repoRoot, worktreePath) {
|
|
@@ -915,13 +1665,13 @@ async function removeWorktree(repoRoot, worktreePath) {
|
|
|
915
1665
|
await execa2("git", ["worktree", "prune"], { cwd: repoRoot });
|
|
916
1666
|
}
|
|
917
1667
|
function ensureWorktreeGitignore(repoRoot) {
|
|
918
|
-
const gitignorePath =
|
|
919
|
-
if (!
|
|
1668
|
+
const gitignorePath = join6(repoRoot, ".gitignore");
|
|
1669
|
+
if (!existsSync5(gitignorePath)) {
|
|
920
1670
|
appendFileSync5(gitignorePath, `${WORKTREES_DIR}
|
|
921
1671
|
`);
|
|
922
1672
|
return;
|
|
923
1673
|
}
|
|
924
|
-
const content =
|
|
1674
|
+
const content = readFileSync4(gitignorePath, "utf-8");
|
|
925
1675
|
if (!content.split("\n").some((line) => line.trim() === WORKTREES_DIR)) {
|
|
926
1676
|
const separator = content.endsWith("\n") ? "" : "\n";
|
|
927
1677
|
appendFileSync5(gitignorePath, `${separator}${WORKTREES_DIR}
|
|
@@ -930,27 +1680,23 @@ function ensureWorktreeGitignore(repoRoot) {
|
|
|
930
1680
|
}
|
|
931
1681
|
async function findBranchByIssueId(repoRoot, issueId) {
|
|
932
1682
|
const needle = issueId.toLowerCase();
|
|
933
|
-
const { stdout: local } = await execa2(
|
|
934
|
-
"
|
|
935
|
-
"--sort=-committerdate",
|
|
936
|
-
|
|
937
|
-
|
|
938
|
-
], { cwd: repoRoot });
|
|
1683
|
+
const { stdout: local } = await execa2(
|
|
1684
|
+
"git",
|
|
1685
|
+
["for-each-ref", "--sort=-committerdate", "--format=%(refname:short)", "refs/heads/"],
|
|
1686
|
+
{ cwd: repoRoot }
|
|
1687
|
+
);
|
|
939
1688
|
const localMatch = local.split("\n").map((b) => b.trim()).filter(Boolean).find((b) => b.toLowerCase().includes(needle));
|
|
940
1689
|
if (localMatch) return localMatch;
|
|
941
|
-
const { stdout: remote } = await execa2(
|
|
942
|
-
"
|
|
943
|
-
"--sort=-committerdate",
|
|
944
|
-
|
|
945
|
-
|
|
946
|
-
], { cwd: repoRoot });
|
|
1690
|
+
const { stdout: remote } = await execa2(
|
|
1691
|
+
"git",
|
|
1692
|
+
["for-each-ref", "--sort=-committerdate", "--format=%(refname:short)", "refs/remotes/origin/"],
|
|
1693
|
+
{ cwd: repoRoot }
|
|
1694
|
+
);
|
|
947
1695
|
const remoteMatch = remote.split("\n").map((b) => b.trim()).filter(Boolean).find((b) => b.toLowerCase().includes(needle));
|
|
948
1696
|
if (remoteMatch) return remoteMatch.replace("origin/", "");
|
|
949
|
-
const { stdout: lsRemote } = await execa2("git", [
|
|
950
|
-
|
|
951
|
-
|
|
952
|
-
"origin"
|
|
953
|
-
], { cwd: repoRoot });
|
|
1697
|
+
const { stdout: lsRemote } = await execa2("git", ["ls-remote", "--heads", "origin"], {
|
|
1698
|
+
cwd: repoRoot
|
|
1699
|
+
});
|
|
954
1700
|
const lsMatch = lsRemote.split("\n").map((l) => l.trim()).filter(Boolean).map((l) => l.split(" ")[1]?.replace("refs/heads/", "") ?? "").find((b) => b.toLowerCase().includes(needle));
|
|
955
1701
|
if (lsMatch) return lsMatch;
|
|
956
1702
|
return void 0;
|
|
@@ -959,17 +1705,18 @@ function determineRepoPath(repos, issue, workspace) {
|
|
|
959
1705
|
if (repos.length === 0) return void 0;
|
|
960
1706
|
if (issue.repo) {
|
|
961
1707
|
const match = repos.find((r) => r.name === issue.repo);
|
|
962
|
-
if (match) return
|
|
1708
|
+
if (match) return join6(workspace, match.path);
|
|
963
1709
|
}
|
|
964
1710
|
for (const r of repos) {
|
|
965
1711
|
if (r.match && issue.title.startsWith(r.match)) {
|
|
966
|
-
return
|
|
1712
|
+
return join6(workspace, r.path);
|
|
967
1713
|
}
|
|
968
1714
|
}
|
|
969
|
-
|
|
1715
|
+
const first = repos[0];
|
|
1716
|
+
return first ? join6(workspace, first.path) : void 0;
|
|
970
1717
|
}
|
|
971
1718
|
async function detectFeatureBranches(repos, issueId, workspace, globalBaseBranch) {
|
|
972
|
-
const entries = repos.length > 0 ? repos.map((r) => ({ path:
|
|
1719
|
+
const entries = repos.length > 0 ? repos.map((r) => ({ path: resolve4(workspace, r.path), baseBranch: r.base_branch })) : [{ path: workspace, baseBranch: globalBaseBranch }];
|
|
973
1720
|
const needle = issueId.toLowerCase();
|
|
974
1721
|
const results = [];
|
|
975
1722
|
const matched = /* @__PURE__ */ new Set();
|
|
@@ -979,7 +1726,7 @@ async function detectFeatureBranches(repos, issueId, workspace, globalBaseBranch
|
|
|
979
1726
|
const { stdout } = await execa2("git", ["branch", "--show-current"], { cwd: entry.path });
|
|
980
1727
|
const current = stdout.trim();
|
|
981
1728
|
currentBranches.push({ ...entry, current });
|
|
982
|
-
if (current
|
|
1729
|
+
if (current?.toLowerCase().includes(needle)) {
|
|
983
1730
|
results.push({ repoPath: entry.path, branch: current });
|
|
984
1731
|
matched.add(entry.path);
|
|
985
1732
|
}
|
|
@@ -1003,17 +1750,121 @@ async function detectFeatureBranches(repos, issueId, workspace, globalBaseBranch
|
|
|
1003
1750
|
}
|
|
1004
1751
|
|
|
1005
1752
|
// src/loop.ts
|
|
1006
|
-
|
|
1007
|
-
|
|
1008
|
-
|
|
1009
|
-
|
|
1010
|
-
|
|
1011
|
-
|
|
1753
|
+
var activeCleanup = null;
|
|
1754
|
+
var shuttingDown = false;
|
|
1755
|
+
function resolveModels(config2) {
|
|
1756
|
+
if (config2.models && config2.models.length > 0) return config2.models;
|
|
1757
|
+
return [config2.provider];
|
|
1758
|
+
}
|
|
1759
|
+
function buildPrBody(issue, providerUsed) {
|
|
1760
|
+
return `Closes ${issue.url}
|
|
1761
|
+
|
|
1762
|
+
Implemented by [lisa](https://github.com/tarcisiopgs/lisa) using **${providerUsed}**.`;
|
|
1763
|
+
}
|
|
1764
|
+
var PR_TITLE_FILE = ".pr-title";
|
|
1765
|
+
function readPrTitle(cwd) {
|
|
1766
|
+
try {
|
|
1767
|
+
const title = readFileSync5(join7(cwd, PR_TITLE_FILE), "utf-8").trim().split("\n")[0]?.trim();
|
|
1768
|
+
return title || null;
|
|
1769
|
+
} catch {
|
|
1770
|
+
return null;
|
|
1771
|
+
}
|
|
1772
|
+
}
|
|
1773
|
+
function cleanupPrTitle(cwd) {
|
|
1774
|
+
try {
|
|
1775
|
+
unlinkSync4(join7(cwd, PR_TITLE_FILE));
|
|
1776
|
+
} catch {
|
|
1777
|
+
}
|
|
1778
|
+
}
|
|
1779
|
+
var MANIFEST_FILE = ".lisa-manifest.json";
|
|
1780
|
+
function readLisaManifest(dir) {
|
|
1781
|
+
const manifestPath = join7(dir, MANIFEST_FILE);
|
|
1782
|
+
if (!existsSync6(manifestPath)) return null;
|
|
1783
|
+
try {
|
|
1784
|
+
return JSON.parse(readFileSync5(manifestPath, "utf-8").trim());
|
|
1785
|
+
} catch {
|
|
1786
|
+
return null;
|
|
1787
|
+
}
|
|
1788
|
+
}
|
|
1789
|
+
function cleanupManifest(dir) {
|
|
1790
|
+
try {
|
|
1791
|
+
unlinkSync4(join7(dir, MANIFEST_FILE));
|
|
1792
|
+
} catch {
|
|
1793
|
+
}
|
|
1794
|
+
}
|
|
1795
|
+
function installSignalHandlers() {
|
|
1796
|
+
const cleanup = async (signal) => {
|
|
1797
|
+
if (shuttingDown) {
|
|
1798
|
+
warn("Force exiting...");
|
|
1799
|
+
process.exit(1);
|
|
1800
|
+
}
|
|
1801
|
+
shuttingDown = true;
|
|
1802
|
+
warn(`Received ${signal}. Reverting active issue...`);
|
|
1803
|
+
if (activeCleanup) {
|
|
1804
|
+
const { issueId, previousStatus, source } = activeCleanup;
|
|
1805
|
+
try {
|
|
1806
|
+
await Promise.race([
|
|
1807
|
+
source.updateStatus(issueId, previousStatus),
|
|
1808
|
+
new Promise(
|
|
1809
|
+
(_, reject) => setTimeout(() => reject(new Error("Revert timed out")), 5e3)
|
|
1810
|
+
)
|
|
1811
|
+
]);
|
|
1812
|
+
ok(`Reverted ${issueId} to "${previousStatus}"`);
|
|
1813
|
+
} catch (err) {
|
|
1814
|
+
error(
|
|
1815
|
+
`Failed to revert ${issueId}: ${err instanceof Error ? err.message : String(err)}`
|
|
1816
|
+
);
|
|
1817
|
+
}
|
|
1818
|
+
}
|
|
1012
1819
|
process.exit(1);
|
|
1820
|
+
};
|
|
1821
|
+
process.on("SIGINT", () => {
|
|
1822
|
+
cleanup("SIGINT");
|
|
1823
|
+
});
|
|
1824
|
+
process.on("SIGTERM", () => {
|
|
1825
|
+
cleanup("SIGTERM");
|
|
1826
|
+
});
|
|
1827
|
+
}
|
|
1828
|
+
async function recoverOrphanIssues(source, config2) {
|
|
1829
|
+
const orphanConfig = {
|
|
1830
|
+
...config2.source_config,
|
|
1831
|
+
pick_from: config2.source_config.in_progress
|
|
1832
|
+
};
|
|
1833
|
+
while (true) {
|
|
1834
|
+
let orphan;
|
|
1835
|
+
try {
|
|
1836
|
+
orphan = await source.fetchNextIssue(orphanConfig);
|
|
1837
|
+
} catch (err) {
|
|
1838
|
+
warn(
|
|
1839
|
+
`Failed to check for orphan issues: ${err instanceof Error ? err.message : String(err)}`
|
|
1840
|
+
);
|
|
1841
|
+
break;
|
|
1842
|
+
}
|
|
1843
|
+
if (!orphan) break;
|
|
1844
|
+
warn(
|
|
1845
|
+
`Found orphan issue ${orphan.id} stuck in "${config2.source_config.in_progress}". Reverting to "${config2.source_config.pick_from}".`
|
|
1846
|
+
);
|
|
1847
|
+
try {
|
|
1848
|
+
await source.updateStatus(orphan.id, config2.source_config.pick_from);
|
|
1849
|
+
ok(`Recovered orphan ${orphan.id}`);
|
|
1850
|
+
} catch (err) {
|
|
1851
|
+
error(
|
|
1852
|
+
`Failed to recover orphan ${orphan.id}: ${err instanceof Error ? err.message : String(err)}`
|
|
1853
|
+
);
|
|
1854
|
+
break;
|
|
1855
|
+
}
|
|
1013
1856
|
}
|
|
1857
|
+
}
|
|
1858
|
+
async function runLoop(config2, opts) {
|
|
1859
|
+
const source = createSource(config2.source);
|
|
1860
|
+
const models = resolveModels(config2);
|
|
1861
|
+
installSignalHandlers();
|
|
1014
1862
|
log(
|
|
1015
|
-
`Starting loop (
|
|
1863
|
+
`Starting loop (models: ${models.join(" \u2192 ")}, source: ${config2.source}, label: ${config2.source_config.label}, workflow: ${config2.workflow})`
|
|
1016
1864
|
);
|
|
1865
|
+
if (!opts.dryRun) {
|
|
1866
|
+
await recoverOrphanIssues(source, config2);
|
|
1867
|
+
}
|
|
1017
1868
|
let session = 0;
|
|
1018
1869
|
while (true) {
|
|
1019
1870
|
session++;
|
|
@@ -1022,18 +1873,29 @@ async function runLoop(config2, opts) {
|
|
|
1022
1873
|
break;
|
|
1023
1874
|
}
|
|
1024
1875
|
const timestamp2 = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-").substring(0, 19);
|
|
1025
|
-
const logFile =
|
|
1876
|
+
const logFile = resolve5(config2.logs.dir, `session_${session}_${timestamp2}.log`);
|
|
1026
1877
|
divider(session);
|
|
1027
|
-
|
|
1878
|
+
if (opts.issueId) {
|
|
1879
|
+
log(`Fetching issue '${opts.issueId}' from ${config2.source}...`);
|
|
1880
|
+
} else {
|
|
1881
|
+
log(`Fetching next '${config2.source_config.label}' issue from ${config2.source}...`);
|
|
1882
|
+
}
|
|
1028
1883
|
if (opts.dryRun) {
|
|
1029
|
-
|
|
1884
|
+
if (opts.issueId) {
|
|
1885
|
+
log(`[dry-run] Would fetch issue '${opts.issueId}' from ${config2.source}`);
|
|
1886
|
+
} else {
|
|
1887
|
+
log(
|
|
1888
|
+
`[dry-run] Would fetch issue from ${config2.source} (${config2.source_config.team}/${config2.source_config.project})`
|
|
1889
|
+
);
|
|
1890
|
+
}
|
|
1030
1891
|
log(`[dry-run] Workflow mode: ${config2.workflow}`);
|
|
1892
|
+
log(`[dry-run] Models priority: ${models.join(" \u2192 ")}`);
|
|
1031
1893
|
log("[dry-run] Then implement, push, create PR, and update issue status");
|
|
1032
1894
|
break;
|
|
1033
1895
|
}
|
|
1034
1896
|
let issue;
|
|
1035
1897
|
try {
|
|
1036
|
-
issue = await source.fetchNextIssue(config2.source_config);
|
|
1898
|
+
issue = opts.issueId ? await source.fetchIssueById(opts.issueId) : await source.fetchNextIssue(config2.source_config);
|
|
1037
1899
|
} catch (err) {
|
|
1038
1900
|
error(`Failed to fetch issues: ${err instanceof Error ? err.message : String(err)}`);
|
|
1039
1901
|
if (opts.once) break;
|
|
@@ -1041,10 +1903,15 @@ async function runLoop(config2, opts) {
|
|
|
1041
1903
|
continue;
|
|
1042
1904
|
}
|
|
1043
1905
|
if (!issue) {
|
|
1044
|
-
|
|
1906
|
+
if (opts.issueId) {
|
|
1907
|
+
error(`Issue '${opts.issueId}' not found.`);
|
|
1908
|
+
} else {
|
|
1909
|
+
ok(`No more issues with label '${config2.source_config.label}'. Done.`);
|
|
1910
|
+
}
|
|
1045
1911
|
break;
|
|
1046
1912
|
}
|
|
1047
1913
|
ok(`Picked up: ${issue.id} \u2014 ${issue.title}`);
|
|
1914
|
+
const previousStatus = config2.source_config.pick_from;
|
|
1048
1915
|
try {
|
|
1049
1916
|
const inProgress = config2.source_config.in_progress;
|
|
1050
1917
|
await source.updateStatus(issue.id, inProgress);
|
|
@@ -1052,8 +1919,71 @@ async function runLoop(config2, opts) {
|
|
|
1052
1919
|
} catch (err) {
|
|
1053
1920
|
warn(`Failed to update status: ${err instanceof Error ? err.message : String(err)}`);
|
|
1054
1921
|
}
|
|
1055
|
-
|
|
1056
|
-
|
|
1922
|
+
activeCleanup = { issueId: issue.id, previousStatus, source };
|
|
1923
|
+
let sessionResult;
|
|
1924
|
+
try {
|
|
1925
|
+
sessionResult = config2.workflow === "worktree" ? await runWorktreeSession(config2, issue, logFile, session, models) : await runBranchSession(config2, issue, logFile, session, models);
|
|
1926
|
+
} catch (err) {
|
|
1927
|
+
error(
|
|
1928
|
+
`Unhandled error in session for ${issue.id}: ${err instanceof Error ? err.message : String(err)}`
|
|
1929
|
+
);
|
|
1930
|
+
try {
|
|
1931
|
+
await source.updateStatus(issue.id, previousStatus);
|
|
1932
|
+
ok(`Reverted ${issue.id} to "${previousStatus}"`);
|
|
1933
|
+
} catch (revertErr) {
|
|
1934
|
+
error(
|
|
1935
|
+
`Failed to revert status: ${revertErr instanceof Error ? revertErr.message : String(revertErr)}`
|
|
1936
|
+
);
|
|
1937
|
+
}
|
|
1938
|
+
activeCleanup = null;
|
|
1939
|
+
if (opts.once) break;
|
|
1940
|
+
log(`Cooling down ${config2.loop.cooldown}s before next issue...`);
|
|
1941
|
+
await sleep(config2.loop.cooldown * 1e3);
|
|
1942
|
+
continue;
|
|
1943
|
+
}
|
|
1944
|
+
if (!sessionResult.success) {
|
|
1945
|
+
error(`All models failed for ${issue.id}. Reverting to "${previousStatus}".`);
|
|
1946
|
+
logAttemptHistory(sessionResult);
|
|
1947
|
+
try {
|
|
1948
|
+
await source.updateStatus(issue.id, previousStatus);
|
|
1949
|
+
ok(`Reverted ${issue.id} to "${previousStatus}"`);
|
|
1950
|
+
} catch (err) {
|
|
1951
|
+
error(
|
|
1952
|
+
`Failed to revert status: ${err instanceof Error ? err.message : String(err)}`
|
|
1953
|
+
);
|
|
1954
|
+
}
|
|
1955
|
+
activeCleanup = null;
|
|
1956
|
+
if (opts.once) {
|
|
1957
|
+
log("Single iteration mode. Exiting.");
|
|
1958
|
+
break;
|
|
1959
|
+
}
|
|
1960
|
+
log(`Cooling down ${config2.loop.cooldown}s before next issue...`);
|
|
1961
|
+
await sleep(config2.loop.cooldown * 1e3);
|
|
1962
|
+
continue;
|
|
1963
|
+
}
|
|
1964
|
+
ok(`Completed with provider: ${sessionResult.providerUsed}`);
|
|
1965
|
+
if (sessionResult.prUrls.length === 0) {
|
|
1966
|
+
warn(
|
|
1967
|
+
`Session succeeded but no PRs created for ${issue.id}. Reverting to "${previousStatus}".`
|
|
1968
|
+
);
|
|
1969
|
+
try {
|
|
1970
|
+
await source.updateStatus(issue.id, previousStatus);
|
|
1971
|
+
ok(`Reverted ${issue.id} to "${previousStatus}"`);
|
|
1972
|
+
} catch (err) {
|
|
1973
|
+
error(
|
|
1974
|
+
`Failed to revert status: ${err instanceof Error ? err.message : String(err)}`
|
|
1975
|
+
);
|
|
1976
|
+
}
|
|
1977
|
+
activeCleanup = null;
|
|
1978
|
+
if (opts.once) {
|
|
1979
|
+
log("Single iteration mode. Exiting.");
|
|
1980
|
+
break;
|
|
1981
|
+
}
|
|
1982
|
+
log(`Cooling down ${config2.loop.cooldown}s before next issue...`);
|
|
1983
|
+
await sleep(config2.loop.cooldown * 1e3);
|
|
1984
|
+
continue;
|
|
1985
|
+
}
|
|
1986
|
+
for (const prUrl of sessionResult.prUrls) {
|
|
1057
1987
|
try {
|
|
1058
1988
|
await source.attachPullRequest(issue.id, prUrl);
|
|
1059
1989
|
ok(`Attached PR to ${issue.id}`);
|
|
@@ -1061,19 +1991,24 @@ async function runLoop(config2, opts) {
|
|
|
1061
1991
|
warn(`Failed to attach PR: ${err instanceof Error ? err.message : String(err)}`);
|
|
1062
1992
|
}
|
|
1063
1993
|
}
|
|
1994
|
+
let statusUpdated = false;
|
|
1064
1995
|
try {
|
|
1065
1996
|
const doneStatus = config2.source_config.done;
|
|
1066
1997
|
await source.updateStatus(issue.id, doneStatus);
|
|
1067
1998
|
ok(`Updated ${issue.id} status to "${doneStatus}"`);
|
|
1999
|
+
statusUpdated = true;
|
|
1068
2000
|
} catch (err) {
|
|
1069
2001
|
error(`Failed to update status: ${err instanceof Error ? err.message : String(err)}`);
|
|
1070
2002
|
}
|
|
1071
|
-
|
|
1072
|
-
|
|
1073
|
-
|
|
1074
|
-
|
|
1075
|
-
|
|
2003
|
+
if (statusUpdated && !opts.issueId) {
|
|
2004
|
+
try {
|
|
2005
|
+
await source.removeLabel(issue.id, config2.source_config.label);
|
|
2006
|
+
ok(`Removed label "${config2.source_config.label}" from ${issue.id}`);
|
|
2007
|
+
} catch (err) {
|
|
2008
|
+
error(`Failed to remove label: ${err instanceof Error ? err.message : String(err)}`);
|
|
2009
|
+
}
|
|
1076
2010
|
}
|
|
2011
|
+
activeCleanup = null;
|
|
1077
2012
|
if (opts.once) {
|
|
1078
2013
|
log("Single iteration mode. Exiting.");
|
|
1079
2014
|
break;
|
|
@@ -1083,14 +2018,49 @@ async function runLoop(config2, opts) {
|
|
|
1083
2018
|
}
|
|
1084
2019
|
ok(`lisa finished. ${session} session(s) run.`);
|
|
1085
2020
|
}
|
|
2021
|
+
function logAttemptHistory(result) {
|
|
2022
|
+
for (const [i, attempt] of result.fallback.attempts.entries()) {
|
|
2023
|
+
const status2 = attempt.success ? "OK" : "FAILED";
|
|
2024
|
+
const error2 = attempt.error ? ` \u2014 ${attempt.error}` : "";
|
|
2025
|
+
const duration = attempt.duration > 0 ? ` (${Math.round(attempt.duration / 1e3)}s)` : "";
|
|
2026
|
+
warn(` Attempt ${i + 1}: ${attempt.provider} ${status2}${error2}${duration}`);
|
|
2027
|
+
}
|
|
2028
|
+
}
|
|
1086
2029
|
function resolveBaseBranch(config2, repoPath) {
|
|
1087
|
-
const workspace =
|
|
1088
|
-
const repo = config2.repos.find((r) =>
|
|
2030
|
+
const workspace = resolve5(config2.workspace);
|
|
2031
|
+
const repo = config2.repos.find((r) => resolve5(workspace, r.path) === repoPath);
|
|
1089
2032
|
return repo?.base_branch ?? config2.base_branch;
|
|
1090
2033
|
}
|
|
1091
|
-
|
|
1092
|
-
|
|
1093
|
-
|
|
2034
|
+
function findRepoConfig(config2, issue) {
|
|
2035
|
+
if (config2.repos.length === 0) return void 0;
|
|
2036
|
+
if (issue.repo) {
|
|
2037
|
+
const match = config2.repos.find((r) => r.name === issue.repo);
|
|
2038
|
+
if (match) return match;
|
|
2039
|
+
}
|
|
2040
|
+
for (const r of config2.repos) {
|
|
2041
|
+
if (r.match && issue.title.startsWith(r.match)) return r;
|
|
2042
|
+
}
|
|
2043
|
+
return config2.repos[0];
|
|
2044
|
+
}
|
|
2045
|
+
async function runTestValidation(cwd) {
|
|
2046
|
+
const testRunner = detectTestRunner(cwd);
|
|
2047
|
+
if (!testRunner) return true;
|
|
2048
|
+
log(`Running test validation (${testRunner} detected)...`);
|
|
2049
|
+
try {
|
|
2050
|
+
await execa3("npm", ["run", "test"], { cwd, stdio: "pipe" });
|
|
2051
|
+
ok("Tests passed.");
|
|
2052
|
+
return true;
|
|
2053
|
+
} catch (err) {
|
|
2054
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
2055
|
+
error(`Tests failed: ${message}`);
|
|
2056
|
+
return false;
|
|
2057
|
+
}
|
|
2058
|
+
}
|
|
2059
|
+
async function runWorktreeSession(config2, issue, logFile, session, models) {
|
|
2060
|
+
if (config2.repos.length > 1) {
|
|
2061
|
+
return runWorktreeMultiRepoSession(config2, issue, logFile, session, models);
|
|
2062
|
+
}
|
|
2063
|
+
const workspace = resolve5(config2.workspace);
|
|
1094
2064
|
const repoPath = determineRepoPath(config2.repos, issue, workspace) ?? workspace;
|
|
1095
2065
|
const defaultBranch = resolveBaseBranch(config2, repoPath);
|
|
1096
2066
|
const branchName = generateBranchName(issue.id, issue.title);
|
|
@@ -1100,39 +2070,121 @@ async function runWorktreeSession(config2, issue, logFile, session) {
|
|
|
1100
2070
|
worktreePath = await createWorktree(repoPath, branchName, defaultBranch);
|
|
1101
2071
|
} catch (err) {
|
|
1102
2072
|
error(`Failed to create worktree: ${err instanceof Error ? err.message : String(err)}`);
|
|
1103
|
-
return
|
|
2073
|
+
return {
|
|
2074
|
+
success: false,
|
|
2075
|
+
providerUsed: models[0] ?? "claude",
|
|
2076
|
+
prUrls: [],
|
|
2077
|
+
fallback: {
|
|
2078
|
+
success: false,
|
|
2079
|
+
output: "",
|
|
2080
|
+
duration: 0,
|
|
2081
|
+
providerUsed: models[0] ?? "claude",
|
|
2082
|
+
attempts: []
|
|
2083
|
+
}
|
|
2084
|
+
};
|
|
1104
2085
|
}
|
|
1105
2086
|
ok(`Worktree created at ${worktreePath}`);
|
|
1106
|
-
const
|
|
2087
|
+
const repo = findRepoConfig(config2, issue);
|
|
2088
|
+
if (repo?.lifecycle) {
|
|
2089
|
+
const started = await startResources(repo, worktreePath);
|
|
2090
|
+
if (!started) {
|
|
2091
|
+
error(`Lifecycle startup failed for ${issue.id}. Aborting session.`);
|
|
2092
|
+
await cleanupWorktree(repoPath, worktreePath);
|
|
2093
|
+
return {
|
|
2094
|
+
success: false,
|
|
2095
|
+
providerUsed: models[0] ?? "claude",
|
|
2096
|
+
prUrls: [],
|
|
2097
|
+
fallback: {
|
|
2098
|
+
success: false,
|
|
2099
|
+
output: "",
|
|
2100
|
+
duration: 0,
|
|
2101
|
+
providerUsed: models[0] ?? "claude",
|
|
2102
|
+
attempts: []
|
|
2103
|
+
}
|
|
2104
|
+
};
|
|
2105
|
+
}
|
|
2106
|
+
}
|
|
2107
|
+
const testRunner = detectTestRunner(worktreePath);
|
|
2108
|
+
if (testRunner) {
|
|
2109
|
+
log(`Detected test runner: ${testRunner}`);
|
|
2110
|
+
}
|
|
2111
|
+
const prompt = buildImplementPrompt(issue, config2, testRunner);
|
|
1107
2112
|
log(`Implementing in worktree... (log: ${logFile})`);
|
|
1108
2113
|
initLogFile(logFile);
|
|
1109
|
-
const result = await
|
|
2114
|
+
const result = await runWithFallback(models, prompt, {
|
|
2115
|
+
logFile,
|
|
2116
|
+
cwd: worktreePath,
|
|
2117
|
+
guardrailsDir: repoPath,
|
|
2118
|
+
issueId: issue.id,
|
|
2119
|
+
overseer: config2.overseer
|
|
2120
|
+
});
|
|
1110
2121
|
try {
|
|
1111
|
-
appendFileSync6(
|
|
2122
|
+
appendFileSync6(
|
|
2123
|
+
logFile,
|
|
2124
|
+
`
|
|
1112
2125
|
${"=".repeat(80)}
|
|
2126
|
+
Provider used: ${result.providerUsed}
|
|
1113
2127
|
Full output:
|
|
1114
2128
|
${result.output}
|
|
1115
|
-
`
|
|
2129
|
+
`
|
|
2130
|
+
);
|
|
1116
2131
|
} catch {
|
|
1117
2132
|
}
|
|
2133
|
+
if (repo?.lifecycle) {
|
|
2134
|
+
await stopResources();
|
|
2135
|
+
}
|
|
1118
2136
|
if (!result.success) {
|
|
1119
2137
|
error(`Session ${session} failed for ${issue.id}. Check ${logFile}`);
|
|
1120
2138
|
await cleanupWorktree(repoPath, worktreePath);
|
|
1121
|
-
return [];
|
|
2139
|
+
return { success: false, providerUsed: result.providerUsed, prUrls: [], fallback: result };
|
|
2140
|
+
}
|
|
2141
|
+
const testsPassed = await runTestValidation(worktreePath);
|
|
2142
|
+
if (!testsPassed) {
|
|
2143
|
+
error(`Tests failed for ${issue.id}. Blocking PR creation.`);
|
|
2144
|
+
await cleanupWorktree(repoPath, worktreePath);
|
|
2145
|
+
return { success: false, providerUsed: result.providerUsed, prUrls: [], fallback: result };
|
|
2146
|
+
}
|
|
2147
|
+
const manifest = readLisaManifest(worktreePath);
|
|
2148
|
+
let effectiveBranch = branchName;
|
|
2149
|
+
if (manifest?.branch && manifest.branch !== branchName) {
|
|
2150
|
+
log(`Renaming branch to English name: ${manifest.branch}`);
|
|
2151
|
+
try {
|
|
2152
|
+
await execa3("git", ["branch", "-m", branchName, manifest.branch], { cwd: worktreePath });
|
|
2153
|
+
effectiveBranch = manifest.branch;
|
|
2154
|
+
ok(`Branch renamed to ${effectiveBranch}`);
|
|
2155
|
+
} catch (err) {
|
|
2156
|
+
warn(
|
|
2157
|
+
`Branch rename failed, using original: ${err instanceof Error ? err.message : String(err)}`
|
|
2158
|
+
);
|
|
2159
|
+
}
|
|
2160
|
+
}
|
|
2161
|
+
try {
|
|
2162
|
+
await execa3("git", ["push", "-u", "origin", effectiveBranch], { cwd: worktreePath });
|
|
2163
|
+
} catch (err) {
|
|
2164
|
+
error(
|
|
2165
|
+
`Failed to push branch to remote: ${err instanceof Error ? err.message : String(err)}`
|
|
2166
|
+
);
|
|
2167
|
+
cleanupManifest(worktreePath);
|
|
2168
|
+
await cleanupWorktree(repoPath, worktreePath);
|
|
2169
|
+
return { success: false, providerUsed: result.providerUsed, prUrls: [], fallback: result };
|
|
1122
2170
|
}
|
|
2171
|
+
const prTitle = manifest?.prTitle ?? readPrTitle(worktreePath) ?? issue.title;
|
|
2172
|
+
cleanupPrTitle(worktreePath);
|
|
2173
|
+
cleanupManifest(worktreePath);
|
|
1123
2174
|
const prUrls = [];
|
|
1124
2175
|
try {
|
|
1125
2176
|
const repoInfo = await getRepoInfo(worktreePath);
|
|
1126
|
-
const pr = await createPullRequest(
|
|
1127
|
-
|
|
1128
|
-
|
|
1129
|
-
|
|
1130
|
-
|
|
1131
|
-
|
|
1132
|
-
|
|
1133
|
-
|
|
1134
|
-
|
|
1135
|
-
|
|
2177
|
+
const pr = await createPullRequest(
|
|
2178
|
+
{
|
|
2179
|
+
owner: repoInfo.owner,
|
|
2180
|
+
repo: repoInfo.repo,
|
|
2181
|
+
head: effectiveBranch,
|
|
2182
|
+
base: defaultBranch,
|
|
2183
|
+
title: prTitle,
|
|
2184
|
+
body: buildPrBody(issue, result.providerUsed)
|
|
2185
|
+
},
|
|
2186
|
+
config2.github
|
|
2187
|
+
);
|
|
1136
2188
|
ok(`PR created: ${pr.html_url}`);
|
|
1137
2189
|
prUrls.push(pr.html_url);
|
|
1138
2190
|
} catch (err) {
|
|
@@ -1140,49 +2192,190 @@ Implemented by [lisa](https://github.com/tarcisiopgs/lisa).`
|
|
|
1140
2192
|
}
|
|
1141
2193
|
await cleanupWorktree(repoPath, worktreePath);
|
|
1142
2194
|
ok(`Session ${session} complete for ${issue.id}`);
|
|
1143
|
-
return prUrls;
|
|
2195
|
+
return { success: true, providerUsed: result.providerUsed, prUrls, fallback: result };
|
|
2196
|
+
}
|
|
2197
|
+
async function runWorktreeMultiRepoSession(config2, issue, logFile, session, models) {
|
|
2198
|
+
const workspace = resolve5(config2.workspace);
|
|
2199
|
+
cleanupManifest(workspace);
|
|
2200
|
+
const prompt = buildWorktreeMultiRepoPrompt(issue, config2);
|
|
2201
|
+
log(`Multi-repo worktree session for ${issue.id} (agent selects repo and branch name)`);
|
|
2202
|
+
log(`Implementing (agent selects repo)... (log: ${logFile})`);
|
|
2203
|
+
initLogFile(logFile);
|
|
2204
|
+
const result = await runWithFallback(models, prompt, {
|
|
2205
|
+
logFile,
|
|
2206
|
+
cwd: workspace,
|
|
2207
|
+
guardrailsDir: workspace,
|
|
2208
|
+
issueId: issue.id,
|
|
2209
|
+
overseer: config2.overseer
|
|
2210
|
+
});
|
|
2211
|
+
try {
|
|
2212
|
+
appendFileSync6(
|
|
2213
|
+
logFile,
|
|
2214
|
+
`
|
|
2215
|
+
${"=".repeat(80)}
|
|
2216
|
+
Provider used: ${result.providerUsed}
|
|
2217
|
+
Full output:
|
|
2218
|
+
${result.output}
|
|
2219
|
+
`
|
|
2220
|
+
);
|
|
2221
|
+
} catch {
|
|
2222
|
+
}
|
|
2223
|
+
if (!result.success) {
|
|
2224
|
+
error(`Session ${session} failed for ${issue.id}. Check ${logFile}`);
|
|
2225
|
+
cleanupManifest(workspace);
|
|
2226
|
+
return { success: false, providerUsed: result.providerUsed, prUrls: [], fallback: result };
|
|
2227
|
+
}
|
|
2228
|
+
const manifest = readLisaManifest(workspace);
|
|
2229
|
+
if (!manifest?.repoPath || !manifest.branch) {
|
|
2230
|
+
error(
|
|
2231
|
+
`Agent did not produce a valid .lisa-manifest.json (requires repoPath + branch) for ${issue.id}. Aborting.`
|
|
2232
|
+
);
|
|
2233
|
+
cleanupManifest(workspace);
|
|
2234
|
+
return { success: false, providerUsed: result.providerUsed, prUrls: [], fallback: result };
|
|
2235
|
+
}
|
|
2236
|
+
ok(`Provider chose repo: ${manifest.repoPath}, branch: ${manifest.branch}`);
|
|
2237
|
+
const worktreePath = join7(manifest.repoPath, ".worktrees", manifest.branch);
|
|
2238
|
+
const baseBranch = resolveBaseBranch(config2, manifest.repoPath);
|
|
2239
|
+
const testsPassed = await runTestValidation(worktreePath);
|
|
2240
|
+
if (!testsPassed) {
|
|
2241
|
+
error(`Tests failed for ${issue.id}. Blocking PR creation.`);
|
|
2242
|
+
await cleanupWorktree(manifest.repoPath, worktreePath);
|
|
2243
|
+
cleanupManifest(workspace);
|
|
2244
|
+
return { success: false, providerUsed: result.providerUsed, prUrls: [], fallback: result };
|
|
2245
|
+
}
|
|
2246
|
+
try {
|
|
2247
|
+
await execa3("git", ["push", "-u", "origin", manifest.branch], { cwd: worktreePath });
|
|
2248
|
+
} catch (err) {
|
|
2249
|
+
error(
|
|
2250
|
+
`Failed to push branch to remote: ${err instanceof Error ? err.message : String(err)}`
|
|
2251
|
+
);
|
|
2252
|
+
await cleanupWorktree(manifest.repoPath, worktreePath);
|
|
2253
|
+
cleanupManifest(workspace);
|
|
2254
|
+
return { success: false, providerUsed: result.providerUsed, prUrls: [], fallback: result };
|
|
2255
|
+
}
|
|
2256
|
+
const prTitle = manifest.prTitle ?? issue.title;
|
|
2257
|
+
const prUrls = [];
|
|
2258
|
+
try {
|
|
2259
|
+
const repoInfo = await getRepoInfo(worktreePath);
|
|
2260
|
+
const pr = await createPullRequest(
|
|
2261
|
+
{
|
|
2262
|
+
owner: repoInfo.owner,
|
|
2263
|
+
repo: repoInfo.repo,
|
|
2264
|
+
head: manifest.branch,
|
|
2265
|
+
base: baseBranch,
|
|
2266
|
+
title: prTitle,
|
|
2267
|
+
body: buildPrBody(issue, result.providerUsed)
|
|
2268
|
+
},
|
|
2269
|
+
config2.github
|
|
2270
|
+
);
|
|
2271
|
+
ok(`PR created: ${pr.html_url}`);
|
|
2272
|
+
prUrls.push(pr.html_url);
|
|
2273
|
+
} catch (err) {
|
|
2274
|
+
error(`Failed to create PR: ${err instanceof Error ? err.message : String(err)}`);
|
|
2275
|
+
}
|
|
2276
|
+
cleanupManifest(workspace);
|
|
2277
|
+
await cleanupWorktree(manifest.repoPath, worktreePath);
|
|
2278
|
+
ok(`Session ${session} complete for ${issue.id}`);
|
|
2279
|
+
return { success: true, providerUsed: result.providerUsed, prUrls, fallback: result };
|
|
1144
2280
|
}
|
|
1145
|
-
async function runBranchSession(config2, issue, logFile, session) {
|
|
1146
|
-
const
|
|
1147
|
-
|
|
1148
|
-
const
|
|
2281
|
+
async function runBranchSession(config2, issue, logFile, session, models) {
|
|
2282
|
+
const workspace = resolve5(config2.workspace);
|
|
2283
|
+
cleanupManifest(workspace);
|
|
2284
|
+
const testRunner = detectTestRunner(workspace);
|
|
2285
|
+
if (testRunner) {
|
|
2286
|
+
log(`Detected test runner: ${testRunner}`);
|
|
2287
|
+
}
|
|
2288
|
+
const prompt = buildImplementPrompt(issue, config2, testRunner);
|
|
2289
|
+
const repo = findRepoConfig(config2, issue);
|
|
2290
|
+
if (repo?.lifecycle) {
|
|
2291
|
+
const cwd = resolve5(workspace, repo.path);
|
|
2292
|
+
const started = await startResources(repo, cwd);
|
|
2293
|
+
if (!started) {
|
|
2294
|
+
error(`Lifecycle startup failed for ${issue.id}. Aborting session.`);
|
|
2295
|
+
return {
|
|
2296
|
+
success: false,
|
|
2297
|
+
providerUsed: models[0] ?? "claude",
|
|
2298
|
+
prUrls: [],
|
|
2299
|
+
fallback: {
|
|
2300
|
+
success: false,
|
|
2301
|
+
output: "",
|
|
2302
|
+
duration: 0,
|
|
2303
|
+
providerUsed: models[0] ?? "claude",
|
|
2304
|
+
attempts: []
|
|
2305
|
+
}
|
|
2306
|
+
};
|
|
2307
|
+
}
|
|
2308
|
+
}
|
|
1149
2309
|
log(`Implementing... (log: ${logFile})`);
|
|
1150
2310
|
initLogFile(logFile);
|
|
1151
|
-
const result = await
|
|
2311
|
+
const result = await runWithFallback(models, prompt, {
|
|
2312
|
+
logFile,
|
|
2313
|
+
cwd: workspace,
|
|
2314
|
+
guardrailsDir: workspace,
|
|
2315
|
+
issueId: issue.id,
|
|
2316
|
+
overseer: config2.overseer
|
|
2317
|
+
});
|
|
1152
2318
|
try {
|
|
1153
|
-
appendFileSync6(
|
|
2319
|
+
appendFileSync6(
|
|
2320
|
+
logFile,
|
|
2321
|
+
`
|
|
1154
2322
|
${"=".repeat(80)}
|
|
2323
|
+
Provider used: ${result.providerUsed}
|
|
1155
2324
|
Full output:
|
|
1156
2325
|
${result.output}
|
|
1157
|
-
`
|
|
2326
|
+
`
|
|
2327
|
+
);
|
|
1158
2328
|
} catch {
|
|
1159
2329
|
}
|
|
2330
|
+
if (repo?.lifecycle) {
|
|
2331
|
+
await stopResources();
|
|
2332
|
+
}
|
|
1160
2333
|
if (!result.success) {
|
|
1161
2334
|
error(`Session ${session} failed for ${issue.id}. Check ${logFile}`);
|
|
1162
|
-
return [];
|
|
2335
|
+
return { success: false, providerUsed: result.providerUsed, prUrls: [], fallback: result };
|
|
2336
|
+
}
|
|
2337
|
+
const testsPassed = await runTestValidation(workspace);
|
|
2338
|
+
if (!testsPassed) {
|
|
2339
|
+
error(`Tests failed for ${issue.id}. Blocking PR creation.`);
|
|
2340
|
+
cleanupManifest(workspace);
|
|
2341
|
+
return { success: false, providerUsed: result.providerUsed, prUrls: [], fallback: result };
|
|
2342
|
+
}
|
|
2343
|
+
const manifest = readLisaManifest(workspace);
|
|
2344
|
+
let detected;
|
|
2345
|
+
if (manifest?.repoPath && manifest.branch) {
|
|
2346
|
+
ok(`Using manifest: repo=${manifest.repoPath}, branch=${manifest.branch}`);
|
|
2347
|
+
detected = [{ repoPath: manifest.repoPath, branch: manifest.branch }];
|
|
2348
|
+
} else {
|
|
2349
|
+
if (manifest) {
|
|
2350
|
+
warn(`Manifest found but missing repoPath or branch \u2014 falling back to detection`);
|
|
2351
|
+
}
|
|
2352
|
+
detected = await detectFeatureBranches(config2.repos, issue.id, workspace, config2.base_branch);
|
|
1163
2353
|
}
|
|
1164
|
-
|
|
2354
|
+
cleanupManifest(workspace);
|
|
1165
2355
|
if (detected.length === 0) {
|
|
1166
2356
|
error(`Could not detect feature branch for ${issue.id} \u2014 skipping PR creation`);
|
|
1167
2357
|
ok(`Session ${session} complete for ${issue.id}`);
|
|
1168
|
-
return [];
|
|
2358
|
+
return { success: true, providerUsed: result.providerUsed, prUrls: [], fallback: result };
|
|
1169
2359
|
}
|
|
2360
|
+
const prTitle = manifest?.prTitle ?? readPrTitle(workspace) ?? issue.title;
|
|
2361
|
+
cleanupPrTitle(workspace);
|
|
1170
2362
|
const prUrls = [];
|
|
1171
2363
|
for (const { repoPath, branch } of detected) {
|
|
1172
2364
|
const baseBranch = resolveBaseBranch(config2, repoPath);
|
|
1173
2365
|
if (branch === baseBranch) continue;
|
|
1174
2366
|
try {
|
|
1175
2367
|
const repoInfo = await getRepoInfo(repoPath);
|
|
1176
|
-
const pr = await createPullRequest(
|
|
1177
|
-
|
|
1178
|
-
|
|
1179
|
-
|
|
1180
|
-
|
|
1181
|
-
|
|
1182
|
-
|
|
1183
|
-
|
|
1184
|
-
|
|
1185
|
-
|
|
2368
|
+
const pr = await createPullRequest(
|
|
2369
|
+
{
|
|
2370
|
+
owner: repoInfo.owner,
|
|
2371
|
+
repo: repoInfo.repo,
|
|
2372
|
+
head: branch,
|
|
2373
|
+
base: baseBranch,
|
|
2374
|
+
title: prTitle,
|
|
2375
|
+
body: buildPrBody(issue, result.providerUsed)
|
|
2376
|
+
},
|
|
2377
|
+
config2.github
|
|
2378
|
+
);
|
|
1186
2379
|
ok(`PR created: ${pr.html_url}`);
|
|
1187
2380
|
prUrls.push(pr.html_url);
|
|
1188
2381
|
} catch (err) {
|
|
@@ -1190,7 +2383,7 @@ Implemented by [lisa](https://github.com/tarcisiopgs/lisa).`
|
|
|
1190
2383
|
}
|
|
1191
2384
|
}
|
|
1192
2385
|
ok(`Session ${session} complete for ${issue.id}`);
|
|
1193
|
-
return prUrls;
|
|
2386
|
+
return { success: true, providerUsed: result.providerUsed, prUrls, fallback: result };
|
|
1194
2387
|
}
|
|
1195
2388
|
async function cleanupWorktree(repoRoot, worktreePath) {
|
|
1196
2389
|
try {
|
|
@@ -1201,7 +2394,7 @@ async function cleanupWorktree(repoRoot, worktreePath) {
|
|
|
1201
2394
|
}
|
|
1202
2395
|
}
|
|
1203
2396
|
function sleep(ms) {
|
|
1204
|
-
return new Promise((
|
|
2397
|
+
return new Promise((resolve6) => setTimeout(resolve6, ms));
|
|
1205
2398
|
}
|
|
1206
2399
|
|
|
1207
2400
|
// src/cli.ts
|
|
@@ -1211,6 +2404,7 @@ var run = defineCommand({
|
|
|
1211
2404
|
once: { type: "boolean", description: "Run a single iteration", default: false },
|
|
1212
2405
|
limit: { type: "string", description: "Max number of issues to process", default: "0" },
|
|
1213
2406
|
"dry-run": { type: "boolean", description: "Preview without executing", default: false },
|
|
2407
|
+
issue: { type: "string", description: "Run a specific issue by identifier or URL" },
|
|
1214
2408
|
provider: { type: "string", description: "AI provider (claude, gemini, opencode)" },
|
|
1215
2409
|
source: { type: "string", description: "Issue source (linear, trello)" },
|
|
1216
2410
|
label: { type: "string", description: "Label to filter issues" },
|
|
@@ -1236,16 +2430,21 @@ var run = defineCommand({
|
|
|
1236
2430
|
const missingVars = await getMissingEnvVars(merged.source);
|
|
1237
2431
|
if (missingVars.length > 0) {
|
|
1238
2432
|
const shell = process.env.SHELL?.includes("zsh") ? "~/.zshrc" : "~/.bashrc";
|
|
1239
|
-
console.error(
|
|
1240
|
-
|
|
2433
|
+
console.error(
|
|
2434
|
+
pc2.red(
|
|
2435
|
+
`Missing required environment variables:
|
|
2436
|
+
${missingVars.map((v) => ` ${v}`).join("\n")}`
|
|
2437
|
+
)
|
|
2438
|
+
);
|
|
1241
2439
|
console.error(pc2.dim(`
|
|
1242
2440
|
Add them to your ${shell} and run: source ${shell}`));
|
|
1243
2441
|
process.exit(1);
|
|
1244
2442
|
}
|
|
1245
2443
|
await runLoop(merged, {
|
|
1246
|
-
once: args.once,
|
|
2444
|
+
once: args.once || !!args.issue,
|
|
1247
2445
|
limit: Number.parseInt(args.limit, 10),
|
|
1248
|
-
dryRun: args["dry-run"]
|
|
2446
|
+
dryRun: args["dry-run"],
|
|
2447
|
+
issueId: args.issue
|
|
1249
2448
|
});
|
|
1250
2449
|
}
|
|
1251
2450
|
});
|
|
@@ -1281,7 +2480,9 @@ var init = defineCommand({
|
|
|
1281
2480
|
meta: { name: "init", description: "Initialize lisa configuration" },
|
|
1282
2481
|
async run() {
|
|
1283
2482
|
if (!process.stdin.isTTY) {
|
|
1284
|
-
console.error(
|
|
2483
|
+
console.error(
|
|
2484
|
+
pc2.red("Interactive mode requires a TTY. Cannot run init in non-interactive environments.")
|
|
2485
|
+
);
|
|
1285
2486
|
process.exit(1);
|
|
1286
2487
|
}
|
|
1287
2488
|
if (configExists()) {
|
|
@@ -1315,8 +2516,8 @@ var status = defineCommand({
|
|
|
1315
2516
|
console.log(` In progress: ${pc2.bold(config2.source_config.in_progress)}`);
|
|
1316
2517
|
console.log(` Done: ${pc2.bold(config2.source_config.done)}`);
|
|
1317
2518
|
console.log(` Logs: ${pc2.dim(config2.logs.dir)}`);
|
|
1318
|
-
const { readdirSync: readdirSync2, existsSync:
|
|
1319
|
-
if (
|
|
2519
|
+
const { readdirSync: readdirSync2, existsSync: existsSync8 } = await import("fs");
|
|
2520
|
+
if (existsSync8(config2.logs.dir)) {
|
|
1320
2521
|
const logs = readdirSync2(config2.logs.dir).filter((f) => f.endsWith(".log"));
|
|
1321
2522
|
console.log(`
|
|
1322
2523
|
${pc2.cyan("Sessions:")} ${logs.length} log file(s) found`);
|
|
@@ -1329,7 +2530,7 @@ ${pc2.dim("No sessions yet.")}`);
|
|
|
1329
2530
|
function getVersion() {
|
|
1330
2531
|
try {
|
|
1331
2532
|
const pkgPath = resolvePath(new URL(".", import.meta.url).pathname, "../package.json");
|
|
1332
|
-
const pkg = JSON.parse(
|
|
2533
|
+
const pkg = JSON.parse(readFileSync6(pkgPath, "utf-8"));
|
|
1333
2534
|
return pkg.version;
|
|
1334
2535
|
} catch {
|
|
1335
2536
|
return "0.0.0";
|
|
@@ -1339,7 +2540,7 @@ var main = defineCommand({
|
|
|
1339
2540
|
meta: {
|
|
1340
2541
|
name: "lisa",
|
|
1341
2542
|
version: getVersion(),
|
|
1342
|
-
description: "
|
|
2543
|
+
description: "Deterministic autonomous issue resolver \u2014 structured AI agent loop for Linear/Trello"
|
|
1343
2544
|
},
|
|
1344
2545
|
subCommands: { run, config, init, status }
|
|
1345
2546
|
});
|
|
@@ -1365,7 +2566,7 @@ After installing, run ${pc2.cyan("lisa init")} again.`
|
|
|
1365
2566
|
return process.exit(1);
|
|
1366
2567
|
}
|
|
1367
2568
|
let providerName;
|
|
1368
|
-
if (available.length === 1) {
|
|
2569
|
+
if (available.length === 1 && available[0]) {
|
|
1369
2570
|
providerName = available[0].name;
|
|
1370
2571
|
clack.log.info(`Found provider: ${pc2.bold(providerLabels[providerName])}`);
|
|
1371
2572
|
} else {
|
|
@@ -1551,12 +2752,12 @@ async function detectGitHubMethod() {
|
|
|
1551
2752
|
}
|
|
1552
2753
|
async function detectGitRepos() {
|
|
1553
2754
|
const cwd = process.cwd();
|
|
1554
|
-
if (
|
|
2755
|
+
if (existsSync7(join8(cwd, ".git"))) {
|
|
1555
2756
|
clack.log.info(`Detected git repository in current directory.`);
|
|
1556
2757
|
return [];
|
|
1557
2758
|
}
|
|
1558
2759
|
const entries = readdirSync(cwd, { withFileTypes: true });
|
|
1559
|
-
const gitDirs = entries.filter((e) => e.isDirectory() &&
|
|
2760
|
+
const gitDirs = entries.filter((e) => e.isDirectory() && existsSync7(join8(cwd, e.name, ".git"))).map((e) => e.name);
|
|
1560
2761
|
if (gitDirs.length === 0) {
|
|
1561
2762
|
return [];
|
|
1562
2763
|
}
|
|
@@ -1566,7 +2767,7 @@ async function detectGitRepos() {
|
|
|
1566
2767
|
});
|
|
1567
2768
|
if (clack.isCancel(selected)) return process.exit(0);
|
|
1568
2769
|
return selected.map((dir) => ({
|
|
1569
|
-
name: getGitRepoName(
|
|
2770
|
+
name: getGitRepoName(join8(cwd, dir)) ?? dir,
|
|
1570
2771
|
path: `./${dir}`,
|
|
1571
2772
|
match: "",
|
|
1572
2773
|
base_branch: ""
|