xforce 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +3195 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/index.d.ts +434 -0
- package/dist/index.js +2599 -0
- package/dist/index.js.map +1 -0
- package/package.json +76 -0
- package/templates/issue-templates/bug-fix.yml +55 -0
- package/templates/issue-templates/feature-request.yml +58 -0
- package/templates/issue-templates/refactor.yml +53 -0
- package/templates/xforce.config.example.yaml +45 -0
|
@@ -0,0 +1,3195 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/cli/index.ts
|
|
4
|
+
import yargs from "yargs";
|
|
5
|
+
import { hideBin } from "yargs/helpers";
|
|
6
|
+
|
|
7
|
+
// src/cli/env.ts
|
|
8
|
+
import { readFileSync } from "fs";
|
|
9
|
+
import { resolve } from "path";
|
|
10
|
+
/**
 * Loads variables from a `.env.xforce` file in `dir` into `process.env`.
 * Lines are `KEY=VALUE`; blank lines and `#` comments are skipped, and a
 * single matching pair of surrounding quotes is stripped from values.
 * Variables already present in the environment are never overwritten.
 * Missing/unreadable files are silently ignored (the file is optional).
 *
 * @param {string} [dir=process.cwd()] Directory containing `.env.xforce`.
 */
function loadEnvFile(dir = process.cwd()) {
  let content;
  try {
    content = readFileSync(resolve(dir, ".env.xforce"), "utf-8");
  } catch {
    // The env file is optional; nothing to load.
    return;
  }
  for (const line of content.split("\n")) {
    const trimmed = line.trim();
    if (!trimmed || trimmed.startsWith("#")) continue;
    const eqIndex = trimmed.indexOf("=");
    if (eqIndex === -1) continue;
    const key = trimmed.slice(0, eqIndex).trim();
    let value = trimmed.slice(eqIndex + 1).trim();
    // Strip one matching pair of surrounding quotes. The length guard fixes
    // the degenerate value `"` (or `'`): startsWith and endsWith both match
    // the SAME character there, and slice(1, -1) would corrupt the value.
    if (
      value.length >= 2 &&
      (value.startsWith('"') && value.endsWith('"') ||
        value.startsWith("'") && value.endsWith("'"))
    ) {
      value = value.slice(1, -1);
    }
    // Real environment always wins over the file.
    if (process.env[key] === void 0) {
      process.env[key] = value;
    }
  }
}
|
|
32
|
+
|
|
33
|
+
// src/cli/commands/run.ts
|
|
34
|
+
import ora2 from "ora";
|
|
35
|
+
import chalk from "chalk";
|
|
36
|
+
|
|
37
|
+
// src/core/config.ts
|
|
38
|
+
import { readFileSync as readFileSync2, existsSync } from "fs";
|
|
39
|
+
import { resolve as resolve2 } from "path";
|
|
40
|
+
import { parse as parseYaml } from "yaml";
|
|
41
|
+
import { z } from "zod";
|
|
42
|
+
|
|
43
|
+
// src/core/constants.ts
|
|
44
|
+
// Baseline pipeline settings. The YAML config's `defaults` section and
// per-repo entries override these (see DefaultsSchema / resolveRepoConfig).
var DEFAULT_CONFIG = {
  model: "claude-sonnet-4-5-20250929",
  reviewerModel: "claude-sonnet-4-5-20250929",
  plannerModel: "claude-sonnet-4-5-20250929",
  maxTurns: 25,
  maxReviewCycles: 3,
  maxTestRetries: 2,
  timeoutMinutes: 30,
  budgetPerTaskUsd: 5,
  // Branches are named `<branchPrefix>/<issueNumber>-<slug>` (see setupBranch).
  branchPrefix: "xforce",
  // GitHub issue labels used to track pipeline state.
  labels: {
    ready: "xforce:ready",
    inProgress: "xforce:in-progress",
    done: "xforce:done",
    failed: "xforce:failed"
  },
  // Tools the coding agent is permitted to use.
  allowedTools: ["Read", "Write", "Edit", "Bash", "Glob", "Grep"],
  enablePlanning: true,
  enableSecurityScan: true
};
|
|
64
|
+
// Candidate config file locations, probed in this order by findConfigFile();
// the first existing file wins.
var CONFIG_FILE_NAMES = [
  "xforce.config.yaml",
  "xforce.config.yml",
  ".xforce/config.yaml",
  ".xforce/config.yml"
];
|
|
70
|
+
|
|
71
|
+
// src/core/errors.ts
|
|
72
|
+
/**
 * Base class for all X-Force domain errors. Carries a machine-readable
 * `code` alongside the human-readable message.
 */
var XForceError = class extends Error {
  constructor(message, code) {
    super(message);
    this.name = "XForceError";
    this.code = code;
  }
};
/** Raised for missing, unreadable, or schema-invalid configuration. */
var ConfigError = class extends XForceError {
  constructor(message) {
    super(message, "CONFIG_ERROR");
    this.name = "ConfigError";
  }
};
/** Raised when a GitHub issue body lacks the required structure. */
var IssueParseError = class extends XForceError {
  constructor(message) {
    super(message, "ISSUE_PARSE_ERROR");
    this.name = "IssueParseError";
  }
};
/**
 * Raised when the coding agent fails; keeps the agent's error list and the
 * cost accrued so far so the pipeline can report both.
 */
var CodingAgentError = class extends XForceError {
  constructor(message, errors, costUsd) {
    super(message, "CODING_AGENT_ERROR");
    this.name = "CodingAgentError";
    this.errors = errors;
    this.costUsd = costUsd;
  }
};
/** Raised when the reviewer agent fails. */
var ReviewerError = class extends XForceError {
  constructor(message) {
    super(message, "REVIEWER_ERROR");
    this.name = "ReviewerError";
  }
};
/** Raised for pipeline-level failures (e.g. invalid state transitions). */
var PipelineError = class extends XForceError {
  constructor(message) {
    super(message, "PIPELINE_ERROR");
    this.name = "PipelineError";
  }
};
/** Raised when an operation exceeds its time budget (see withTimeout). */
var TimeoutError = class extends XForceError {
  constructor(message) {
    super(message, "TIMEOUT_ERROR");
    this.name = "TimeoutError";
  }
};
/**
 * Races `promise` against a timer: settles with the promise's outcome, or
 * rejects with a TimeoutError once `timeoutMs` elapses. The timer is always
 * cleared when either side settles.
 *
 * @param {Promise} promise Operation to bound.
 * @param {number} timeoutMs Budget in milliseconds.
 * @param {string} [label="Operation"] Name used in the timeout message.
 * @returns {Promise} Same outcome as `promise`, unless the timer fires first.
 */
function withTimeout(promise, timeoutMs, label = "Operation") {
  let timer;
  const deadline = new Promise((_resolve, reject) => {
    timer = setTimeout(() => {
      const seconds = Math.round(timeoutMs / 1e3);
      reject(new TimeoutError(`${label} timed out after ${seconds}s`));
    }, timeoutMs);
  });
  return Promise.race([promise, deadline]).finally(() => clearTimeout(timer));
}
|
|
131
|
+
/**
 * Error marking a failure as safe to retry; `retryAfterMs`, when set,
 * overrides the exponential backoff delay (e.g. from a Retry-After header).
 */
var RetryableError = class extends Error {
  constructor(message, retryAfterMs) {
    super(message);
    this.name = "RetryableError";
    this.retryAfterMs = retryAfterMs;
  }
};
/**
 * Runs `fn` up to `options.maxRetries + 1` times with exponential backoff
 * (`baseDelayMs * 2^attempt`, capped at `maxDelayMs`). A RetryableError with
 * `retryAfterMs` replaces the computed backoff. `options.onRetry(error, n)`
 * is invoked before each re-attempt. Throws the last error when exhausted.
 *
 * @param {Function} fn Async operation to attempt.
 * @param {{maxRetries: number, baseDelayMs: number, maxDelayMs: number, onRetry?: Function}} options
 * @returns {Promise<*>} Result of the first successful attempt.
 */
async function withRetry(fn, options) {
  let failure;
  let attempt = 0;
  for (;;) {
    try {
      return await fn();
    } catch (error) {
      failure = error;
    }
    if (attempt === options.maxRetries) break;
    const backoff = Math.min(options.baseDelayMs * 2 ** attempt, options.maxDelayMs);
    const delay = failure instanceof RetryableError && failure.retryAfterMs ? failure.retryAfterMs : backoff;
    options.onRetry?.(failure, attempt + 1);
    await new Promise((wake) => setTimeout(wake, delay));
    attempt += 1;
  }
  throw failure;
}
|
|
153
|
+
|
|
154
|
+
// src/core/config.ts
|
|
155
|
+
// Issue-label names used for pipeline state; defaults come from DEFAULT_CONFIG.
var LabelsSchema = z.object({
  ready: z.string().default(DEFAULT_CONFIG.labels.ready),
  inProgress: z.string().default(DEFAULT_CONFIG.labels.inProgress),
  done: z.string().default(DEFAULT_CONFIG.labels.done),
  failed: z.string().default(DEFAULT_CONFIG.labels.failed)
});
// Conditions under which a PR may be auto-merged, and how.
var AutoMergeRulesSchema = z.object({
  types: z.array(z.enum(["feature", "bugfix", "refactor", "test", "docs"])).default(["bugfix", "test", "docs"]),
  maxSize: z.enum(["xs", "s", "m", "l", "xl"]).default("m"),
  mergeStrategy: z.enum(["squash", "merge", "rebase"]).default("squash"),
  requireCleanSecurityScan: z.boolean().default(true)
});
// One repository entry. `owner`, `name`, and `testCommand` are required;
// numeric limits here override the matching `defaults` value when present.
var RepoConfigSchema = z.object({
  owner: z.string().min(1),
  name: z.string().min(1),
  defaultBranch: z.string().default("main"),
  testCommand: z.string().min(1),
  lintCommand: z.string().optional(),
  buildCommand: z.string().optional(),
  runCommand: z.string().optional(),
  claudeMdPath: z.string().optional(),
  maxTurns: z.number().int().positive().optional(),
  maxReviewCycles: z.number().int().min(1).max(10).optional(),
  maxTestRetries: z.number().int().min(0).max(5).optional(),
  budgetPerTaskUsd: z.number().positive().optional(),
  autoMerge: z.boolean().default(false),
  autoMergeRules: AutoMergeRulesSchema.optional(),
  allowedTools: z.array(z.string()).optional(),
  // When set, work happens in this checkout instead of a fresh temp clone.
  localPath: z.string().optional()
});
// Global defaults; every field falls back to DEFAULT_CONFIG, so `defaults`
// may be omitted entirely (see `.default({})` on XForceConfigSchema below).
var DefaultsSchema = z.object({
  model: z.string().default(DEFAULT_CONFIG.model),
  reviewerModel: z.string().default(DEFAULT_CONFIG.reviewerModel),
  plannerModel: z.string().default(DEFAULT_CONFIG.plannerModel),
  maxTurns: z.number().int().positive().default(DEFAULT_CONFIG.maxTurns),
  maxReviewCycles: z.number().int().min(1).max(10).default(DEFAULT_CONFIG.maxReviewCycles),
  maxTestRetries: z.number().int().min(0).max(5).default(DEFAULT_CONFIG.maxTestRetries),
  timeoutMinutes: z.number().int().positive().default(DEFAULT_CONFIG.timeoutMinutes),
  budgetPerTaskUsd: z.number().positive().default(DEFAULT_CONFIG.budgetPerTaskUsd),
  branchPrefix: z.string().default(DEFAULT_CONFIG.branchPrefix),
  labels: LabelsSchema.default({}),
  // Copy the array so schema defaults never alias the shared constant.
  allowedTools: z.array(z.string()).default([...DEFAULT_CONFIG.allowedTools]),
  enablePlanning: z.boolean().default(DEFAULT_CONFIG.enablePlanning),
  enableSecurityScan: z.boolean().default(DEFAULT_CONFIG.enableSecurityScan)
});
// Top-level config file shape. `version` is pinned to "1" and at least one
// repo is required; notification channels are entirely optional.
var XForceConfigSchema = z.object({
  version: z.literal("1"),
  defaults: DefaultsSchema.default({}),
  repos: z.array(RepoConfigSchema).min(1),
  notifications: z.object({
    slack: z.object({
      webhookUrl: z.string(),
      channels: z.object({
        success: z.string().optional(),
        failure: z.string().optional()
      }).optional()
    }).optional(),
    github: z.object({
      mentionOnFailure: z.array(z.string()).optional(),
      mentionOnReview: z.array(z.string()).optional()
    }).optional()
  }).optional()
});
|
|
218
|
+
/**
 * Expands `${VAR}` placeholders in a string from `process.env`.
 * Throws ConfigError if a referenced variable is unset.
 *
 * @param {string} value String possibly containing `${VAR}` placeholders.
 * @returns {string} The string with every placeholder substituted.
 * @throws {ConfigError} When a referenced environment variable is not set.
 */
function interpolateEnvVars(value) {
  return value.replace(/\$\{([^}]+)\}/g, (_match, varName) => {
    const resolved = process.env[varName];
    if (resolved !== void 0) return resolved;
    throw new ConfigError(`Environment variable ${varName} is not set`);
  });
}
/**
 * Recursively applies interpolateEnvVars to every string inside a parsed
 * config value (plain objects and arrays are rebuilt; other values pass
 * through untouched).
 *
 * @param {*} obj Any YAML-parsed value.
 * @returns {*} Structurally identical value with env placeholders expanded.
 */
function interpolateDeep(obj) {
  if (typeof obj === "string") return interpolateEnvVars(obj);
  if (Array.isArray(obj)) return obj.map(interpolateDeep);
  if (obj === null || typeof obj !== "object") return obj;
  const entries = Object.entries(obj).map(([key, value]) => [key, interpolateDeep(value)]);
  return Object.fromEntries(entries);
}
|
|
239
|
+
/**
 * Probes the known config file locations under `basePath` and returns the
 * first one that exists on disk, or null when none do.
 *
 * @param {string} basePath Directory to search from.
 * @returns {string|null} Absolute path of the config file, or null.
 */
function findConfigFile(basePath) {
  const firstExisting = CONFIG_FILE_NAMES
    .map((name) => resolve2(basePath, name))
    .find((candidate) => existsSync(candidate));
  return firstExisting ?? null;
}
|
|
246
|
+
/**
 * Loads, env-interpolates, and validates the X-Force YAML config.
 * Uses `configPath` when given (must exist), otherwise searches the cwd via
 * findConfigFile(). Validation failures list each offending field.
 *
 * @param {string} [configPath] Explicit path to a config file.
 * @returns {object} The parsed config (XForceConfigSchema output, with defaults applied).
 * @throws {ConfigError} On missing file, unset env vars, or schema violations.
 */
function loadConfig(configPath) {
  let filePath;
  if (configPath) {
    filePath = resolve2(configPath);
    if (!existsSync(filePath)) {
      throw new ConfigError(`Config file not found: ${filePath}`);
    }
  } else {
    const found = findConfigFile(process.cwd());
    if (!found) {
      throw new ConfigError(
        `No config file found. Create xforce.config.yaml or specify --config path.`
      );
    }
    filePath = found;
  }
  const raw = readFileSync2(filePath, "utf-8");
  const parsed = parseYaml(raw);
  // Expand ${VAR} placeholders BEFORE validation so schema rules see final values.
  const interpolated = interpolateDeep(parsed);
  const result = XForceConfigSchema.safeParse(interpolated);
  if (!result.success) {
    // One " - path: message" line per zod issue.
    const errors = result.error.issues.map((i) => ` - ${i.path.join(".")}: ${i.message}`).join("\n");
    throw new ConfigError(`Invalid config:
${errors}`);
  }
  return result.data;
}
|
|
273
|
+
/**
 * Produces the effective settings for one repository by merging its entry
 * with the config-wide defaults (repo value wins where the schema allows a
 * per-repo override; model/timeouts/labels always come from defaults).
 *
 * @param {object} config Validated config (loadConfig output).
 * @param {string} owner Repository owner to look up.
 * @param {string} name Repository name to look up.
 * @returns {object} Flat, fully-resolved repo configuration.
 * @throws {ConfigError} When the repo is not declared in `config.repos`.
 */
function resolveRepoConfig(config, owner, name) {
  const repo = config.repos.find((r) => r.owner === owner && r.name === name);
  if (!repo) {
    throw new ConfigError(
      `Repository ${owner}/${name} not found in config. Add it under the 'repos' section.`
    );
  }
  return {
    owner: repo.owner,
    name: repo.name,
    defaultBranch: repo.defaultBranch,
    testCommand: repo.testCommand,
    lintCommand: repo.lintCommand,
    buildCommand: repo.buildCommand,
    runCommand: repo.runCommand,
    claudeMdPath: repo.claudeMdPath,
    // Models are global-only; repos cannot override them.
    model: config.defaults.model,
    reviewerModel: config.defaults.reviewerModel,
    plannerModel: config.defaults.plannerModel,
    // Per-repo limits fall back to the global defaults when unset.
    maxTurns: repo.maxTurns ?? config.defaults.maxTurns,
    maxReviewCycles: repo.maxReviewCycles ?? config.defaults.maxReviewCycles,
    maxTestRetries: repo.maxTestRetries ?? config.defaults.maxTestRetries,
    timeoutMinutes: config.defaults.timeoutMinutes,
    budgetPerTaskUsd: repo.budgetPerTaskUsd ?? config.defaults.budgetPerTaskUsd,
    branchPrefix: config.defaults.branchPrefix,
    labels: config.defaults.labels,
    autoMerge: repo.autoMerge,
    // When no rules are declared, fall back to a permissive rule set; note it
    // is MORE permissive than AutoMergeRulesSchema's own defaults (all types,
    // size xl, no clean-scan requirement) — autoMerge still gates its use.
    autoMergeRules: repo.autoMergeRules ?? {
      types: ["feature", "bugfix", "refactor", "test", "docs"],
      maxSize: "xl",
      mergeStrategy: "squash",
      requireCleanSecurityScan: false
    },
    allowedTools: repo.allowedTools ?? config.defaults.allowedTools,
    enablePlanning: config.defaults.enablePlanning,
    enableSecurityScan: config.defaults.enableSecurityScan,
    localPath: repo.localPath
  };
}
|
|
312
|
+
|
|
313
|
+
// src/pipeline/orchestrator.ts
|
|
314
|
+
import { nanoid } from "nanoid";
|
|
315
|
+
import { rm } from "fs/promises";
|
|
316
|
+
import ora from "ora";
|
|
317
|
+
|
|
318
|
+
// src/core/logger.ts
|
|
319
|
+
import pino from "pino";
|
|
320
|
+
// True when running under a CI system (generic CI or GitHub Actions).
var isCI = !!(process.env.CI || process.env.GITHUB_ACTIONS);
// Root pino logger: plain JSON at "info" in CI, pretty-printed colorized
// output at "debug" locally. LOG_LEVEL overrides the level in both cases.
var logger = pino({
  level: process.env.LOG_LEVEL ?? (isCI ? "info" : "debug"),
  ...isCI ? {} : {
    transport: {
      target: "pino-pretty",
      options: {
        colorize: true,
        translateTime: "HH:MM:ss",
        ignore: "pid,hostname"
      }
    }
  }
});
/**
 * Returns a child logger tagged with a `component` field.
 * @param {string} name Component name attached to every log line.
 */
function createChildLogger(name) {
  return logger.child({ component: name });
}
|
|
337
|
+
|
|
338
|
+
// src/pipeline/state-machine.ts
|
|
339
|
+
// Allowed pipeline state transitions; terminal states map to empty lists.
var VALID_TRANSITIONS = {
  parsing_issue: ["creating_branch", "failed"],
  creating_branch: ["planning", "coding", "failed"],
  planning: ["coding", "failed"],
  coding: ["running_tests", "failed"],
  running_tests: ["reviewing", "coding", "failed"],
  reviewing: ["awaiting_human", "addressing_review", "merging", "failed"],
  addressing_review: ["coding", "failed"],
  merging: ["completed", "awaiting_human", "failed"],
  awaiting_human: ["completed", "failed"],
  completed: [],
  failed: []
};
/**
 * Asserts that moving from state `from` to state `to` is permitted.
 *
 * @param {string} from Current pipeline state.
 * @param {string} to Requested next state.
 * @throws {PipelineError} When the transition is not allowed or `from` is
 *   not a known state.
 */
function validateTransition(from, to) {
  const valid = VALID_TRANSITIONS[from];
  // Guard unknown states explicitly: without this, an unrecognized `from`
  // would crash with a TypeError on `valid.includes` instead of raising a
  // descriptive PipelineError like every other invalid transition.
  if (!valid || !valid.includes(to)) {
    throw new PipelineError(`Invalid state transition: ${from} -> ${to}`);
  }
}
|
|
358
|
+
|
|
359
|
+
// src/pipeline/feedback-loop.ts
|
|
360
|
+
/**
 * Builds the feedback message handed back to the coding agent after a
 * verification command fails. Output longer than 8000 characters is
 * truncated with a marker so the prompt stays bounded.
 *
 * @param {string} kind Command kind ("lint" | "build" | "test" | "run" | other).
 * @param {string} output Raw command output.
 * @returns {string} Human-readable failure message including the output.
 */
function formatCommandFeedback(kind, output) {
  const maxLen = 8e3;
  const labels = {
    lint: "Lint",
    build: "Build",
    test: "Tests",
    run: "Run verification"
  };
  // Unknown kinds fall back to a capitalized form of the kind itself.
  const fallback = kind.charAt(0).toUpperCase() + kind.slice(1);
  const label = labels[kind] ?? fallback;
  let body = output;
  if (body.length > maxLen) {
    body = body.slice(0, maxLen) + "\n\n... (truncated)";
  }
  return `${label} failed. Output:\n\n${body}`;
}
|
|
374
|
+
|
|
375
|
+
// src/github/client.ts
|
|
376
|
+
import { Octokit } from "@octokit/rest";
|
|
377
|
+
// Component logger for all GitHub API helpers below.
var log = createChildLogger("github");
// Shared withRetry settings for GitHub calls: up to 3 retries with
// exponential backoff from 1s, capped at 30s; each retry is logged.
var RETRY_OPTIONS = {
  maxRetries: 3,
  baseDelayMs: 1e3,
  maxDelayMs: 3e4,
  onRetry: (error, attempt) => {
    log.warn({ error: error.message, attempt }, "GitHub API call failed, retrying");
  }
};
|
|
386
|
+
/**
 * Wraps a GitHub API call with the shared retry policy, classifying errors:
 * 429/403 become RetryableError (honoring a Retry-After header when present),
 * 5xx become RetryableError with plain backoff, and anything else is rethrown
 * immediately.
 *
 * @param {Function} fn Async GitHub API call.
 * @returns {Promise<*>} The call's result.
 */
async function withGitHubRetry(fn) {
  return withRetry(async () => {
    try {
      return await fn();
    } catch (error) {
      const status = error?.status ?? error?.response?.status;
      // NOTE(review): 403 is treated as a rate limit here, but GitHub also
      // uses 403 for plain permission denials, which will be retried
      // pointlessly — consider checking x-ratelimit-remaining. Kept as-is.
      if (status === 429 || status === 403) {
        const retryAfter = error?.response?.headers?.["retry-after"];
        // Retry-After is seconds; always parse with an explicit radix.
        const retryMs = retryAfter ? Number.parseInt(retryAfter, 10) * 1e3 : void 0;
        throw new RetryableError(`GitHub API rate limited (${status})`, retryMs);
      }
      if (status >= 500) {
        throw new RetryableError(`GitHub API server error (${status})`);
      }
      throw error;
    }
  }, RETRY_OPTIONS);
}
|
|
404
|
+
// Lazily-created singleton Octokit client shared by all helpers.
var _octokit = null;
/**
 * Returns the shared Octokit client, creating it on first use from the
 * GITHUB_TOKEN environment variable.
 *
 * @returns {Octokit} Authenticated client.
 * @throws {Error} When GITHUB_TOKEN is not set.
 */
function getOctokit() {
  if (!_octokit) {
    const token = process.env.GITHUB_TOKEN;
    if (!token) {
      throw new Error("GITHUB_TOKEN environment variable is required");
    }
    _octokit = new Octokit({ auth: token });
  }
  return _octokit;
}
|
|
415
|
+
/**
 * Fetches a single issue, with the standard GitHub retry policy.
 * @returns {Promise<object>} The Octokit issue payload.
 */
async function getIssue(owner, repo, issueNumber) {
  return withGitHubRetry(async () => {
    const octokit = getOctokit();
    log.debug({ owner, repo, issueNumber }, "Fetching issue");
    const { data } = await octokit.issues.get({ owner, repo, issue_number: issueNumber });
    return data;
  });
}
/**
 * Lists the label NAMES currently on an issue (not full label objects).
 * @returns {Promise<string[]>} Label names.
 */
async function getIssueLabels(owner, repo, issueNumber) {
  return withGitHubRetry(async () => {
    const octokit = getOctokit();
    const { data } = await octokit.issues.listLabelsOnIssue({
      owner,
      repo,
      issue_number: issueNumber
    });
    return data.map((l) => l.name);
  });
}
/**
 * Adds one label to an issue, with the standard GitHub retry policy.
 */
async function addLabel(owner, repo, issueNumber, label) {
  return withGitHubRetry(async () => {
    const octokit = getOctokit();
    log.debug({ owner, repo, issueNumber, label }, "Adding label");
    await octokit.issues.addLabels({ owner, repo, issue_number: issueNumber, labels: [label] });
  });
}
|
|
441
|
+
/**
 * Removes one label from an issue. Best-effort: failures (e.g. the label was
 * never applied) are swallowed so state-label cleanup can never fail the
 * pipeline. Unlike before, the call now goes through withGitHubRetry for
 * consistency with the sibling helpers, so transient rate limits and 5xx
 * responses are retried instead of silently dropping the removal.
 */
async function removeLabel(owner, repo, issueNumber, label) {
  const octokit = getOctokit();
  log.debug({ owner, repo, issueNumber, label }, "Removing label");
  try {
    await withGitHubRetry(
      () => octokit.issues.removeLabel({ owner, repo, issue_number: issueNumber, name: label })
    );
  } catch {
    // Deliberately ignored: a 404 here just means the label wasn't present.
  }
}
|
|
449
|
+
/**
 * Posts a comment on an issue (or PR — same endpoint), with retry.
 * @returns {Promise<object>} The created comment payload.
 */
async function addComment(owner, repo, issueNumber, body) {
  return withGitHubRetry(async () => {
    const octokit = getOctokit();
    log.debug({ owner, repo, issueNumber }, "Adding comment");
    const { data } = await octokit.issues.createComment({
      owner,
      repo,
      issue_number: issueNumber,
      body
    });
    return data;
  });
}
/**
 * Fetches a PR's unified diff (via the `diff` media type), with retry.
 * @returns {Promise<string>} Raw diff text.
 */
async function getPRDiff(owner, repo, prNumber) {
  return withGitHubRetry(async () => {
    const octokit = getOctokit();
    log.debug({ owner, repo, prNumber }, "Fetching PR diff");
    const { data } = await octokit.pulls.get({
      owner,
      repo,
      pull_number: prNumber,
      // Requesting the diff representation makes `data` a string, not JSON.
      mediaType: { format: "diff" }
    });
    return data;
  });
}
|
|
475
|
+
/**
 * Extracts owner, repo, and issue number from a GitHub issue URL.
 *
 * @param {string} url e.g. "https://github.com/owner/repo/issues/42".
 * @returns {{owner: string, repo: string, issueNumber: number}}
 * @throws {Error} When the URL does not look like a GitHub issue URL.
 */
function parseIssueUrl(url) {
  const parts = /github\.com\/([^/]+)\/([^/]+)\/issues\/(\d+)/.exec(url);
  if (parts === null) {
    throw new Error(`Invalid GitHub issue URL: ${url}`);
  }
  const [, owner, repo, issueDigits] = parts;
  return { owner, repo, issueNumber: Number.parseInt(issueDigits, 10) };
}
|
|
486
|
+
/**
 * Extracts owner, repo, and PR number from a GitHub pull request URL.
 *
 * @param {string} url e.g. "https://github.com/owner/repo/pull/7".
 * @returns {{owner: string, repo: string, prNumber: number}}
 * @throws {Error} When the URL does not look like a GitHub PR URL.
 */
function parsePRUrl(url) {
  const parts = /github\.com\/([^/]+)\/([^/]+)\/pull\/(\d+)/.exec(url);
  if (parts === null) {
    throw new Error(`Invalid GitHub PR URL: ${url}`);
  }
  const [, owner, repo, prDigits] = parts;
  return { owner, repo, prNumber: Number.parseInt(prDigits, 10) };
}
|
|
497
|
+
/**
 * Fetches a pull request's JSON payload, with the standard retry policy.
 * @returns {Promise<object>} The Octokit pull request payload.
 */
async function getPR(owner, repo, prNumber) {
  return withGitHubRetry(async () => {
    const octokit = getOctokit();
    log.debug({ owner, repo, prNumber }, "Fetching PR");
    const { data } = await octokit.pulls.get({ owner, repo, pull_number: prNumber });
    return data;
  });
}
|
|
505
|
+
|
|
506
|
+
// src/github/issue-parser.ts
|
|
507
|
+
/**
 * Returns the trimmed text under a markdown `##`/`###` heading, stopping at
 * the next heading of the same depth range. Heading match is case-insensitive
 * and must occupy a whole line. Returns null when the heading is absent or
 * the section is empty.
 *
 * @param {string} body Markdown document (e.g. an issue body).
 * @param {string} heading Heading text without the leading hashes.
 * @returns {string|null} Section content, or null.
 */
function extractSection(body, heading) {
  const headingRegex = new RegExp(`^#{2,3}\\s+${escapeRegex(heading)}\\s*$`, "mi");
  const headingMatch = headingRegex.exec(body);
  if (headingMatch === null) return null;
  const afterHeading = body.slice(headingMatch.index + headingMatch[0].length);
  const nextHeading = afterHeading.match(/\n#{2,3}\s+/);
  const rawSection = nextHeading ? afterHeading.slice(0, nextHeading.index) : afterHeading;
  const section = rawSection.trim();
  return section.length > 0 ? section : null;
}
/**
 * Escapes regex metacharacters so `str` can be embedded in a RegExp literally.
 * @param {string} str Arbitrary text.
 * @returns {string} Regex-safe text.
 */
function escapeRegex(str) {
  return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
|
|
521
|
+
/**
 * Parses a markdown bullet list into plain items, stripping `-`/`*` markers
 * and optional `[ ]` / `[x]` checkboxes; blank lines are dropped.
 *
 * @param {string} text Markdown list text.
 * @returns {string[]} Non-empty item strings in document order.
 */
function parseBulletList(text) {
  const items = [];
  for (const rawLine of text.split("\n")) {
    const item = rawLine.replace(/^[-*]\s*(\[[ x]\]\s*)?/, "").trim();
    if (item.length > 0) {
      items.push(item);
    }
  }
  return items;
}
|
|
524
|
+
/**
 * Derives a task priority from issue labels. For each label (in order) the
 * rules are checked highest-first, and the first hit wins; with no matching
 * label the priority defaults to "medium".
 *
 * @param {string[]} labels Issue label names.
 * @returns {"critical"|"high"|"medium"|"low"} Priority level.
 */
function extractPriority(labels) {
  const rules = [
    ["critical", (l) => l.includes("critical")],
    ["high", (l) => l.includes("priority/high") || l === "high"],
    ["medium", (l) => l.includes("priority/medium") || l === "medium"],
    ["low", (l) => l.includes("priority/low") || l === "low"]
  ];
  for (const label of labels) {
    const lower = label.toLowerCase();
    const hit = rules.find(([, matches]) => matches(lower));
    if (hit) return hit[0];
  }
  return "medium";
}
|
|
534
|
+
/**
 * Derives a task type from issue labels. Rules are checked in a fixed order
 * per label (feature/enhancement first), and the first match wins; with no
 * matching label the type defaults to "feature".
 *
 * @param {string[]} labels Issue label names.
 * @returns {"feature"|"bugfix"|"refactor"|"test"|"docs"} Task type.
 */
function extractType(labels) {
  const rules = [
    ["feature", (l) => l.includes("feature") || l.includes("enhancement")],
    ["bugfix", (l) => l.includes("bug")],
    ["refactor", (l) => l.includes("refactor")],
    ["test", (l) => l.includes("test")],
    ["docs", (l) => l.includes("doc")]
  ];
  for (const label of labels) {
    const lower = label.toLowerCase();
    const hit = rules.find(([, matches]) => matches(lower));
    if (hit) return hit[0];
  }
  return "feature";
}
|
|
545
|
+
/**
 * Derives a task size estimate ("xs".."xl"). Labels are checked first; when
 * none match, the issue body's "Estimated Size" section is scanned with
 * substring/word heuristics. Defaults to "m".
 *
 * NOTE: both passes are order-sensitive — xs is checked before s, and the
 * size/l rule explicitly excludes xl; do not reorder the branches.
 *
 * @param {string[]} labels Issue label names.
 * @param {string} body Issue body markdown.
 * @returns {"xs"|"s"|"m"|"l"|"xl"} Size estimate.
 */
function extractSize(labels, body) {
  for (const label of labels) {
    const lower = label.toLowerCase();
    if (lower.includes("size/xs") || lower.includes("extra small")) return "xs";
    if (lower.includes("size/s") || lower === "small") return "s";
    if (lower.includes("size/m") || lower === "medium") return "m";
    // Guard against "size/xl" substring-matching the "size/l" rule.
    if (lower.includes("size/l") && !lower.includes("xl")) return "l";
    if (lower.includes("size/xl") || lower.includes("extra large")) return "xl";
  }
  const sizeSection = extractSection(body, "Estimated Size");
  if (sizeSection) {
    const lower = sizeSection.toLowerCase();
    // xs/xl are matched before the single-letter word checks below so that
    // e.g. "xl" is never consumed by the \bl\b rule.
    if (lower.includes("xs") || lower.includes("extra small")) return "xs";
    if (lower.includes("xl") || lower.includes("extra large")) return "xl";
    // Line-count ranges come from the issue template's size options.
    if (lower.includes("< 50") || lower.match(/\bs\b/)) return "s";
    if (lower.includes("150-500") || lower.match(/\bm\b/)) return "m";
    if (lower.includes("500-1000") || lower.match(/\bl\b/)) return "l";
  }
  return "m";
}
|
|
565
|
+
/**
 * Parses a structured GitHub issue into a task spec. The body must contain
 * "## Context" and a non-empty "## Acceptance Criteria" bullet list;
 * "Affected Files" and "Edge Cases" sections are optional. Priority, type,
 * and size are inferred from labels (and, for size, the body).
 *
 * @param {{title: string, body: string, labels: string[], issueNumber: number,
 *          issueUrl: string, repoOwner: string, repoName: string}} params
 * @returns {object} Task spec consumed by the pipeline.
 * @throws {IssueParseError} When the body is empty or required sections are
 *   missing/empty.
 */
function parseIssueBody(params) {
  const { title, body, labels, issueNumber, issueUrl, repoOwner, repoName } = params;
  if (!body || body.trim().length === 0) {
    throw new IssueParseError("Issue body is empty");
  }
  const context = extractSection(body, "Context");
  if (!context) {
    throw new IssueParseError('Missing required section: "## Context"');
  }
  const criteriaSection = extractSection(body, "Acceptance Criteria");
  if (!criteriaSection) {
    throw new IssueParseError('Missing required section: "## Acceptance Criteria"');
  }
  const acceptanceCriteria = parseBulletList(criteriaSection);
  if (acceptanceCriteria.length === 0) {
    throw new IssueParseError("Acceptance Criteria section has no items");
  }
  // Optional sections default to empty lists rather than failing the parse.
  const filesSection = extractSection(body, "Affected Files");
  const affectedFiles = filesSection ? parseBulletList(filesSection) : [];
  const edgeCasesSection = extractSection(body, "Edge Cases");
  const edgeCases = edgeCasesSection ? parseBulletList(edgeCasesSection) : [];
  return {
    title,
    context,
    acceptanceCriteria,
    affectedFiles,
    edgeCases,
    priority: extractPriority(labels),
    type: extractType(labels),
    size: extractSize(labels, body),
    issueNumber,
    issueUrl,
    repoOwner,
    repoName
  };
}
|
|
601
|
+
|
|
602
|
+
// src/github/branch.ts
|
|
603
|
+
import { tmpdir } from "os";
|
|
604
|
+
import { join } from "path";
|
|
605
|
+
import { mkdtemp, access, writeFile } from "fs/promises";
|
|
606
|
+
import { simpleGit } from "simple-git";
|
|
607
|
+
// Fallback .gitignore written into fresh clones that lack one (see
// setupBranch), so agent build artifacts and secrets are never committed.
var DEFAULT_GITIGNORE = `node_modules/
.next/
dist/
build/
.env
.env.local
.env*.local
*.tsbuildinfo
.DS_Store
`;
// Component logger for git/branch operations below.
var log2 = createChildLogger("git");
|
|
618
|
+
/**
 * Converts arbitrary text into a branch-name-safe slug: lowercase, runs of
 * non-alphanumerics collapsed to single hyphens, edge hyphens stripped,
 * capped at 40 characters.
 *
 * @param {string} text Free-form text (e.g. an issue title).
 * @returns {string} Slug suitable for a git branch segment.
 */
function slugify(text) {
  const hyphenated = text.toLowerCase().replace(/[^a-z0-9]+/g, "-");
  const trimmed = hyphenated.replace(/^-|-$/g, "");
  return trimmed.slice(0, 40);
}
|
|
621
|
+
/**
 * Prepares a working branch for the task. Two modes:
 *  - localDir set: reuse that checkout — fetch origin, stash any dirty
 *    files, and (re)create the branch from origin/<defaultBranch>.
 *  - otherwise: shallow-clone the repo into a temp dir, create the branch,
 *    and write a default .gitignore if the repo has none.
 * In both modes the bot identity is configured for commits.
 *
 * @param {{owner: string, repo: string, defaultBranch: string,
 *          issueNumber: number, issueTitle: string, branchPrefix: string,
 *          localDir?: string}} params
 * @returns {Promise<{workDir: string, branchName: string, git: object, isLocal: boolean}>}
 */
async function setupBranch(params) {
  const { owner, repo, defaultBranch, issueNumber, issueTitle, branchPrefix, localDir } = params;
  const slug = slugify(issueTitle);
  const branchName = `${branchPrefix}/${issueNumber}-${slug}`;
  if (localDir) {
    log2.info({ workDir: localDir, branchName }, "Using local repository");
    const repoGit2 = simpleGit(localDir);
    await repoGit2.fetch("origin");
    const status = await repoGit2.status();
    const hadChanges = status.files.length > 0;
    if (hadChanges) {
      // NOTE(review): the stash is never popped here — presumably a caller
      // restores it; verify, or local edits stay stashed after the run.
      log2.info("Stashing local changes");
      await repoGit2.stash();
    }
    try {
      // -B (re)creates the branch, resetting it onto the remote tip.
      await repoGit2.checkout(["-B", branchName, `origin/${defaultBranch}`]);
    } catch {
      // Fall back when origin/<defaultBranch> is unavailable.
      await repoGit2.checkoutLocalBranch(branchName);
    }
    await repoGit2.addConfig("user.name", "X-Force Bot");
    await repoGit2.addConfig("user.email", "xforce-bot@users.noreply.github.com");
    log2.info({ branchName }, "Branch created");
    return { workDir: localDir, branchName, git: repoGit2, isLocal: true };
  }
  // NOTE(review): embedding GITHUB_TOKEN in the clone URL can leak it via
  // git error output or the saved remote URL — consider a credential helper.
  const cloneUrl = `https://x-access-token:${process.env.GITHUB_TOKEN}@github.com/${owner}/${repo}.git`;
  const workDir = await mkdtemp(join(tmpdir(), `xforce-${repo}-`));
  log2.info({ workDir, branchName }, "Cloning repository");
  const git = simpleGit();
  // Shallow clone of just the default branch keeps setup fast.
  await git.clone(cloneUrl, workDir, ["--depth", "1", "--branch", defaultBranch]);
  const repoGit = simpleGit(workDir);
  await repoGit.addConfig("user.name", "X-Force Bot");
  await repoGit.addConfig("user.email", "xforce-bot@users.noreply.github.com");
  await repoGit.checkoutLocalBranch(branchName);
  const gitignorePath = join(workDir, ".gitignore");
  try {
    await access(gitignorePath);
  } catch {
    // No .gitignore in the repo: write the safe default.
    log2.info("Creating default .gitignore");
    await writeFile(gitignorePath, DEFAULT_GITIGNORE, "utf-8");
  }
  log2.info({ branchName }, "Branch created");
  return { workDir, branchName, git: repoGit, isLocal: false };
}
|
|
664
|
+
/**
 * Checks the working tree back out onto the default branch after a run.
 * Best-effort: a failed checkout (e.g. dirty tree) is logged, not thrown,
 * so cleanup can never fail the pipeline.
 *
 * @param {object} git simple-git instance bound to the work dir.
 * @param {string} defaultBranch Branch name to restore.
 */
async function restoreDefaultBranch(git, defaultBranch) {
  try {
    await git.checkout(defaultBranch);
    log2.info({ branch: defaultBranch }, "Restored default branch");
  } catch (error) {
    log2.warn({ error: String(error) }, "Failed to restore default branch");
  }
}
|
|
672
|
+
/**
 * Stages everything, commits with `message`, and force-pushes the branch.
 * Returns the commit SHA, or "" when there was nothing to commit.
 *
 * @param {{git: object, branchName: string, message: string}} params
 * @returns {Promise<string>} Commit SHA or empty string.
 */
async function commitAndPush(params) {
  const { git, branchName, message } = params;
  await git.add("-A");
  const status = await git.status();
  if (status.staged.length === 0 && status.files.length === 0) {
    log2.warn("No changes to commit");
    return "";
  }
  const commitResult = await git.commit(message);
  log2.info({ sha: commitResult.commit }, "Changes committed");
  // --force: the pipeline owns this branch and may rewrite it across review
  // cycles; never point this at a shared branch.
  await git.push("origin", branchName, ["--set-upstream", "--force"]);
  log2.info({ branchName }, "Pushed to remote");
  return commitResult.commit;
}
|
|
686
|
+
|
|
687
|
+
// src/github/pr.ts
|
|
688
|
+
// Component logger for pull-request operations below.
var log3 = createChildLogger("pr");
/**
 * Opens a PR from the task branch to the default branch, titled
 * "[X-Force] <task title>" with a body generated by buildPRBody().
 *
 * @param {{owner: string, repo: string, branchName: string,
 *          defaultBranch: string, taskSpec: object, pipeline: object}} params
 * @returns {Promise<{prNumber: number, prUrl: string}>}
 */
async function createPullRequest(params) {
  const { owner, repo, branchName, defaultBranch, taskSpec, pipeline } = params;
  const octokit = getOctokit();
  const body = buildPRBody(taskSpec, pipeline);
  log3.info({ owner, repo, branchName }, "Creating pull request");
  const { data } = await octokit.pulls.create({
    owner,
    repo,
    title: `[X-Force] ${taskSpec.title}`,
    body,
    head: branchName,
    base: defaultBranch
  });
  log3.info({ prNumber: data.number, prUrl: data.html_url }, "PR created");
  return { prNumber: data.number, prUrl: data.html_url };
}
|
|
705
|
+
/**
 * Regenerates and replaces an existing PR's body (e.g. after another review
 * cycle changes the pipeline stats shown in it).
 */
async function updatePullRequest(params) {
  const { owner, repo, prNumber, taskSpec, pipeline } = params;
  const octokit = getOctokit();
  const body = buildPRBody(taskSpec, pipeline);
  await octokit.pulls.update({ owner, repo, pull_number: prNumber, body });
}
/**
 * Adds one label to a PR (PRs share the issues label endpoint).
 */
async function labelPR(owner, repo, prNumber, label) {
  const octokit = getOctokit();
  await octokit.issues.addLabels({ owner, repo, issue_number: prNumber, labels: [label] });
}
/**
 * Posts a comment on a PR (PRs share the issues comment endpoint).
 */
async function commentOnPR(owner, repo, prNumber, body) {
  const octokit = getOctokit();
  await octokit.issues.createComment({ owner, repo, issue_number: prNumber, body });
}
|
|
719
|
+
/**
 * Attempts to merge a PR with the given strategy. A 405 (not mergeable,
 * e.g. failing checks/branch protection) or 409 (head moved) is reported as
 * `{ merged: false, error }` so the pipeline can fall back to a human;
 * any other error is rethrown.
 *
 * @param {{owner: string, repo: string, prNumber: number,
 *          strategy: "squash"|"merge"|"rebase", commitTitle?: string}} params
 * @returns {Promise<{merged: boolean, sha?: string, error?: string}>}
 */
async function mergePR(params) {
  const { owner, repo, prNumber, strategy, commitTitle } = params;
  const octokit = getOctokit();
  log3.info({ owner, repo, prNumber, strategy }, "Attempting to merge PR");
  try {
    const { data } = await octokit.pulls.merge({
      owner,
      repo,
      pull_number: prNumber,
      merge_method: strategy,
      commit_title: commitTitle
    });
    log3.info({ sha: data.sha, merged: data.merged }, "PR merge result");
    return { merged: data.merged, sha: data.sha };
  } catch (error) {
    const err = error;
    const message = err.message ?? String(error);
    const status = err.status;
    // 405/409 are expected "cannot merge right now" outcomes, not failures.
    if (status === 405 || status === 409) {
      log3.warn({ status, message }, "PR cannot be auto-merged");
      return { merged: false, error: message };
    }
    throw error;
  }
}
|
|
744
|
+
/**
 * Build the markdown body for an X-Force pull request: links the source issue,
 * restates the task spec (title, context, acceptance criteria as unchecked
 * boxes), and summarizes pipeline run stats. Pure string templating.
 */
function buildPRBody(taskSpec, pipeline) {
  const criteria = taskSpec.acceptanceCriteria.map((c) => `- [ ] ${c}`).join("\n");
  const cost = pipeline.totalCostUsd.toFixed(4);
  return `## Automated by X-Force

Resolves #${taskSpec.issueNumber}

### Task
**${taskSpec.title}**

${taskSpec.context}

### Acceptance Criteria
${criteria}

### Pipeline Info
- **Pipeline ID**: \`${pipeline.id}\`
- **Review Cycles**: ${pipeline.reviewCycle}
- **Total Cost**: $${cost}
- **Status**: ${pipeline.status}

---
*Generated by [X-Force](https://github.com/xforce) AI Pipeline*`;
}
|
|
766
|
+
|
|
767
|
+
// src/agents/coder.ts
|
|
768
|
+
import { query } from "@anthropic-ai/claude-agent-sdk";
|
|
769
|
+
|
|
770
|
+
// src/agents/prompts/coder-system.ts
|
|
771
|
+
var CODER_SYSTEM_PROMPT_APPEND = `
|
|
772
|
+
You are working as part of an automated development pipeline called X-Force.
|
|
773
|
+
You receive specifications from GitHub Issues and implement them autonomously.
|
|
774
|
+
|
|
775
|
+
CRITICAL: START WRITING CODE IMMEDIATELY. Do not spend more than a few turns reading existing files. Your primary job is to CREATE and EDIT files. The pipeline will handle committing, testing, and reviewing separately \u2014 you must NOT run tests or create git commits yourself.
|
|
776
|
+
|
|
777
|
+
RULES:
|
|
778
|
+
1. WRITE CODE FIRST. Briefly check the repo structure, then start creating/editing files right away.
|
|
779
|
+
2. Implement EXACTLY what the spec asks for. Do not add unrequested features.
|
|
780
|
+
3. Follow the existing code style and patterns in the repository.
|
|
781
|
+
4. Make minimal, focused changes. Do not refactor unrelated code.
|
|
782
|
+
5. Ensure a .gitignore exists with at minimum: node_modules/, .next/, dist/, build/, .env, .env.local
|
|
783
|
+
6. NEVER commit dependency directories (node_modules, vendor, .venv, __pycache__, etc.).
|
|
784
|
+
|
|
785
|
+
DO NOT:
|
|
786
|
+
- Run tests (the pipeline runs tests after you finish)
|
|
787
|
+
- Run build commands (the pipeline handles build verification)
|
|
788
|
+
- Run lint commands (the pipeline handles lint checking)
|
|
789
|
+
- Create git commits (the pipeline commits your changes)
|
|
790
|
+
- Run git commands (the pipeline manages git)
|
|
791
|
+
- Spend excessive turns exploring \u2014 get to writing code quickly
|
|
792
|
+
|
|
793
|
+
When you receive review feedback (marked with "## Review Feedback"), address EVERY issue marked as critical or major. For minor issues, use your judgment.
|
|
794
|
+
|
|
795
|
+
When you receive test failure output (marked with "## Test Failures"), analyze the failures and fix your code to make all tests pass.
|
|
796
|
+
`;
|
|
797
|
+
/**
 * Render the initial user prompt for the coding agent from a task spec.
 * Empty affectedFiles/edgeCases lists fall back to explanatory placeholder
 * text. Pure string templating; no side effects.
 */
function buildCoderPrompt(taskSpec) {
  const criteria = taskSpec.acceptanceCriteria.map((c, i) => `${i + 1}. ${c}`).join("\n");
  const files = taskSpec.affectedFiles.length > 0 ? taskSpec.affectedFiles.map((f) => `- ${f}`).join("\n") : "Not specified - determine the best files to modify.";
  const edges = taskSpec.edgeCases.length > 0 ? taskSpec.edgeCases.map((e) => `- ${e}`).join("\n") : "None specified.";
  return `## Task: ${taskSpec.title}

### Context
${taskSpec.context}

### Acceptance Criteria
${criteria}

### Affected Files
${files}

### Edge Cases to Handle
${edges}

### Task Type: ${taskSpec.type} | Size: ${taskSpec.size} | Priority: ${taskSpec.priority}

Please implement this task now. Start writing code immediately \u2014 do not over-analyze. The pipeline will handle testing, committing, and reviewing your changes.`;
}
|
|
816
|
+
/**
 * Render reviewer output as a "## Review Feedback" section appended to the
 * coder prompt: issue list, unmet acceptance criteria, and security concerns,
 * each with fallback text when empty.
 */
function buildReviewFeedbackSection(review) {
  const formatIssue = (issue) => `- **[${issue.severity.toUpperCase()}]** ${issue.file}${issue.line ? `:${issue.line}` : ""}
${issue.description}${issue.suggestedFix ? `
Suggested fix: ${issue.suggestedFix}` : ""}`;
  const issues = review.issues.map(formatIssue).join("\n");
  const unmet = review.specAdherence.unmet.length > 0 ? review.specAdherence.unmet.map((c) => `- ${c}`).join("\n") : "All criteria met.";
  const security = review.securityConcerns.length > 0 ? review.securityConcerns.map((c) => `- ${c}`).join("\n") : "None.";
  return `## Review Feedback (Address These Issues)

### Summary
${review.summary}

### Issues to Fix
${issues || "No specific issues listed."}

### Unmet Acceptance Criteria
${unmet}

### Security Concerns
${security}

Address all CRITICAL and MAJOR issues. Then re-run tests.`;
}
|
|
838
|
+
// Thin wrapper: test failures are rendered as command failures of kind "test"
// (yields the "## Test Failures" heading the coder system prompt refers to).
function buildTestFailureFeedbackSection(testOutput) {
  return buildCommandFailureFeedbackSection("test", testOutput);
}
|
|
841
|
+
/**
 * Render raw command failure output (tests, build, lint) as a markdown
 * feedback section for the coder. `kind` is a lowercase noun such as "test";
 * it is capitalized for the heading and quoted verbatim in the instructions.
 */
function buildCommandFailureFeedbackSection(kind, output) {
  const heading = kind.charAt(0).toUpperCase() + kind.slice(1);
  const lines = [
    `## ${heading} Failures`,
    "",
    `The following ${kind} failures were detected after your changes. Please analyze and fix them:`,
    "",
    "```",
    output,
    "```",
    "",
    `Fix the code to make the ${kind} pass. Do not commit \u2014 the pipeline handles that.`
  ];
  return lines.join("\n");
}
|
|
853
|
+
|
|
854
|
+
// src/agents/prompts/planner-system.ts
|
|
855
|
+
var PLANNER_SYSTEM_PROMPT = `You are a senior software architect analyzing a codebase to create an implementation plan.
|
|
856
|
+
|
|
857
|
+
You have access to read-only tools: Read, Glob, and Grep. Use them to:
|
|
858
|
+
1. Explore the repository structure (Glob for file patterns, Read for file contents)
|
|
859
|
+
2. Understand existing patterns, conventions, and architecture
|
|
860
|
+
3. Identify the exact files that need to be modified or created
|
|
861
|
+
4. Analyze dependencies and potential ripple effects
|
|
862
|
+
|
|
863
|
+
RULES:
|
|
864
|
+
- Start by quickly checking the project structure (Glob for key file patterns). If the repo is empty or has very few files, skip deep exploration and produce your plan immediately.
|
|
865
|
+
- Be specific about file paths \u2014 use actual paths you found in the codebase, or specify paths to create for greenfield projects.
|
|
866
|
+
- Consider test files and their patterns.
|
|
867
|
+
- Identify potential risks (breaking changes, missing test coverage, security).
|
|
868
|
+
- Estimate complexity honestly.
|
|
869
|
+
- Your plan should be actionable and concrete, not vague.
|
|
870
|
+
- Do NOT spend more than a few turns exploring. Produce your JSON plan as quickly as possible.
|
|
871
|
+
|
|
872
|
+
You MUST respond with valid JSON matching the required schema.`;
|
|
873
|
+
/**
 * Render the user prompt for the planning agent from a task spec. Mirrors
 * buildCoderPrompt's structure but asks for analysis/planning rather than
 * implementation. Pure string templating.
 */
function buildPlannerPrompt(taskSpec) {
  const criteria = taskSpec.acceptanceCriteria.map((c, i) => `${i + 1}. ${c}`).join("\n");
  const files = taskSpec.affectedFiles.length > 0 ? taskSpec.affectedFiles.map((f) => `- ${f}`).join("\n") : "Not specified \u2014 determine from codebase analysis.";
  const edges = taskSpec.edgeCases.length > 0 ? taskSpec.edgeCases.map((e) => `- ${e}`).join("\n") : "None specified.";
  return `## Task: ${taskSpec.title}

### Context
${taskSpec.context}

### Acceptance Criteria
${criteria}

### Affected Files (from issue \u2014 verify these)
${files}

### Edge Cases to Handle
${edges}

### Task Type: ${taskSpec.type} | Size: ${taskSpec.size} | Priority: ${taskSpec.priority}

Analyze the codebase and produce a structured implementation plan. Use your tools to explore the project structure and understand the existing patterns before planning.`;
}
|
|
892
|
+
/**
 * Render an implementation plan as a markdown section appended to the coder
 * prompt: approach, file lists, ordered steps, and risks, with "None"
 * fallbacks for empty lists.
 *
 * Fix: the original called plan.implementationSteps.sort(...) directly, which
 * mutates the caller's plan object in place (Array#sort sorts in place) as a
 * side effect of merely rendering it. We sort a shallow copy instead.
 */
function buildPlanSection(plan) {
  // Copy before sorting so the caller's step order is left untouched.
  const orderedSteps = [...plan.implementationSteps].sort((a, b) => a.order - b.order);
  const steps = orderedSteps.map(
    (s) => `${s.order}. ${s.description}
Files: ${s.files.join(", ")}
Rationale: ${s.rationale}`
  ).join("\n\n");
  return `## Implementation Plan (Follow This)

### Approach
${plan.approach}

### Files to Modify
${plan.filesToModify.map((f) => `- ${f}`).join("\n")}

### Files to Create
${plan.filesToCreate.map((f) => `- ${f}`).join("\n") || "None"}

### Steps
${steps}

### Risks to Watch For
${plan.risks.map((r) => `- ${r}`).join("\n") || "None identified"}

Follow this plan closely. If you discover the plan is incorrect or incomplete, adapt as needed but document your deviations.`;
}
|
|
917
|
+
|
|
918
|
+
// src/agents/coder.ts
// Child logger scoped to the coding agent.
var log4 = createChildLogger("coder");
|
|
920
|
+
/** Clamp a string to at most `max` characters, appending "..." when cut. */
function truncate(s, max) {
  if (s.length <= max) {
    return s;
  }
  return s.slice(0, max) + "...";
}
|
|
923
|
+
/**
 * Produce a short human-readable progress line for a tool_use block emitted
 * by the agent (e.g. `Reading src/a.ts`, `Running: npm i`). Unknown tools
 * fall back to `Using <name>`. Commands and grep patterns are truncated to
 * keep log lines short.
 */
function describeToolUse(name, input) {
  const path = input?.file_path;
  const formatters = {
    Read: () => `Reading ${path ?? "file"}`,
    Write: () => `Writing ${path ?? "file"}`,
    Edit: () => `Editing ${path ?? "file"}`,
    Bash: () => `Running: ${truncate(input?.command ?? "command", 80)}`,
    Glob: () => `Searching files: ${input?.pattern ?? ""}`,
    Grep: () => `Searching for: ${truncate(input?.pattern ?? "", 60)}`
  };
  // Object.hasOwn guards against inherited keys like "toString".
  if (Object.hasOwn(formatters, name)) {
    return formatters[name]();
  }
  return `Using ${name}`;
}
|
|
942
|
+
/**
 * Run the coding agent for one task via the Claude Agent SDK.
 *
 * Builds the user prompt from the task spec, then appends optional sections in
 * a fixed order: implementation plan, prior review feedback, test failures,
 * and other command failures. Streams the agent's messages to log tool usage
 * (and forward it to onProgress), then interprets the final result message:
 *   - "error_max_turns": logged as a warning and returned as a soft success
 *     with placeholder result text (partial work is kept, not discarded);
 *   - any other non-"success" subtype: throws CodingAgentError;
 *   - "success": returns { sessionId, costUsd, result, numTurns }.
 *
 * `sessionId`, when provided, resumes a previous agent session via the SDK's
 * `resume` option (used for review/test-fix iterations).
 */
async function runCodingAgent(params) {
  const { taskSpec, repoConfig, workingDir, previousReview, testFailures, commandFailures, sessionId, plan, onProgress } = params;
  // Assemble the prompt: base task, then optional feedback sections.
  let prompt = buildCoderPrompt(taskSpec);
  if (plan) {
    prompt += "\n\n" + buildPlanSection(plan);
  }
  if (previousReview) {
    prompt += "\n\n" + buildReviewFeedbackSection(previousReview);
  }
  if (testFailures) {
    prompt += "\n\n" + buildTestFailureFeedbackSection(testFailures);
  }
  if (commandFailures) {
    prompt += "\n\n" + buildCommandFailureFeedbackSection(commandFailures.kind, commandFailures.output);
  }
  log4.info(
    {
      task: taskSpec.title,
      model: repoConfig.model,
      maxTurns: repoConfig.maxTurns,
      budget: repoConfig.budgetPerTaskUsd,
      hasReview: !!previousReview,
      hasTestFailures: !!testFailures
    },
    "Starting coding agent"
  );
  // NOTE(review): permissions are deliberately bypassed — the agent runs
  // inside the pipeline's own working dir with repoConfig.allowedTools as the
  // only restriction.
  const result = query({
    prompt,
    options: {
      cwd: workingDir,
      model: repoConfig.model,
      maxTurns: repoConfig.maxTurns,
      maxBudgetUsd: repoConfig.budgetPerTaskUsd,
      allowedTools: repoConfig.allowedTools,
      permissionMode: "bypassPermissions",
      allowDangerouslySkipPermissions: true,
      systemPrompt: {
        type: "preset",
        preset: "claude_code",
        append: CODER_SYSTEM_PROMPT_APPEND
      },
      // Resume a prior session when iterating on feedback.
      ...sessionId ? { resume: sessionId } : {}
    }
  });
  let resultMessage;
  // Stream messages: surface each tool_use as a progress line; capture the
  // terminal "result" message for interpretation below.
  for await (const message of result) {
    if (message.type === "assistant" && "content" in message) {
      const content = message.content;
      for (const block of content) {
        if (block.type === "tool_use" && block.name) {
          const detail = describeToolUse(block.name, block.input);
          log4.info({ tool: block.name }, detail);
          onProgress?.(detail);
        }
      }
    }
    if (message.type === "result") {
      resultMessage = message;
    }
  }
  if (!resultMessage) {
    throw new CodingAgentError("No result message received from coding agent", [], 0);
  }
  if (resultMessage.subtype === "error_max_turns") {
    // Max turns is tolerated: downstream steps (diff, tests, review) decide
    // whether the partial work is usable.
    log4.warn(
      {
        cost: resultMessage.total_cost_usd,
        turns: resultMessage.num_turns
      },
      "Coding agent hit max turns \u2014 continuing with partial work"
    );
    return {
      sessionId: resultMessage.session_id,
      costUsd: resultMessage.total_cost_usd,
      result: "Agent hit max turns limit. Partial work may have been completed.",
      numTurns: resultMessage.num_turns
    };
  }
  if (resultMessage.subtype !== "success") {
    throw new CodingAgentError(
      `Coding agent failed: ${resultMessage.subtype}`,
      "errors" in resultMessage ? resultMessage.errors : [],
      resultMessage.total_cost_usd
    );
  }
  log4.info(
    {
      cost: resultMessage.total_cost_usd,
      turns: resultMessage.num_turns
    },
    "Coding agent completed"
  );
  return {
    sessionId: resultMessage.session_id,
    costUsd: resultMessage.total_cost_usd,
    result: resultMessage.result,
    numTurns: resultMessage.num_turns
  };
}
|
|
1041
|
+
|
|
1042
|
+
// src/agents/planner.ts
|
|
1043
|
+
import { query as query2 } from "@anthropic-ai/claude-agent-sdk";
|
|
1044
|
+
import { z as z2 } from "zod";
|
|
1045
|
+
// Child logger scoped to the planning agent.
var log5 = createChildLogger("planner");
|
|
1046
|
+
// Zod schema for one ordered step of the planner's implementation plan.
// `order` is a 1-based positive integer used for sorting in buildPlanSection.
var ImplementationStepSchema = z2.object({
  order: z2.number().int().positive(),
  description: z2.string(),
  files: z2.array(z2.string()),
  rationale: z2.string()
});
|
|
1052
|
+
// Zod schema for the full plan returned by the planning agent. Must stay in
// sync with PLAN_JSON_SCHEMA below, which is handed to the SDK's structured
// output format; this schema is the runtime validator (see validatePlan).
var ImplementationPlanSchema = z2.object({
  approach: z2.string(),
  filesToModify: z2.array(z2.string()),
  filesToCreate: z2.array(z2.string()),
  estimatedComplexity: z2.enum(["low", "medium", "high"]),
  risks: z2.array(z2.string()),
  implementationSteps: z2.array(ImplementationStepSchema),
  estimatedTurns: z2.number().int().positive()
});
|
|
1061
|
+
// JSON Schema mirror of ImplementationPlanSchema, passed to the agent SDK as
// the structured output format. NOTE(review): this is looser than the zod
// schema (no int/positive constraints on numbers) — zod remains the source of
// truth at validation time.
var PLAN_JSON_SCHEMA = {
  type: "object",
  properties: {
    approach: { type: "string" },
    filesToModify: { type: "array", items: { type: "string" } },
    filesToCreate: { type: "array", items: { type: "string" } },
    estimatedComplexity: { type: "string", enum: ["low", "medium", "high"] },
    risks: { type: "array", items: { type: "string" } },
    implementationSteps: {
      type: "array",
      items: {
        type: "object",
        properties: {
          order: { type: "number" },
          description: { type: "string" },
          files: { type: "array", items: { type: "string" } },
          rationale: { type: "string" }
        },
        required: ["order", "description", "files", "rationale"]
      }
    },
    estimatedTurns: { type: "number" }
  },
  required: [
    "approach",
    "filesToModify",
    "filesToCreate",
    "estimatedComplexity",
    "risks",
    "implementationSteps",
    "estimatedTurns"
  ]
};
|
|
1094
|
+
/**
 * Extract a JSON payload from free-form planner output.
 *
 * Fix: the original tried the greedy brace match first. That regex spans from
 * the FIRST "{" to the LAST "}" in the text, so any prose before/after the
 * JSON that contains a brace corrupted the extraction — and it also made the
 * fenced-block branch effectively unreachable (fenced JSON contains braces).
 * We now prefer an explicit ```json fence and fall back to the brace match.
 *
 * @throws PipelineError when no JSON-looking payload can be found.
 */
function extractJSON(text) {
  // Prefer an explicitly fenced block — it has unambiguous boundaries.
  const fenceMatch = text.match(/```(?:json)?\s*\n?([\s\S]*?)\n?```/);
  if (fenceMatch) return fenceMatch[1].trim();
  // Fallback: greedy first-"{"-to-last-"}" span.
  const jsonMatch = text.match(/\{[\s\S]*\}/);
  if (jsonMatch) return jsonMatch[0];
  throw new PipelineError("Could not extract JSON from planner response");
}
|
|
1101
|
+
/**
 * Validate a raw planner payload against ImplementationPlanSchema.
 * Returns the typed plan on success; throws PipelineError listing every
 * failing field ("path: message, ...") otherwise.
 */
function validatePlan(parsed) {
  const outcome = ImplementationPlanSchema.safeParse(parsed);
  if (outcome.success) {
    return outcome.data;
  }
  const details = outcome.error.issues
    .map((issue) => `${issue.path.join(".")}: ${issue.message}`)
    .join(", ");
  throw new PipelineError(`Planner response validation failed: ${details}`);
}
|
|
1109
|
+
/**
 * Run the read-only planning agent and return a validated implementation plan.
 *
 * The agent gets only Read/Glob/Grep tools and a JSON-schema output format.
 * While streaming, tool usage is logged/forwarded to onProgress and the last
 * assistant text is kept as a fallback source for the plan JSON. The plan is
 * extracted in priority order: structured_output → result text (via
 * extractJSON) → last assistant message; "error_max_turns" still attempts
 * extraction, any other failure subtype throws PipelineError.
 *
 * @returns { plan, costUsd, numTurns }
 */
async function runPlanningAgent(params) {
  const { taskSpec, repoConfig, workingDir, onProgress } = params;
  const prompt = buildPlannerPrompt(taskSpec);
  log5.info(
    { task: taskSpec.title, model: repoConfig.plannerModel },
    "Starting planning agent"
  );
  const result = query2({
    prompt,
    options: {
      cwd: workingDir,
      model: repoConfig.plannerModel,
      maxTurns: 50,
      // Read-only exploration: matches the tool list promised in
      // PLANNER_SYSTEM_PROMPT.
      allowedTools: ["Read", "Glob", "Grep"],
      permissionMode: "bypassPermissions",
      allowDangerouslySkipPermissions: true,
      systemPrompt: PLANNER_SYSTEM_PROMPT,
      outputFormat: {
        type: "json_schema",
        schema: PLAN_JSON_SCHEMA
      }
    }
  });
  let resultMessage;
  let lastAssistantText = "";
  for await (const message of result) {
    if (message.type === "assistant" && "content" in message) {
      const content = message.content;
      for (const block of content) {
        if (block.type === "tool_use" && block.name) {
          const detail = describeToolUse(block.name, block.input);
          log5.info({ tool: block.name }, detail);
          onProgress?.(detail);
        }
      }
      // Keep the most recent assistant text as a JSON-extraction fallback.
      const textParts = content.filter((c) => c.type === "text" && c.text).map((c) => c.text);
      if (textParts.length > 0) {
        lastAssistantText = textParts.join("\n");
      }
    }
    if (message.type === "result") {
      resultMessage = message;
    }
  }
  if (!resultMessage) {
    throw new PipelineError("No result message received from planning agent");
  }
  log5.debug(
    {
      subtype: resultMessage.subtype,
      hasStructuredOutput: "structured_output" in resultMessage && !!resultMessage.structured_output,
      hasResult: "result" in resultMessage && !!resultMessage.result,
      hasLastAssistant: !!lastAssistantText,
      lastAssistantLength: lastAssistantText.length
    },
    "Planning agent result received"
  );
  const isSuccess = resultMessage.subtype === "success";
  const isMaxTurns = resultMessage.subtype === "error_max_turns";
  if (!isSuccess && !isMaxTurns) {
    const errorDetail = "errors" in resultMessage ? resultMessage.errors.join(", ") : "unknown";
    throw new PipelineError(
      `Planning agent failed (${resultMessage.subtype}): ${errorDetail}`
    );
  }
  if (isMaxTurns) {
    // Max turns is tolerated: the agent may still have emitted a usable plan.
    log5.warn("Planning agent hit max turns \u2014 attempting to extract plan");
  }
  // Locate the plan JSON, preferring the SDK's structured output.
  let parsed;
  if ("structured_output" in resultMessage && resultMessage.structured_output) {
    parsed = resultMessage.structured_output;
  } else if ("result" in resultMessage && resultMessage.result) {
    const jsonStr = extractJSON(resultMessage.result);
    try {
      parsed = JSON.parse(jsonStr);
    } catch {
      throw new PipelineError(
        `Planner returned invalid JSON: ${String(resultMessage.result).slice(0, 200)}`
      );
    }
  } else if (lastAssistantText) {
    log5.warn("No result output \u2014 extracting plan from last assistant message");
    const jsonStr = extractJSON(lastAssistantText);
    try {
      parsed = JSON.parse(jsonStr);
    } catch {
      throw new PipelineError(
        `Planner returned invalid JSON in assistant message: ${lastAssistantText.slice(0, 200)}`
      );
    }
  } else {
    throw new PipelineError("Planning agent produced no output");
  }
  const plan = validatePlan(parsed);
  log5.info(
    {
      complexity: plan.estimatedComplexity,
      steps: plan.implementationSteps.length,
      filesToModify: plan.filesToModify.length
    },
    "Plan created"
  );
  return {
    plan,
    costUsd: resultMessage.total_cost_usd,
    numTurns: resultMessage.num_turns
  };
}
|
|
1217
|
+
|
|
1218
|
+
// src/agents/reviewer.ts
|
|
1219
|
+
import { query as query3 } from "@anthropic-ai/claude-agent-sdk";
|
|
1220
|
+
import { z as z3 } from "zod";
|
|
1221
|
+
|
|
1222
|
+
// src/agents/prompts/reviewer-system.ts
|
|
1223
|
+
var REVIEWER_SYSTEM_PROMPT = `You are a senior code reviewer for an automated development pipeline.
|
|
1224
|
+
Your job is to review a code diff against a specification and provide structured feedback.
|
|
1225
|
+
|
|
1226
|
+
You must evaluate:
|
|
1227
|
+
1. **Spec Adherence**: Does the code implement all acceptance criteria?
|
|
1228
|
+
2. **Code Quality**: Is the code clean, maintainable, follows existing patterns?
|
|
1229
|
+
3. **Security**: Are there any security vulnerabilities, injection risks, exposed secrets?
|
|
1230
|
+
4. **Edge Cases**: Are the listed edge cases handled?
|
|
1231
|
+
5. **Tests**: Are tests included and do they cover the key scenarios?
|
|
1232
|
+
|
|
1233
|
+
You MUST respond with valid JSON matching this schema:
|
|
1234
|
+
{
|
|
1235
|
+
"approved": boolean,
|
|
1236
|
+
"summary": "string - brief overall assessment",
|
|
1237
|
+
"issues": [
|
|
1238
|
+
{
|
|
1239
|
+
"severity": "critical|major|minor|suggestion",
|
|
1240
|
+
"file": "path/to/file",
|
|
1241
|
+
"line": number_or_null,
|
|
1242
|
+
"description": "what's wrong",
|
|
1243
|
+
"suggestedFix": "how to fix it (optional, can be null)"
|
|
1244
|
+
}
|
|
1245
|
+
],
|
|
1246
|
+
"securityConcerns": ["string array, empty if none"],
|
|
1247
|
+
"specAdherence": {
|
|
1248
|
+
"met": ["criteria that are satisfied"],
|
|
1249
|
+
"unmet": ["criteria that are NOT satisfied"]
|
|
1250
|
+
}
|
|
1251
|
+
}
|
|
1252
|
+
|
|
1253
|
+
Rules:
|
|
1254
|
+
- Only mark "approved": true if ALL acceptance criteria are met AND there are zero critical/major issues.
|
|
1255
|
+
- Be specific about file paths and line numbers.
|
|
1256
|
+
- For review cycle > 0, focus on whether previous feedback was addressed.
|
|
1257
|
+
- Do NOT nitpick style if the code follows the repository's existing conventions.
|
|
1258
|
+
- Respond ONLY with the JSON object. No markdown fences, no explanation text.`;
|
|
1259
|
+
/**
 * Render the reviewer's user prompt: the original spec, the diff to review
 * (fenced as ```diff), and the 1-based review cycle number with guidance that
 * differs for initial vs. follow-up reviews. Pure string templating.
 */
function buildReviewerUserPrompt(taskSpec, diff, reviewCycle) {
  const criteria = taskSpec.acceptanceCriteria.map((c, i) => `${i + 1}. ${c}`).join("\n");
  const edges = taskSpec.edgeCases.length > 0 ? taskSpec.edgeCases.map((e) => `- ${e}`).join("\n") : "None specified.";
  const cycleNote = reviewCycle > 0 ? "This is a re-review after the coder addressed previous feedback. Focus on whether previous issues were fixed." : "This is the initial review.";
  return `## Original Specification

**Title**: ${taskSpec.title}

**Context**: ${taskSpec.context}

**Acceptance Criteria**:
${criteria}

**Edge Cases**:
${edges}

## Code Diff to Review

\`\`\`diff
${diff}
\`\`\`

## Review Cycle: ${reviewCycle + 1}
${cycleNote}

Provide your review as a JSON object matching the specified schema.`;
}
|
|
1283
|
+
|
|
1284
|
+
// src/agents/reviewer.ts
|
|
1285
|
+
// Child logger scoped to the reviewer agent.
var log6 = createChildLogger("reviewer");
|
|
1286
|
+
// Zod schema for one issue reported by the reviewer. `line`/`suggestedFix`
// may arrive as null or be absent; validateReviewResult normalizes both to
// undefined for downstream consumers.
var ReviewIssueSchema = z3.object({
  severity: z3.enum(["critical", "major", "minor", "suggestion"]),
  file: z3.string(),
  line: z3.number().nullable().optional(),
  description: z3.string(),
  suggestedFix: z3.string().nullable().optional()
});
|
|
1293
|
+
// Zod schema for the full reviewer verdict. Must stay in sync with the inline
// schema shown in REVIEWER_SYSTEM_PROMPT and with REVIEW_JSON_SCHEMA below.
var ReviewResultSchema = z3.object({
  approved: z3.boolean(),
  summary: z3.string(),
  issues: z3.array(ReviewIssueSchema),
  securityConcerns: z3.array(z3.string()),
  specAdherence: z3.object({
    met: z3.array(z3.string()),
    unmet: z3.array(z3.string())
  })
});
|
|
1303
|
+
// JSON Schema mirror of ReviewResultSchema, handed to the agent SDK as the
// structured output format. `line`/`suggestedFix` are nullable and optional
// (not listed in `required`), matching the zod schema.
var REVIEW_JSON_SCHEMA = {
  type: "object",
  properties: {
    approved: { type: "boolean" },
    summary: { type: "string" },
    issues: {
      type: "array",
      items: {
        type: "object",
        properties: {
          severity: { type: "string", enum: ["critical", "major", "minor", "suggestion"] },
          file: { type: "string" },
          line: { type: ["number", "null"] },
          description: { type: "string" },
          suggestedFix: { type: ["string", "null"] }
        },
        required: ["severity", "file", "description"]
      }
    },
    securityConcerns: { type: "array", items: { type: "string" } },
    specAdherence: {
      type: "object",
      properties: {
        met: { type: "array", items: { type: "string" } },
        unmet: { type: "array", items: { type: "string" } }
      },
      required: ["met", "unmet"]
    }
  },
  required: ["approved", "summary", "issues", "securityConcerns", "specAdherence"]
};
|
|
1334
|
+
/**
 * Extract a JSON payload from free-form reviewer output.
 *
 * Fix (same defect as the planner's extractJSON): the greedy brace match was
 * tried first, spanning from the FIRST "{" to the LAST "}" — prose around the
 * JSON containing braces corrupted the result, and the fence branch was
 * effectively dead because fenced JSON itself contains braces. Prefer the
 * fenced block, then fall back to the brace match.
 *
 * @throws ReviewerError when no JSON-looking payload can be found.
 */
function extractJSON2(text) {
  // Prefer an explicitly fenced block — unambiguous boundaries.
  const fenceMatch = text.match(/```(?:json)?\s*\n?([\s\S]*?)\n?```/);
  if (fenceMatch) return fenceMatch[1].trim();
  // Fallback: greedy first-"{"-to-last-"}" span.
  const jsonMatch = text.match(/\{[\s\S]*\}/);
  if (jsonMatch) return jsonMatch[0];
  throw new ReviewerError("Could not extract JSON from reviewer response");
}
|
|
1341
|
+
/**
 * Validate a raw reviewer payload against ReviewResultSchema and normalize it.
 * Nullable optional issue fields (line, suggestedFix) are coerced from null
 * to undefined so downstream code can rely on simple truthiness checks.
 * Throws ReviewerError listing every failing field when invalid.
 */
function validateReviewResult(parsed) {
  const outcome = ReviewResultSchema.safeParse(parsed);
  if (!outcome.success) {
    const details = outcome.error.issues
      .map((issue) => `${issue.path.join(".")}: ${issue.message}`)
      .join(", ");
    throw new ReviewerError(`Reviewer response validation failed: ${details}`);
  }
  const normalizedIssues = outcome.data.issues.map((issue) => ({
    ...issue,
    line: issue.line ?? undefined,
    suggestedFix: issue.suggestedFix ?? undefined
  }));
  return { ...outcome.data, issues: normalizedIssues };
}
|
|
1356
|
+
/**
 * Run the reviewer agent over a diff and return a validated review verdict.
 *
 * The reviewer gets no tools (tools: []) and a JSON-schema output format; it
 * judges purely from the spec and diff in the prompt. The verdict JSON is
 * extracted in priority order: structured_output → result text (via
 * extractJSON2) → last assistant message. "error_max_turns" still attempts
 * extraction; any other failure subtype throws ReviewerError.
 */
async function runReviewerAgent(params) {
  const { taskSpec, diff, repoConfig, reviewCycle } = params;
  log6.info(
    { model: repoConfig.reviewerModel, cycle: reviewCycle },
    "Starting reviewer agent"
  );
  const userPrompt = buildReviewerUserPrompt(taskSpec, diff, reviewCycle);
  const agentResult = query3({
    prompt: userPrompt,
    options: {
      model: repoConfig.reviewerModel,
      maxTurns: 50,
      // Review is prompt-only: no filesystem or shell access.
      tools: [],
      systemPrompt: REVIEWER_SYSTEM_PROMPT,
      permissionMode: "bypassPermissions",
      allowDangerouslySkipPermissions: true,
      outputFormat: {
        type: "json_schema",
        schema: REVIEW_JSON_SCHEMA
      }
    }
  });
  let resultMessage;
  let lastAssistantText = "";
  for await (const message of agentResult) {
    if (message.type === "assistant" && "content" in message) {
      // Keep the most recent assistant text as a JSON-extraction fallback.
      const textParts = message.content.filter((c) => c.type === "text" && c.text).map((c) => c.text);
      if (textParts.length > 0) {
        lastAssistantText = textParts.join("\n");
      }
    }
    if (message.type === "result") {
      resultMessage = message;
    }
  }
  if (!resultMessage) {
    throw new ReviewerError("No result message received from reviewer agent");
  }
  const isSuccess = resultMessage.subtype === "success";
  const isMaxTurns = resultMessage.subtype === "error_max_turns";
  if (!isSuccess && !isMaxTurns) {
    const errorDetail = "errors" in resultMessage ? resultMessage.errors.join(", ") : "unknown";
    throw new ReviewerError(`Reviewer agent failed (${resultMessage.subtype}): ${errorDetail}`);
  }
  if (isMaxTurns) {
    // Max turns is tolerated: a verdict may still be extractable.
    log6.warn("Reviewer hit max turns \u2014 attempting to extract result");
  }
  // Locate the verdict JSON, preferring the SDK's structured output.
  let parsed;
  if ("structured_output" in resultMessage && resultMessage.structured_output) {
    parsed = resultMessage.structured_output;
  } else if ("result" in resultMessage && resultMessage.result) {
    const jsonStr = extractJSON2(resultMessage.result);
    try {
      parsed = JSON.parse(jsonStr);
    } catch {
      throw new ReviewerError(`Reviewer returned invalid JSON: ${String(resultMessage.result).slice(0, 200)}`);
    }
  } else if (lastAssistantText) {
    log6.warn("No result output \u2014 extracting review from last assistant message");
    const jsonStr = extractJSON2(lastAssistantText);
    try {
      parsed = JSON.parse(jsonStr);
    } catch {
      throw new ReviewerError(
        `Reviewer returned invalid JSON in assistant message: ${lastAssistantText.slice(0, 200)}`
      );
    }
  } else {
    throw new ReviewerError("Reviewer agent produced no output");
  }
  const review = validateReviewResult(parsed);
  log6.info(
    {
      approved: review.approved,
      issueCount: review.issues.length,
      cycle: reviewCycle
    },
    "Review completed"
  );
  return review;
}
|
|
1437
|
+
|
|
1438
|
+
// src/agents/security-scanner.ts
import { query as query4 } from "@anthropic-ai/claude-agent-sdk";
import { z as z4 } from "zod";
// Module-scoped logger for the security-scanner agent.
var log7 = createChildLogger("security-scanner");
// Runtime (zod) schema for a single security finding reported by the agent.
var SecurityFindingSchema = z4.object({
  severity: z4.enum(["critical", "high", "medium", "low", "info"]),
  category: z4.string(),
  file: z4.string(),
  // Line number may be absent or explicitly null when the agent cannot pin one.
  line: z4.number().nullable().optional(),
  description: z4.string(),
  recommendation: z4.string()
});
// Runtime (zod) schema for the full report; used by validateReport below.
var SecurityReportSchema = z4.object({
  riskLevel: z4.enum(["critical", "high", "medium", "low", "none"]),
  findings: z4.array(SecurityFindingSchema),
  recommendations: z4.array(z4.string()),
  summary: z4.string()
});
// JSON-Schema mirror of SecurityReportSchema, passed to the agent SDK's
// structured-output option. Keep the two schemas in sync.
var SECURITY_REPORT_JSON_SCHEMA = {
  type: "object",
  properties: {
    riskLevel: { type: "string", enum: ["critical", "high", "medium", "low", "none"] },
    findings: {
      type: "array",
      items: {
        type: "object",
        properties: {
          severity: { type: "string", enum: ["critical", "high", "medium", "low", "info"] },
          category: { type: "string" },
          file: { type: "string" },
          line: { type: ["number", "null"] },
          description: { type: "string" },
          recommendation: { type: "string" }
        },
        // `line` is deliberately not required — findings without a line are valid.
        required: ["severity", "category", "file", "description", "recommendation"]
      }
    },
    recommendations: { type: "array", items: { type: "string" } },
    summary: { type: "string" }
  },
  required: ["riskLevel", "findings", "recommendations", "summary"]
};
// System prompt steering the reviewer model toward diff-scoped security analysis.
var SECURITY_SCANNER_SYSTEM_PROMPT = `You are a security-focused code reviewer for an automated development pipeline.
Your job is to analyze code diffs for security vulnerabilities and risks.

Focus areas:
1. **Injection**: SQL injection, command injection, XSS, template injection
2. **Authentication/Authorization**: Broken auth, missing access controls, privilege escalation
3. **Secrets**: Hardcoded credentials, API keys, tokens, connection strings
4. **Data Exposure**: Sensitive data in logs, error messages, or responses
5. **Dependencies**: Known vulnerable patterns, unsafe imports
6. **Input Validation**: Missing or inadequate validation, buffer overflows
7. **Cryptography**: Weak algorithms, improper random generation, insecure hashing
8. **Configuration**: Insecure defaults, debug mode in production, CORS misconfiguration

Rules:
- Only report actual vulnerabilities visible in the diff, not hypothetical ones.
- Be specific about file paths and line numbers.
- Provide actionable recommendations.
- Set riskLevel based on the highest severity finding (or "none" if no findings).
- Respond ONLY with the JSON object. No markdown fences, no explanation text.`;
/**
 * Pull the first-to-last-brace span out of the scanner's raw text reply.
 * Greedy by design: it grabs from the first `{` to the final `}` so that a
 * single JSON object surrounded by chatter is recovered intact.
 * Throws PipelineError when no brace-delimited span exists at all.
 */
function extractJSON3(text) {
  const braceSpan = /\{[\s\S]*\}/.exec(text);
  if (braceSpan === null) {
    throw new PipelineError("Could not extract JSON from security scanner response");
  }
  return braceSpan[0];
}
|
|
1504
|
+
/**
 * Validate a parsed security report against SecurityReportSchema and
 * normalize it: every finding's `line` is coerced from null to undefined.
 * Throws PipelineError with a joined list of zod issues on failure.
 */
function validateReport(parsed) {
  const outcome = SecurityReportSchema.safeParse(parsed);
  if (!outcome.success) {
    const details = outcome.error.issues
      .map((issue) => `${issue.path.join(".")}: ${issue.message}`)
      .join(", ");
    throw new PipelineError(`Security report validation failed: ${details}`);
  }
  const report = outcome.data;
  // null → undefined so downstream consumers can rely on optional semantics.
  const findings = report.findings.map((finding) => ({
    ...finding,
    line: finding.line ?? undefined
  }));
  return { ...report, findings };
}
|
|
1518
|
+
/**
 * Run the security-scanner agent over a code diff and return its report.
 *
 * Streams messages from the agent SDK, keeps the last assistant text as a
 * fallback, and prefers structured output > result text > assistant text when
 * extracting the JSON report. Throws PipelineError on agent failure, on
 * unparseable JSON, or when no output is produced at all.
 *
 * @param params - { taskSpec, diff, repoConfig }; uses repoConfig.reviewerModel.
 * @returns { report, costUsd } where report passed SecurityReportSchema.
 */
async function runSecurityScanner(params) {
  const { taskSpec, diff, repoConfig } = params;
  log7.info({ task: taskSpec.title, model: repoConfig.reviewerModel }, "Starting security scan");
  const prompt = `## Security Review Request

**Task**: ${taskSpec.title}
**Type**: ${taskSpec.type}

## Code Diff to Analyze

\`\`\`diff
${diff}
\`\`\`

Analyze this diff for security vulnerabilities. Provide your report as a JSON object matching the required schema.`;
  // No tools: the scanner only reads the diff embedded in the prompt.
  const result = query4({
    prompt,
    options: {
      model: repoConfig.reviewerModel,
      maxTurns: 50,
      tools: [],
      systemPrompt: SECURITY_SCANNER_SYSTEM_PROMPT,
      permissionMode: "bypassPermissions",
      allowDangerouslySkipPermissions: true,
      outputFormat: {
        type: "json_schema",
        schema: SECURITY_REPORT_JSON_SCHEMA
      }
    }
  });
  let resultMessage;
  let lastAssistantText = "";
  // Drain the message stream; remember the final result message and the most
  // recent assistant text (fallback source if no result payload arrives).
  for await (const message of result) {
    if (message.type === "assistant" && "content" in message) {
      const textParts = message.content.filter((c) => c.type === "text" && c.text).map((c) => c.text);
      if (textParts.length > 0) {
        lastAssistantText = textParts.join("\n");
      }
    }
    if (message.type === "result") {
      resultMessage = message;
    }
  }
  if (!resultMessage) {
    throw new PipelineError("No result message received from security scanner");
  }
  const isSuccess = resultMessage.subtype === "success";
  const isMaxTurns = resultMessage.subtype === "error_max_turns";
  // error_max_turns is tolerated — we still try to salvage a report below.
  if (!isSuccess && !isMaxTurns) {
    const errorDetail = "errors" in resultMessage ? resultMessage.errors.join(", ") : "unknown";
    throw new PipelineError(
      `Security scanner failed (${resultMessage.subtype}): ${errorDetail}`
    );
  }
  let parsed;
  // Extraction priority: structured output, then the result text, then the
  // last assistant message.
  if ("structured_output" in resultMessage && resultMessage.structured_output) {
    parsed = resultMessage.structured_output;
  } else if ("result" in resultMessage && resultMessage.result) {
    const jsonStr = extractJSON3(resultMessage.result);
    try {
      parsed = JSON.parse(jsonStr);
    } catch {
      throw new PipelineError(
        `Security scanner returned invalid JSON: ${String(resultMessage.result).slice(0, 200)}`
      );
    }
  } else if (lastAssistantText) {
    log7.warn("No result output \u2014 extracting report from last assistant message");
    const jsonStr = extractJSON3(lastAssistantText);
    try {
      parsed = JSON.parse(jsonStr);
    } catch {
      throw new PipelineError(
        `Security scanner returned invalid JSON in assistant message: ${lastAssistantText.slice(0, 200)}`
      );
    }
  } else {
    throw new PipelineError("Security scanner produced no output");
  }
  const report = validateReport(parsed);
  log7.info(
    {
      riskLevel: report.riskLevel,
      findingCount: report.findings.length
    },
    "Security scan completed"
  );
  return {
    report,
    costUsd: resultMessage.total_cost_usd
  };
}
|
|
1610
|
+
|
|
1611
|
+
// src/testing/test-runner.ts
import { execa } from "execa";
// Module-scoped logger for verification-command execution.
var log8 = createChildLogger("test-runner");
|
|
1614
|
+
/**
 * Execute one verification command (lint/build/test/run) in a working
 * directory and report pass/fail with combined output and wall-clock time.
 * Never rejects: command failure and launch failure both resolve to
 * `{ passed: false, ... }`.
 *
 * @param params - { workingDir, command, kind, timeoutMs? } (default 300 000 ms).
 */
async function runCommand(params) {
  const { workingDir, command, kind, timeoutMs = 3e5 } = params;
  // NOTE(review): naive whitespace split — assumes the configured command
  // contains no quoted arguments; confirm against config expectations.
  const [executable, ...commandArgs] = command.split(" ");
  log8.info({ kind, command, workingDir }, `Running ${kind} command`);
  const startedAt = Date.now();
  try {
    const execution = await execa(executable, commandArgs, {
      cwd: workingDir,
      timeout: timeoutMs,
      reject: false, // non-zero exit resolves instead of throwing
      all: true // interleave stdout/stderr into `all`
    });
    const durationMs = Date.now() - startedAt;
    const passed = execution.exitCode === 0;
    log8.info({ kind, passed, durationMs, exitCode: execution.exitCode }, `${kind} completed`);
    const output = execution.all ?? execution.stdout + "\n" + execution.stderr;
    return { passed, output, durationMs };
  } catch (error) {
    // Launch-level failures (e.g. spawn errors) are reported, not thrown.
    const durationMs = Date.now() - startedAt;
    const message = error instanceof Error ? error.message : String(error);
    log8.error({ kind, error: message, durationMs }, `${kind} execution failed`);
    return { passed: false, output: message, durationMs };
  }
}
|
|
1645
|
+
/**
 * Convenience wrapper: run the configured test command via runCommand with
 * kind "test". Same resolve-only contract as runCommand.
 */
async function runTests(params) {
  const { workingDir, testCommand, timeoutMs } = params;
  return runCommand({
    workingDir,
    command: testCommand,
    kind: "test",
    timeoutMs
  });
}
|
|
1653
|
+
|
|
1654
|
+
// src/notifications/sender.ts
// Module-scoped logger for notification delivery (Slack webhook + GitHub mentions).
var log9 = createChildLogger("notifications");
|
|
1656
|
+
/**
 * POST a message payload to a Slack incoming webhook.
 * Best-effort: HTTP errors and network failures are logged as warnings and
 * never propagate — notifications must not break the pipeline.
 */
async function sendSlackWebhook(webhookUrl, message) {
  try {
    const response = await fetch(webhookUrl, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(message)
    });
    if (response.ok) return;
    log9.warn({ status: response.status }, "Slack webhook request failed");
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    log9.warn({ error: reason }, "Failed to send Slack notification");
  }
}
|
|
1671
|
+
/**
 * Build the Slack payload for one pipeline lifecycle event.
 * Pure function of (state, event): returns `{ text }` combining an emoji
 * icon, the pipeline id, and an event-specific description.
 */
function formatSlackMessage(state, event) {
  const taskTitle = state.taskSpec?.title ?? "Unknown task";
  const cost = `$${state.totalCostUsd.toFixed(4)}`;
  let icon;
  let description;
  switch (event) {
    case "started":
      icon = ":rocket:";
      description = `Pipeline started for: *${taskTitle}*`;
      break;
    case "tests_passed":
      icon = ":white_check_mark:";
      description = `Tests passed for: *${taskTitle}*`;
      break;
    case "tests_failed":
      icon = ":x:";
      description = `Tests failed for: *${taskTitle}*`;
      break;
    case "review_approved":
      icon = ":thumbsup:";
      description = `Review approved for: *${taskTitle}*`;
      break;
    case "review_rejected":
      icon = ":eyes:";
      description = `Review rejected for: *${taskTitle}* (cycle ${state.reviewCycle + 1})`;
      break;
    case "completed":
      icon = ":tada:";
      description = `Pipeline completed for: *${taskTitle}*
PR: ${state.prUrl ?? "N/A"}
Cost: ${cost}`;
      break;
    case "failed":
      icon = ":rotating_light:";
      description = `Pipeline failed for: *${taskTitle}*
Error: ${state.error ?? "Unknown"}
Cost: ${cost}`;
      break;
  }
  return {
    text: `${icon} [X-Force ${state.id}] ${description}`
  };
}
|
|
1701
|
+
/**
 * Post an @-mention comment on the source issue for terminal events.
 * "failed" pings github.mentionOnFailure; "completed"/"review_approved" ping
 * github.mentionOnReview. No-op when the github config or the relevant
 * mention list is absent. Comment failures are logged, never thrown.
 */
async function postGitHubMentions(config, owner, repo, issueNumber, event, state) {
  const github = config.github;
  if (!github) return;
  let mentions = [];
  let message = "";
  if (event === "failed" && github.mentionOnFailure?.length) {
    mentions = github.mentionOnFailure;
    message = `X-Force pipeline failed. ${mentions.join(" ")} \u2014 please review.

**Error**: ${state.error ?? "Unknown"}`;
  }
  if ((event === "completed" || event === "review_approved") && github.mentionOnReview?.length) {
    mentions = github.mentionOnReview;
    message = `X-Force pipeline completed. ${mentions.join(" ")} \u2014 PR ready for review: ${state.prUrl ?? "N/A"}`;
  }
  if (!message) return;
  try {
    await addComment(owner, repo, issueNumber, message);
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    log9.warn({ error: reason }, "Failed to post GitHub mention");
  }
}
|
|
1725
|
+
/**
 * Fan out one pipeline event to every configured notification channel
 * (Slack webhook and GitHub mentions) in parallel. allSettled guarantees a
 * failing channel never blocks or fails the others.
 */
async function notify(params) {
  const { config, state, event, owner, repo, issueNumber } = params;
  if (!config) return;
  const deliveries = [];
  if (config.slack?.webhookUrl) {
    const payload = formatSlackMessage(state, event);
    deliveries.push(sendSlackWebhook(config.slack.webhookUrl, payload));
  }
  deliveries.push(postGitHubMentions(config, owner, repo, issueNumber, event, state));
  await Promise.allSettled(deliveries);
}
|
|
1736
|
+
|
|
1737
|
+
// src/pipeline/auto-merge.ts
var log10 = createChildLogger("auto-merge");
// Ordinal ranking of task sizes so a task's size can be compared against the
// configured auto-merge maximum (see isAutoMergeEligible).
var SIZE_ORDER = {
  xs: 1,
  s: 2,
  m: 3,
  l: 4,
  xl: 5
};
|
|
1746
|
+
/**
 * Decide whether a finished PR may be auto-merged under the repo's rules.
 * Collects every blocking reason (wrong task type, oversized task,
 * critical/high security findings) rather than short-circuiting, so the
 * caller can surface all of them at once.
 *
 * @returns { eligible, reasons } — eligible is true iff reasons is empty.
 */
function isAutoMergeEligible(params) {
  const { taskSpec, repoConfig, securityReport } = params;
  // Hard gate: the feature itself must be enabled.
  if (!repoConfig.autoMerge) {
    return { eligible: false, reasons: ["Auto-merge is disabled for this repo"] };
  }
  const rules = repoConfig.autoMergeRules;
  const reasons = [];
  if (!rules.types.includes(taskSpec.type)) {
    reasons.push(
      `Task type "${taskSpec.type}" is not in allowed types: [${rules.types.join(", ")}]`
    );
  }
  if (SIZE_ORDER[taskSpec.size] > SIZE_ORDER[rules.maxSize]) {
    reasons.push(`Task size "${taskSpec.size}" exceeds max size "${rules.maxSize}"`);
  }
  if (rules.requireCleanSecurityScan && securityReport) {
    const hasBlockingFinding = securityReport.findings.some(
      (finding) => finding.severity === "critical" || finding.severity === "high"
    );
    if (hasBlockingFinding) {
      reasons.push("Security scan found critical or high severity issues");
    }
  }
  if (reasons.length > 0) {
    log10.info({ reasons }, "PR not eligible for auto-merge");
  }
  return { eligible: reasons.length === 0, reasons };
}
|
|
1774
|
+
|
|
1775
|
+
// src/tracking/cost-tracker.ts
import { homedir } from "os";
import { join as join2 } from "path";
import { mkdir, appendFile, readFile } from "fs/promises";
var log11 = createChildLogger("cost-tracker");
// Pipeline run history is persisted as JSON Lines under ~/.xforce/history.jsonl.
var HISTORY_DIR = join2(homedir(), ".xforce");
var HISTORY_FILE = "history.jsonl";
|
|
1782
|
+
/**
 * Resolve the absolute path of the history JSONL file, honoring an optional
 * override directory (used by tests and custom installs).
 */
function getHistoryPath(basePath) {
  const directory = basePath ?? HISTORY_DIR;
  return join2(directory, HISTORY_FILE);
}
|
|
1785
|
+
/**
 * Flatten a finished pipeline state into the persisted history record.
 * Timestamps are normalized (state may hold Date objects from a live run or
 * ISO strings from rehydrated state) and emitted as ISO strings.
 */
function buildRecordFromState(state, repoConfig) {
  const asDate = (value) => (value instanceof Date ? value : new Date(value));
  const startedAt = asDate(state.startedAt);
  // Missing completedAt falls back to "now" so durationMs is always defined.
  const completedAt = asDate(state.completedAt ?? Date.now());
  const { taskSpec } = state;
  return {
    id: state.id,
    repo: `${repoConfig.owner}/${repoConfig.name}`,
    issueNumber: taskSpec.issueNumber,
    issueUrl: taskSpec.issueUrl,
    prNumber: state.prNumber,
    prUrl: state.prUrl,
    // Anything other than an explicit "failed" status is recorded as completed.
    status: state.status === "failed" ? "failed" : "completed",
    totalCostUsd: state.totalCostUsd,
    durationMs: completedAt.getTime() - startedAt.getTime(),
    reviewCycles: state.reviewCycle,
    model: repoConfig.model,
    startedAt: startedAt.toISOString(),
    completedAt: completedAt.toISOString(),
    taskTitle: taskSpec.title,
    taskType: taskSpec.type,
    taskSize: taskSpec.size,
    error: state.error
  };
}
|
|
1808
|
+
/**
 * Append one run record to the JSONL history file, creating the directory
 * on first use. Errors propagate to the caller (the orchestrator wraps this
 * call in a .catch so persistence failures never abort a pipeline).
 */
async function appendRecord(record, basePath) {
  const directory = basePath ?? HISTORY_DIR;
  await mkdir(directory, { recursive: true });
  const serialized = JSON.stringify(record) + "\n";
  await appendFile(join2(directory, HISTORY_FILE), serialized, "utf-8");
  log11.debug({ id: record.id, repo: record.repo }, "Persisted pipeline run record");
}
|
|
1816
|
+
/**
 * Read run records from the JSONL history file, optionally filtered by repo
 * and a [since, until] window on each record's startedAt. A missing file
 * yields an empty list; malformed lines are warned about and skipped.
 */
async function readRecords(filter, basePath) {
  const filePath = getHistoryPath(basePath);
  let raw;
  try {
    raw = await readFile(filePath, "utf-8");
  } catch {
    // No history yet — treat as empty.
    return [];
  }
  const matched = [];
  for (const rawLine of raw.split("\n")) {
    const line = rawLine.trim();
    if (line === "") continue;
    let record;
    try {
      record = JSON.parse(line);
    } catch {
      log11.warn("Skipping malformed line in history file");
      continue;
    }
    if (filter?.repo && record.repo !== filter.repo) continue;
    if (filter?.since && new Date(record.startedAt) < filter.since) continue;
    if (filter?.until && new Date(record.startedAt) > filter.until) continue;
    matched.push(record);
  }
  return matched;
}
|
|
1840
|
+
/**
 * Aggregate run records into totals: overall cost, run counts by outcome,
 * per-run averages, and cost broken down by repo. An empty input returns an
 * all-zero summary (avoiding division by zero).
 */
function summarize(records) {
  if (records.length === 0) {
    return {
      totalCostUsd: 0,
      totalRuns: 0,
      successfulRuns: 0,
      failedRuns: 0,
      avgCostPerRun: 0,
      avgDurationMs: 0,
      costByRepo: {}
    };
  }
  let totalCostUsd = 0;
  let totalDurationMs = 0;
  let successfulRuns = 0;
  let failedRuns = 0;
  const costByRepo = {};
  // Single pass accumulates every statistic at once.
  for (const record of records) {
    totalCostUsd += record.totalCostUsd;
    totalDurationMs += record.durationMs;
    if (record.status === "completed") successfulRuns += 1;
    if (record.status === "failed") failedRuns += 1;
    costByRepo[record.repo] = (costByRepo[record.repo] ?? 0) + record.totalCostUsd;
  }
  return {
    totalCostUsd,
    totalRuns: records.length,
    successfulRuns,
    failedRuns,
    avgCostPerRun: totalCostUsd / records.length,
    avgDurationMs: totalDurationMs / records.length,
    costByRepo
  };
}
|
|
1870
|
+
|
|
1871
|
+
// src/pipeline/orchestrator.ts
// Module-scoped logger for the top-level pipeline orchestrator.
var log12 = createChildLogger("orchestrator");
|
|
1873
|
+
/**
 * Move the pipeline to a new status: validates the state change, mutates
 * state.status, appends a timestamped log entry, and logs it.
 */
function transition(state, to, message) {
  validateTransition(state.status, to); // rejects illegal state changes
  state.status = to;
  const entry = { timestamp: new Date(), status: to, message };
  state.logs.push(entry);
  log12.info({ status: to }, message);
}
|
|
1879
|
+
/**
 * End-to-end pipeline for one GitHub issue: parse the issue into a task
 * spec, set up a branch, optionally plan, then run the code/review loop
 * under a timeout, finally updating labels/comments and persisting a cost
 * record. Returns the final pipeline state in both success and failure
 * paths (failures are captured into state.error, not rethrown).
 *
 * @param params - { config, issueUrl? } or { config, repoOwner, repoName, issueNumber }, plus optional localDir.
 */
async function runPipeline(params) {
  const { config } = params;
  let owner;
  let repo;
  let issueNumber;
  // Accept either a full issue URL or the (owner, repo, number) triple.
  if (params.issueUrl) {
    const parsed = parseIssueUrl(params.issueUrl);
    owner = parsed.owner;
    repo = parsed.repo;
    issueNumber = parsed.issueNumber;
  } else if (params.repoOwner && params.repoName && params.issueNumber) {
    owner = params.repoOwner;
    repo = params.repoName;
    issueNumber = params.issueNumber;
  } else {
    throw new PipelineError("Either issueUrl or (repoOwner + repoName + issueNumber) is required");
  }
  const repoConfig = resolveRepoConfig(config, owner, repo);
  // Fresh mutable pipeline state; transition() appends to state.logs.
  const state = {
    id: nanoid(12),
    taskSpec: null,
    status: "parsing_issue",
    branchName: "",
    reviewCycle: 0,
    testRetry: 0,
    totalCostUsd: 0,
    logs: [{ timestamp: /* @__PURE__ */ new Date(), status: "parsing_issue", message: "Pipeline started" }],
    startedAt: /* @__PURE__ */ new Date()
  };
  let workDir = null;
  let isLocal = false;
  let branchGit = null;
  const timeoutMs = repoConfig.timeoutMinutes * 60 * 1e3;
  try {
    // Reset outcome labels from any previous run; failures here are ignored.
    await removeLabel(owner, repo, issueNumber, repoConfig.labels.done).catch(() => {
    });
    await removeLabel(owner, repo, issueNumber, repoConfig.labels.failed).catch(() => {
    });
    await addLabel(owner, repo, issueNumber, repoConfig.labels.inProgress);
    log12.info({ owner, repo, issueNumber }, "Fetching and parsing issue");
    const issue = await getIssue(owner, repo, issueNumber);
    const labels = await getIssueLabels(owner, repo, issueNumber);
    const taskSpec = parseIssueBody({
      title: issue.title,
      body: issue.body ?? "",
      labels,
      issueNumber,
      issueUrl: issue.html_url,
      repoOwner: owner,
      repoName: repo
    });
    state.taskSpec = taskSpec;
    await notify({ config: config.notifications, state, event: "started", owner, repo, issueNumber });
    transition(state, "creating_branch", "Setting up repository and branch");
    // Either checks out a local working copy or prepares a temp clone.
    const branchResult = await setupBranch({
      owner,
      repo,
      defaultBranch: repoConfig.defaultBranch,
      issueNumber,
      issueTitle: taskSpec.title,
      branchPrefix: repoConfig.branchPrefix,
      localDir: params.localDir ?? repoConfig.localPath
    });
    const { branchName, git } = branchResult;
    workDir = branchResult.workDir;
    isLocal = branchResult.isLocal;
    branchGit = git;
    state.branchName = branchName;
    let plan;
    // Optional planning phase before coding.
    if (repoConfig.enablePlanning) {
      transition(state, "planning", `Planning with ${repoConfig.plannerModel}`);
      const planSpinner = ora(`Planning with ${repoConfig.plannerModel}`).start();
      const planningResult = await runPlanningAgent({
        taskSpec,
        repoConfig,
        workingDir: workDir,
        onProgress: (msg) => {
          planSpinner.text = `Planning: ${msg}`;
        }
      });
      plan = planningResult.plan;
      state.plan = plan;
      state.totalCostUsd += planningResult.costUsd;
      planSpinner.succeed(`Plan created (${plan.implementationSteps.length} steps, ${plan.estimatedComplexity} complexity, $${planningResult.costUsd.toFixed(2)})`);
      await addComment(owner, repo, issueNumber, formatPlanComment(plan));
    }
    // Core code/review loop, bounded by the repo's configured timeout.
    await withTimeout(
      codeReviewLoop(state, taskSpec, repoConfig, git, workDir, owner, repo, issueNumber, config.notifications, plan),
      timeoutMs,
      "Pipeline"
    );
    await removeLabel(owner, repo, issueNumber, repoConfig.labels.inProgress);
    await addLabel(owner, repo, issueNumber, repoConfig.labels.done);
    await addComment(
      owner,
      repo,
      issueNumber,
      `X-Force pipeline completed.

- **PR**: ${state.prUrl}
- **Cost**: $${state.totalCostUsd.toFixed(4)}
- **Review cycles**: ${state.reviewCycle}`
    );
    await notify({ config: config.notifications, state, event: "completed", owner, repo, issueNumber });
    state.completedAt = /* @__PURE__ */ new Date();
    // Persist the cost record; best-effort only.
    await appendRecord(buildRecordFromState(state, repoConfig)).catch((err) => {
      log12.warn({ error: err.message }, "Failed to persist cost tracking record");
    });
    if (state.status === "merging") {
      transition(state, "completed", "PR auto-merged");
    } else if (state.status !== "awaiting_human") {
      transition(state, "awaiting_human", "PR ready for human review");
    }
    return state;
  } catch (error) {
    // Capture the failure into state and report it, but do not rethrow.
    const message = error instanceof Error ? error.message : String(error);
    state.status = "failed";
    state.error = message;
    state.completedAt = /* @__PURE__ */ new Date();
    log12.error({ error: message }, "Pipeline failed");
    await notify({ config: config.notifications, state, event: "failed", owner, repo, issueNumber }).catch(() => {
    });
    await appendRecord(buildRecordFromState(state, repoConfig)).catch(() => {
    });
    try {
      await removeLabel(owner, repo, issueNumber, repoConfig.labels.inProgress);
      await addLabel(owner, repo, issueNumber, repoConfig.labels.failed);
      await addComment(
        owner,
        repo,
        issueNumber,
        `X-Force pipeline failed.

**Error**: ${message}
**Cost**: $${state.totalCostUsd.toFixed(4)}`
      );
    } catch {
      log12.warn("Failed to update issue labels/comments after pipeline failure");
    }
    return state;
  } finally {
    // Cleanup: restore a local checkout's default branch, or delete the temp clone.
    if (isLocal && branchGit) {
      await restoreDefaultBranch(branchGit, repoConfig.defaultBranch);
    } else if (workDir) {
      try {
        await rm(workDir, { recursive: true, force: true });
      } catch {
        log12.warn({ workDir }, "Failed to clean up working directory");
      }
    }
  }
}
|
|
2031
|
+
async function codeReviewLoop(state, taskSpec, repoConfig, git, workDir, owner, repo, issueNumber, notificationsConfig, plan) {
|
|
2032
|
+
let previousReview;
|
|
2033
|
+
let sessionId;
|
|
2034
|
+
for (let cycle = 0; cycle <= repoConfig.maxReviewCycles; cycle++) {
|
|
2035
|
+
state.reviewCycle = cycle;
|
|
2036
|
+
transition(state, "coding", `Coding with ${repoConfig.model} (cycle ${cycle + 1})`);
|
|
2037
|
+
const codeSpinner = ora(`Coding with ${repoConfig.model} (cycle ${cycle + 1})`).start();
|
|
2038
|
+
const codingResult = await runCodingAgent({
|
|
2039
|
+
taskSpec,
|
|
2040
|
+
repoConfig,
|
|
2041
|
+
workingDir: workDir,
|
|
2042
|
+
previousReview,
|
|
2043
|
+
sessionId,
|
|
2044
|
+
plan: cycle === 0 ? plan : void 0,
|
|
2045
|
+
onProgress: (msg) => {
|
|
2046
|
+
codeSpinner.text = `Coding: ${msg}`;
|
|
2047
|
+
}
|
|
2048
|
+
});
|
|
2049
|
+
state.totalCostUsd += codingResult.costUsd;
|
|
2050
|
+
sessionId = codingResult.sessionId;
|
|
2051
|
+
codeSpinner.succeed(`Coding complete (${codingResult.numTurns} turns, $${codingResult.costUsd.toFixed(2)})`);
|
|
2052
|
+
const commitSpinner = ora("Committing and pushing changes").start();
|
|
2053
|
+
const sha = await commitAndPush({
|
|
2054
|
+
git,
|
|
2055
|
+
branchName: state.branchName,
|
|
2056
|
+
message: `feat: ${taskSpec.title} (xforce cycle ${cycle + 1})`
|
|
2057
|
+
});
|
|
2058
|
+
if (!sha && cycle === 0) {
|
|
2059
|
+
commitSpinner.fail("No changes to commit");
|
|
2060
|
+
throw new PipelineError("Coding agent made no changes");
|
|
2061
|
+
}
|
|
2062
|
+
commitSpinner.succeed(sha ? `Committed ${sha.slice(0, 7)}` : "No new changes");
|
|
2063
|
+
transition(state, "running_tests", "Running verification checks");
|
|
2064
|
+
let allChecksPassed = false;
|
|
2065
|
+
for (let retry = 0; retry <= repoConfig.maxTestRetries; retry++) {
|
|
2066
|
+
state.testRetry = retry;
|
|
2067
|
+
let failedCheck = null;
|
|
2068
|
+
const retryLabel = retry > 0 ? ` (retry ${retry})` : "";
|
|
2069
|
+
if (repoConfig.lintCommand && !failedCheck) {
|
|
2070
|
+
const lintSpinner = ora(`Running lint${retryLabel}`).start();
|
|
2071
|
+
const lintResult = await runCommand({
|
|
2072
|
+
workingDir: workDir,
|
|
2073
|
+
command: repoConfig.lintCommand,
|
|
2074
|
+
kind: "lint"
|
|
2075
|
+
});
|
|
2076
|
+
if (lintResult.passed) {
|
|
2077
|
+
lintSpinner.succeed(`Lint passed (${(lintResult.durationMs / 1e3).toFixed(1)}s)`);
|
|
2078
|
+
} else {
|
|
2079
|
+
lintSpinner.fail("Lint failed");
|
|
2080
|
+
failedCheck = { kind: "lint", output: lintResult.output };
|
|
2081
|
+
}
|
|
2082
|
+
}
|
|
2083
|
+
if (repoConfig.buildCommand && !failedCheck) {
|
|
2084
|
+
const buildSpinner = ora(`Running build${retryLabel}`).start();
|
|
2085
|
+
const buildResult = await runCommand({
|
|
2086
|
+
workingDir: workDir,
|
|
2087
|
+
command: repoConfig.buildCommand,
|
|
2088
|
+
kind: "build"
|
|
2089
|
+
});
|
|
2090
|
+
if (buildResult.passed) {
|
|
2091
|
+
buildSpinner.succeed(`Build passed (${(buildResult.durationMs / 1e3).toFixed(1)}s)`);
|
|
2092
|
+
} else {
|
|
2093
|
+
buildSpinner.fail("Build failed");
|
|
2094
|
+
failedCheck = { kind: "build", output: buildResult.output };
|
|
2095
|
+
}
|
|
2096
|
+
}
|
|
2097
|
+
if (!failedCheck) {
|
|
2098
|
+
const testSpinner = ora(`Running tests${retryLabel}`).start();
|
|
2099
|
+
const testResult = await runTests({
|
|
2100
|
+
workingDir: workDir,
|
|
2101
|
+
testCommand: repoConfig.testCommand
|
|
2102
|
+
});
|
|
2103
|
+
if (testResult.passed) {
|
|
2104
|
+
testSpinner.succeed(`Tests passed (${(testResult.durationMs / 1e3).toFixed(1)}s)`);
|
|
2105
|
+
} else {
|
|
2106
|
+
testSpinner.fail("Tests failed");
|
|
2107
|
+
failedCheck = { kind: "test", output: testResult.output };
|
|
2108
|
+
}
|
|
2109
|
+
}
|
|
2110
|
+
if (repoConfig.runCommand && !failedCheck) {
|
|
2111
|
+
const runSpinner = ora(`Running verification${retryLabel}`).start();
|
|
2112
|
+
const runResult = await runCommand({
|
|
2113
|
+
workingDir: workDir,
|
|
2114
|
+
command: repoConfig.runCommand,
|
|
2115
|
+
kind: "run"
|
|
2116
|
+
});
|
|
2117
|
+
if (runResult.passed) {
|
|
2118
|
+
runSpinner.succeed(`Verification passed (${(runResult.durationMs / 1e3).toFixed(1)}s)`);
|
|
2119
|
+
} else {
|
|
2120
|
+
runSpinner.fail("Verification failed");
|
|
2121
|
+
failedCheck = { kind: "run", output: runResult.output };
|
|
2122
|
+
}
|
|
2123
|
+
}
|
|
2124
|
+
if (!failedCheck) {
|
|
2125
|
+
allChecksPassed = true;
|
|
2126
|
+
break;
|
|
2127
|
+
}
|
|
2128
|
+
if (retry < repoConfig.maxTestRetries) {
|
|
2129
|
+
transition(state, "coding", `Fixing ${failedCheck.kind} failures (retry ${retry + 1})`);
|
|
2130
|
+
const fixSpinner = ora(`Fixing ${failedCheck.kind} failures (retry ${retry + 1})`).start();
|
|
2131
|
+
const feedback = formatCommandFeedback(failedCheck.kind, failedCheck.output);
|
|
2132
|
+
const fixResult = await runCodingAgent({
|
|
2133
|
+
taskSpec,
|
|
2134
|
+
repoConfig,
|
|
2135
|
+
workingDir: workDir,
|
|
2136
|
+
...failedCheck.kind === "test" ? { testFailures: feedback } : { commandFailures: { kind: failedCheck.kind, output: failedCheck.output } },
|
|
2137
|
+
sessionId,
|
|
2138
|
+
onProgress: (msg) => {
|
|
2139
|
+
fixSpinner.text = `Fixing ${failedCheck.kind}: ${msg}`;
|
|
2140
|
+
}
|
|
2141
|
+
});
|
|
2142
|
+
state.totalCostUsd += fixResult.costUsd;
|
|
2143
|
+
sessionId = fixResult.sessionId;
|
|
2144
|
+
fixSpinner.succeed(`Fix applied (${fixResult.numTurns} turns, $${fixResult.costUsd.toFixed(2)})`);
|
|
2145
|
+
await commitAndPush({
|
|
2146
|
+
git,
|
|
2147
|
+
branchName: state.branchName,
|
|
2148
|
+
message: `fix: address ${failedCheck.kind} failures (xforce retry ${retry + 1})`
|
|
2149
|
+
});
|
|
2150
|
+
transition(state, "running_tests", `Re-running verification checks (retry ${retry + 1})`);
|
|
2151
|
+
}
|
|
2152
|
+
}
|
|
2153
|
+
if (!allChecksPassed) {
|
|
2154
|
+
throw new PipelineError(
|
|
2155
|
+
`Verification checks still failing after ${repoConfig.maxTestRetries} retries`
|
|
2156
|
+
);
|
|
2157
|
+
}
|
|
2158
|
+
if (!state.prNumber) {
|
|
2159
|
+
const { prNumber, prUrl } = await createPullRequest({
|
|
2160
|
+
owner,
|
|
2161
|
+
repo,
|
|
2162
|
+
branchName: state.branchName,
|
|
2163
|
+
defaultBranch: repoConfig.defaultBranch,
|
|
2164
|
+
taskSpec,
|
|
2165
|
+
pipeline: state
|
|
2166
|
+
});
|
|
2167
|
+
state.prNumber = prNumber;
|
|
2168
|
+
state.prUrl = prUrl;
|
|
2169
|
+
} else {
|
|
2170
|
+
await updatePullRequest({
|
|
2171
|
+
owner,
|
|
2172
|
+
repo,
|
|
2173
|
+
prNumber: state.prNumber,
|
|
2174
|
+
taskSpec,
|
|
2175
|
+
pipeline: state
|
|
2176
|
+
});
|
|
2177
|
+
}
|
|
2178
|
+
transition(state, "reviewing", `Reviewing with ${repoConfig.reviewerModel} (cycle ${cycle + 1})`);
|
|
2179
|
+
const reviewSpinner = ora(`Reviewing with ${repoConfig.reviewerModel} (cycle ${cycle + 1})`).start();
|
|
2180
|
+
const diff = await getPRDiff(owner, repo, state.prNumber);
|
|
2181
|
+
const [review, securityResult] = await Promise.all([
|
|
2182
|
+
runReviewerAgent({ taskSpec, diff, repoConfig, reviewCycle: cycle }),
|
|
2183
|
+
repoConfig.enableSecurityScan ? runSecurityScanner({ taskSpec, diff, repoConfig }) : Promise.resolve(void 0)
|
|
2184
|
+
]);
|
|
2185
|
+
if (securityResult) {
|
|
2186
|
+
state.totalCostUsd += securityResult.costUsd;
|
|
2187
|
+
}
|
|
2188
|
+
const reviewStatus = review.approved ? "Approved" : `Changes requested (${review.issues.length} issues)`;
|
|
2189
|
+
reviewSpinner.succeed(`Review: ${reviewStatus}`);
|
|
2190
|
+
await commentOnPR(
|
|
2191
|
+
owner,
|
|
2192
|
+
repo,
|
|
2193
|
+
state.prNumber,
|
|
2194
|
+
formatReviewComment(review, cycle)
|
|
2195
|
+
);
|
|
2196
|
+
if (securityResult && securityResult.report.findings.length > 0) {
|
|
2197
|
+
await commentOnPR(
|
|
2198
|
+
owner,
|
|
2199
|
+
repo,
|
|
2200
|
+
state.prNumber,
|
|
2201
|
+
formatSecurityComment(securityResult.report)
|
|
2202
|
+
);
|
|
2203
|
+
}
|
|
2204
|
+
if (review.approved) {
|
|
2205
|
+
await labelPR(owner, repo, state.prNumber, repoConfig.labels.done);
|
|
2206
|
+
const autoMergeCheck = isAutoMergeEligible({
|
|
2207
|
+
taskSpec,
|
|
2208
|
+
repoConfig,
|
|
2209
|
+
securityReport: securityResult?.report
|
|
2210
|
+
});
|
|
2211
|
+
if (autoMergeCheck.eligible) {
|
|
2212
|
+
transition(state, "merging", "Auto-merging approved PR");
|
|
2213
|
+
const mergeSpinner = ora("Auto-merging approved PR").start();
|
|
2214
|
+
const mergeResult = await mergePR({
|
|
2215
|
+
owner,
|
|
2216
|
+
repo,
|
|
2217
|
+
prNumber: state.prNumber,
|
|
2218
|
+
strategy: repoConfig.autoMergeRules.mergeStrategy,
|
|
2219
|
+
commitTitle: `${taskSpec.title} (#${state.prNumber})`
|
|
2220
|
+
});
|
|
2221
|
+
if (mergeResult.merged) {
|
|
2222
|
+
mergeSpinner.succeed(`PR auto-merged (${mergeResult.sha?.slice(0, 7)})`);
|
|
2223
|
+
await addComment(
|
|
2224
|
+
owner,
|
|
2225
|
+
repo,
|
|
2226
|
+
issueNumber,
|
|
2227
|
+
`X-Force auto-merged PR #${state.prNumber} (${repoConfig.autoMergeRules.mergeStrategy}).`
|
|
2228
|
+
);
|
|
2229
|
+
} else {
|
|
2230
|
+
mergeSpinner.fail(`Auto-merge failed: ${mergeResult.error}`);
|
|
2231
|
+
await commentOnPR(
|
|
2232
|
+
owner,
|
|
2233
|
+
repo,
|
|
2234
|
+
state.prNumber,
|
|
2235
|
+
`Auto-merge failed: ${mergeResult.error}
|
|
2236
|
+
|
|
2237
|
+
This PR requires manual merge.`
|
|
2238
|
+
);
|
|
2239
|
+
transition(state, "awaiting_human", "Auto-merge failed, awaiting human review");
|
|
2240
|
+
}
|
|
2241
|
+
}
|
|
2242
|
+
return;
|
|
2243
|
+
}
|
|
2244
|
+
if (cycle < repoConfig.maxReviewCycles) {
|
|
2245
|
+
previousReview = review;
|
|
2246
|
+
transition(state, "addressing_review", `Addressing review feedback (cycle ${cycle + 1})`);
|
|
2247
|
+
}
|
|
2248
|
+
}
|
|
2249
|
+
throw new PipelineError(
|
|
2250
|
+
`Review not approved after ${repoConfig.maxReviewCycles} cycles`
|
|
2251
|
+
);
|
|
2252
|
+
}
|
|
2253
|
+
/**
 * Render a security-scan report as a GitHub-flavored Markdown PR comment.
 *
 * @param {object} report - Scanner output: riskLevel, summary,
 *   findings ({severity, category, file, line?, description}[]),
 *   recommendations (string[]).
 * @returns {string} Markdown comment body.
 */
function formatSecurityComment(report) {
  // One Markdown table row per finding; the line number is appended only when present.
  const rows = [];
  for (const f of report.findings) {
    const where = `${f.file}${f.line ? ":" + f.line : ""}`;
    rows.push(`| ${f.severity} | ${f.category} | \`${where}\` | ${f.description} |`);
  }
  const findingsTable = rows.join("\n");
  // The recommendations section is omitted entirely when the list is empty.
  const recommendations = report.recommendations.length > 0 ? `### Recommendations
${report.recommendations.map((r) => `- ${r}`).join("\n")}` : "";
  return `## X-Force Security Scan - Risk Level: ${report.riskLevel.toUpperCase()}

### Summary
${report.summary}

### Findings
| Severity | Category | File | Description |
|----------|----------|------|-------------|
${findingsTable}

${recommendations}

---
*Security scan by X-Force AI Pipeline*`;
}
|
|
2273
|
+
/**
 * Render an implementation plan as a GitHub-flavored Markdown comment.
 *
 * @param {object} plan - Planner output: approach, estimatedComplexity,
 *   filesToModify (string[]), filesToCreate (string[]),
 *   implementationSteps ({order, description, files}[]), risks (string[]).
 * @returns {string} Markdown body suitable for a GitHub issue/PR comment.
 */
function formatPlanComment(plan) {
  // Fix: sort a copy — Array#sort mutates in place, and `plan` belongs to the
  // caller (the original sorted plan.implementationSteps directly).
  const steps = [...plan.implementationSteps].sort((a, b) => a.order - b.order).map(
    (s) => `${s.order}. **${s.description}**
   Files: ${s.files.map((f) => `\`${f}\``).join(", ")}`
  ).join("\n");
  // Empty lists fall back to placeholder text via `|| "..."` (empty string is falsy).
  return `## X-Force Implementation Plan

### Approach
${plan.approach}

### Estimated Complexity: ${plan.estimatedComplexity}

### Files to Modify
${plan.filesToModify.map((f) => `- \`${f}\``).join("\n") || "None"}

### Files to Create
${plan.filesToCreate.map((f) => `- \`${f}\``).join("\n") || "None"}

### Implementation Steps
${steps}

### Risks
${plan.risks.map((r) => `- ${r}`).join("\n") || "None identified"}

---
*Plan generated by X-Force AI Pipeline*`;
}
|
|
2300
|
+
/**
 * Render an AI review verdict as a GitHub-flavored Markdown PR comment.
 *
 * @param {object} review - Reviewer output: approved, summary,
 *   issues ({severity, file, line?, description}[]),
 *   specAdherence ({met, unmet}), securityConcerns (string[]).
 * @param {number} cycle - Zero-based review cycle; displayed one-based.
 * @returns {string} Markdown comment body.
 */
function formatReviewComment(review, cycle) {
  const verdict = review.approved ? "APPROVED" : "CHANGES REQUESTED";
  const signoff = review.approved ? "approved" : "requesting changes";
  // Issues table is rendered only when at least one issue exists.
  let issueTable = "";
  if (review.issues.length > 0) {
    const rows = review.issues.map((i) => `| ${i.severity} | \`${i.file}${i.line ? ":" + i.line : ""}\` | ${i.description} |`).join("\n");
    issueTable = `### Issues Found
| Severity | File | Description |
|----------|------|-------------|
${rows}
`;
  }
  const security = review.securityConcerns.length > 0 ? `### Security Concerns
${review.securityConcerns.map((c) => `- ${c}`).join("\n")}` : "";
  return `## X-Force AI Review (Cycle ${cycle + 1}) - ${verdict}

### Summary
${review.summary}

${issueTable}
### Spec Adherence
**Met**: ${review.specAdherence.met.join(", ") || "None yet"}
**Unmet**: ${review.specAdherence.unmet.join(", ") || "All met"}

${security}

---
*Automated review by X-Force (${signoff})*`;
}
|
|
2324
|
+
|
|
2325
|
+
// src/cli/commands/run.ts
|
|
2326
|
+
/**
 * CLI `run` command: execute the full X-Force pipeline for a single issue.
 *
 * @param {object} params - CLI arguments: configPath, issueUrl, issueNumber,
 *   and optionally repo as "owner/name" — exact shape comes from the yargs
 *   command definition, TODO confirm.
 * Exits the process with code 1 on any failure; returns normally on success.
 */
async function runCommand2(params) {
  const spinner = ora2("Loading configuration...").start();
  try {
    const config = loadConfig(params.configPath);
    spinner.succeed("Configuration loaded");
    // Optional explicit repo override, parsed from "owner/name".
    let owner;
    let name;
    if (params.repo) {
      const parts = params.repo.split("/");
      if (parts.length !== 2) {
        spinner.fail("Invalid repo format. Use owner/name");
        process.exit(1);
      }
      [owner, name] = parts;
    }
    spinner.start("Starting pipeline...");
    // owner/name stay undefined when --repo was not given; runPipeline is
    // presumably able to derive them from issueUrl — TODO confirm.
    const result = await runPipeline({
      issueUrl: params.issueUrl,
      repoOwner: owner,
      repoName: name,
      issueNumber: params.issueNumber,
      config
    });
    if (result.status === "failed") {
      // Failure summary, then a non-zero exit so scripted callers can detect it.
      spinner.fail(chalk.red(`Pipeline failed: ${result.error}`));
      console.log();
      console.log(chalk.dim(`Pipeline ID: ${result.id}`));
      console.log(chalk.dim(`Cost: $${result.totalCostUsd.toFixed(4)}`));
      process.exit(1);
    }
    // Success summary.
    spinner.succeed(chalk.green("Pipeline completed successfully"));
    console.log();
    console.log(` ${chalk.bold("Pipeline ID:")} ${result.id}`);
    console.log(` ${chalk.bold("PR:")} ${result.prUrl}`);
    console.log(` ${chalk.bold("Branch:")} ${result.branchName}`);
    console.log(` ${chalk.bold("Cost:")} $${result.totalCostUsd.toFixed(4)}`);
    console.log(` ${chalk.bold("Cycles:")} ${result.reviewCycle + 1}`);
    console.log();
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    spinner.fail(chalk.red(message));
    process.exit(1);
  }
}
|
|
2370
|
+
|
|
2371
|
+
// src/cli/commands/review.ts
|
|
2372
|
+
import ora3 from "ora";
|
|
2373
|
+
import chalk2 from "chalk";
|
|
2374
|
+
/**
 * CLI `review` command: run the AI reviewer against an existing PR and print
 * the verdict to stdout.
 *
 * Builds a minimal TaskSpec from the PR title/body (no acceptance criteria),
 * so the reviewer runs without an originating issue.
 * Exits with code 1 when the review is not approved or on any error.
 *
 * @param {object} params - CLI arguments: configPath and prUrl.
 */
async function reviewCommand(params) {
  const spinner = ora3("Loading configuration...").start();
  try {
    const config = loadConfig(params.configPath);
    spinner.succeed("Configuration loaded");
    const { owner, repo, prNumber } = parsePRUrl(params.prUrl);
    const repoConfig = resolveRepoConfig(config, owner, repo);
    spinner.start("Fetching PR details...");
    const pr = await getPR(owner, repo, prNumber);
    spinner.succeed(`PR #${prNumber}: ${pr.title}`);
    spinner.start("Fetching PR diff...");
    const diff = await getPRDiff(owner, repo, prNumber);
    spinner.succeed(`Diff fetched (${diff.length} chars)`);
    // Synthetic spec: defaults chosen because there is no source issue to
    // derive priority/type/size from.
    const taskSpec = {
      title: pr.title,
      context: pr.body ?? "No description provided.",
      acceptanceCriteria: [],
      affectedFiles: [],
      edgeCases: [],
      priority: "medium",
      type: "feature",
      size: "m",
      issueNumber: prNumber,
      issueUrl: pr.html_url,
      repoOwner: owner,
      repoName: repo
    };
    spinner.start("Running AI reviewer...");
    const review = await runReviewerAgent({
      taskSpec,
      diff,
      repoConfig,
      reviewCycle: 0
    });
    spinner.stop();
    console.log();
    const verdict = review.approved ? chalk2.green.bold("APPROVED") : chalk2.red.bold("CHANGES REQUESTED");
    console.log(` ${chalk2.bold("Verdict:")} ${verdict}`);
    console.log();
    console.log(` ${chalk2.bold("Summary:")}`);
    console.log(` ${review.summary}`);
    if (review.issues.length > 0) {
      console.log();
      console.log(` ${chalk2.bold("Issues:")} ${review.issues.length}`);
      for (const issue of review.issues) {
        // Color-code by severity: critical=red, major=yellow, everything else dimmed.
        const severity = issue.severity === "critical" ? chalk2.red(issue.severity) : issue.severity === "major" ? chalk2.yellow(issue.severity) : chalk2.dim(issue.severity);
        const location = issue.line ? `${issue.file}:${issue.line}` : issue.file;
        console.log(` ${severity} ${chalk2.cyan(location)} \u2014 ${issue.description}`);
        if (issue.suggestedFix) {
          console.log(` ${chalk2.dim("Fix:")} ${issue.suggestedFix}`);
        }
      }
    }
    if (review.securityConcerns.length > 0) {
      console.log();
      console.log(` ${chalk2.bold.red("Security Concerns:")}`);
      for (const concern of review.securityConcerns) {
        console.log(` - ${concern}`);
      }
    }
    if (review.specAdherence.unmet.length > 0) {
      console.log();
      console.log(` ${chalk2.bold("Unmet Criteria:")}`);
      for (const unmet of review.specAdherence.unmet) {
        console.log(` - ${unmet}`);
      }
    }
    console.log();
    // Non-zero exit makes the command usable as a CI gate.
    if (!review.approved) {
      process.exit(1);
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    spinner.fail(chalk2.red(message));
    process.exit(1);
  }
}
|
|
2451
|
+
|
|
2452
|
+
// src/cli/commands/init.ts
|
|
2453
|
+
import { join as join3, resolve as resolve3 } from "path";
|
|
2454
|
+
import { mkdir as mkdir2, writeFile as writeFile2, readFile as readFile2, access as access2 } from "fs/promises";
|
|
2455
|
+
import { fileURLToPath } from "url";
|
|
2456
|
+
import ora4 from "ora";
|
|
2457
|
+
import chalk3 from "chalk";
|
|
2458
|
+
/**
 * Locate the installed xforce package's `templates/` directory.
 *
 * Walks upward from this compiled module's location until a directory
 * containing package.json is found (at most 6 levels).
 *
 * @returns {Promise<string>} Absolute path to the templates directory.
 * @throws {Error} When no package.json is found within 6 parent levels.
 */
async function findTemplateDir() {
  let current = resolve3(fileURLToPath(import.meta.url), "..");
  let hops = 0;
  while (hops < 6) {
    if (await fileExists(join3(current, "package.json"))) {
      return join3(current, "templates");
    }
    current = resolve3(current, "..");
    hops++;
  }
  throw new Error("Could not find xforce package root (no package.json found)");
}
|
|
2470
|
+
/**
 * Build the initial `xforce.config.yaml` contents for a freshly initialized repo.
 *
 * Optional commands (build/lint/run) are emitted as commented-out example
 * lines when not supplied, so the user can enable them later.
 *
 * NOTE(review): the YAML body's leading indentation was reconstructed from a
 * whitespace-mangled source — confirm against templates/xforce.config.example.yaml.
 *
 * @param {object} params - repoOwner, repoName, defaultBranch, testCommand,
 *   and optional buildCommand, lintCommand, runCommand.
 * @returns {string} Complete YAML document text.
 */
function configTemplate(params) {
  const buildLine = params.buildCommand ? `    buildCommand: "${params.buildCommand}"` : `    # buildCommand: "npm run build"`;
  const lintLine = params.lintCommand ? `    lintCommand: "${params.lintCommand}"` : `    # lintCommand: "npm run lint"`;
  const runLine = params.runCommand ? `    runCommand: "${params.runCommand}"` : `    # runCommand: "node dist/index.js --health-check"`;
  return `version: "1"

defaults:
  model: "claude-opus-4-6"
  reviewerModel: "claude-opus-4-6"
  plannerModel: "claude-opus-4-6"
  maxTurns: 500
  maxReviewCycles: 5
  maxTestRetries: 5
  timeoutMinutes: 120
  budgetPerTaskUsd: 50.00
  branchPrefix: "xforce"
  enablePlanning: true
  enableSecurityScan: true
  labels:
    ready: "xforce:ready"
    inProgress: "xforce:in-progress"
    done: "xforce:done"
    failed: "xforce:failed"
  allowedTools:
    - Read
    - Write
    - Edit
    - Bash
    - Glob
    - Grep

repos:
  - owner: "${params.repoOwner}"
    name: "${params.repoName}"
    defaultBranch: "${params.defaultBranch}"
    testCommand: "${params.testCommand}"
${buildLine}
${lintLine}
${runLine}
    autoMerge: true
    autoMergeRules:
      types: [feature, bugfix, refactor, test, docs]
      maxSize: "xl"
      mergeStrategy: "squash"
      requireCleanSecurityScan: false
`;
}
|
|
2517
|
+
/**
 * Check whether a path exists and is accessible.
 *
 * @param {string} path - File-system path to probe.
 * @returns {Promise<boolean>} true when `access` succeeds, false otherwise.
 */
function fileExists(path) {
  return access2(path).then(
    () => true,
    () => false
  );
}
|
|
2525
|
+
/**
 * CLI `init` command: scaffold X-Force files into a target repository.
 *
 * Creates (never overwriting existing files):
 *   - .github/ISSUE_TEMPLATE/{feature-request,bug-fix,refactor}.yml
 *   - .env.xforce (secrets placeholder)
 *   - a .gitignore entry for .env.xforce
 *   - xforce.config.yaml (from configTemplate)
 * then prints a created/skipped summary and next steps.
 * Exits with code 1 on any error.
 *
 * @param {object} params - Optional: path, owner, name, branch, testCommand,
 *   buildCommand, lintCommand, runCommand.
 */
async function initCommand(params) {
  const targetDir = resolve3(params.path ?? ".");
  const spinner = ora4("Initializing X-Force...").start();
  try {
    // Fallbacks for anything not supplied on the command line; the repo name
    // defaults to the target directory's basename.
    const owner = params.owner ?? "your-org";
    const name = params.name ?? targetDir.split("/").pop() ?? "your-repo";
    const defaultBranch = params.branch ?? "main";
    const testCommand = params.testCommand ?? "npm test";
    const created = [];
    const skipped = [];
    // Copy bundled issue templates, skipping any the repo already has.
    const issueTemplateDir = join3(targetDir, ".github", "ISSUE_TEMPLATE");
    await mkdir2(issueTemplateDir, { recursive: true });
    const templateNames = ["feature-request.yml", "bug-fix.yml", "refactor.yml"];
    const srcTemplateDir = join3(await findTemplateDir(), "issue-templates");
    for (const tmpl of templateNames) {
      const destPath = join3(issueTemplateDir, tmpl);
      if (await fileExists(destPath)) {
        skipped.push(`.github/ISSUE_TEMPLATE/${tmpl}`);
      } else {
        const content = await readFile2(join3(srcTemplateDir, tmpl), "utf-8");
        await writeFile2(destPath, content, "utf-8");
        created.push(`.github/ISSUE_TEMPLATE/${tmpl}`);
      }
    }
    // Secrets placeholder file (never committed; see .gitignore handling below).
    const envPath = join3(targetDir, ".env.xforce");
    if (await fileExists(envPath)) {
      skipped.push(".env.xforce");
    } else {
      await writeFile2(
        envPath,
        `# X-Force environment variables
# This file is loaded automatically by xforce CLI
# IMPORTANT: Do not commit this file to git

GITHUB_TOKEN=
XFORCE_WEBHOOK_SECRET=
`,
        "utf-8"
      );
      created.push(".env.xforce");
    }
    // Ensure .gitignore contains an exact ".env.xforce" line.
    const gitignorePath = join3(targetDir, ".gitignore");
    let gitignoreContent = "";
    try {
      gitignoreContent = await readFile2(gitignorePath, "utf-8");
    } catch {
      // Missing .gitignore is fine; we create it below.
    }
    if (!gitignoreContent.split("\n").some((line) => line.trim() === ".env.xforce")) {
      const separator = gitignoreContent && !gitignoreContent.endsWith("\n") ? "\n" : "";
      await writeFile2(gitignorePath, gitignoreContent + separator + ".env.xforce\n", "utf-8");
      // NOTE(review): when an existing .gitignore is appended to, the change is
      // reported in neither `created` nor `skipped` — intentional? TODO confirm.
      if (gitignoreContent) {
      } else {
        created.push(".gitignore");
      }
    }
    // Main config file, rendered from the collected parameters.
    const configPath = join3(targetDir, "xforce.config.yaml");
    if (await fileExists(configPath)) {
      skipped.push("xforce.config.yaml");
    } else {
      await writeFile2(configPath, configTemplate({
        repoOwner: owner,
        repoName: name,
        defaultBranch,
        testCommand,
        buildCommand: params.buildCommand,
        lintCommand: params.lintCommand,
        runCommand: params.runCommand
      }), "utf-8");
      created.push("xforce.config.yaml");
    }
    // Summary output.
    spinner.succeed("X-Force initialized");
    console.log();
    if (created.length > 0) {
      console.log(chalk3.green(" Created:"));
      for (const file of created) {
        console.log(chalk3.green(` + ${file}`));
      }
    }
    if (skipped.length > 0) {
      console.log(chalk3.dim(" Skipped (already exists):"));
      for (const file of skipped) {
        console.log(chalk3.dim(` - ${file}`));
      }
    }
    console.log();
    console.log(chalk3.bold(" Next steps:"));
    console.log(` 1. Edit ${chalk3.cyan("xforce.config.yaml")} with your repo details`);
    console.log(` 2. Fill in your keys in ${chalk3.cyan(".env.xforce")}`);
    console.log(` 3. Run ${chalk3.cyan("xforce serve")} to start the webhook server`);
    console.log(` 4. Configure a GitHub webhook pointing to your server`);
    console.log(` 5. Create an issue, add the ${chalk3.cyan("xforce:ready")} label`);
    console.log();
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    spinner.fail(chalk3.red(`Init failed: ${message}`));
    process.exit(1);
  }
}
|
|
2623
|
+
|
|
2624
|
+
// src/cli/commands/costs.ts
|
|
2625
|
+
import chalk4 from "chalk";
|
|
2626
|
+
/**
 * Format a millisecond duration as "Ns" or "Mm Ns" (whole seconds, floored).
 *
 * @param {number} ms - Duration in milliseconds.
 * @returns {string} Human-readable duration.
 */
function formatDuration(ms) {
  const totalSeconds = Math.floor(ms / 1e3);
  if (totalSeconds >= 60) {
    return `${Math.floor(totalSeconds / 60)}m ${totalSeconds % 60}s`;
  }
  return `${totalSeconds}s`;
}
|
|
2633
|
+
/**
 * Format a USD amount to four decimal places with a dollar sign.
 *
 * @param {number} usd - Amount in US dollars.
 * @returns {string} e.g. "$0.1234".
 */
function formatCost(usd) {
  return "$" + usd.toFixed(4);
}
|
|
2636
|
+
/**
 * Print the aggregate cost summary, plus a per-repository breakdown when any
 * repo has recorded cost.
 *
 * @param {object} summary - Output of summarize(): totalRuns, successfulRuns,
 *   failedRuns, totalCostUsd, avgCostPerRun, avgDurationMs, costByRepo.
 */
function printSummary(summary) {
  console.log(chalk4.bold("\nCost Summary"));
  console.log("\u2500".repeat(50));
  const statLines = [
    ` Total runs: ${summary.totalRuns}`,
    ` Successful: ${chalk4.green(summary.successfulRuns.toString())}`,
    ` Failed: ${chalk4.red(summary.failedRuns.toString())}`,
    ` Total cost: ${chalk4.yellow(formatCost(summary.totalCostUsd))}`,
    ` Avg cost/run: ${formatCost(summary.avgCostPerRun)}`,
    ` Avg duration: ${formatDuration(summary.avgDurationMs)}`
  ];
  for (const line of statLines) {
    console.log(line);
  }
  const perRepo = Object.entries(summary.costByRepo);
  if (perRepo.length > 0) {
    console.log(chalk4.bold("\nCost by Repository"));
    console.log("\u2500".repeat(50));
    // Most expensive repositories first.
    for (const [repoName, cost] of perRepo.sort((x, y) => y[1] - x[1])) {
      console.log(` ${repoName.padEnd(35)} ${formatCost(cost)}`);
    }
  }
  console.log();
}
|
|
2655
|
+
/**
 * Print a per-run table of pipeline cost records to stdout, newest first.
 *
 * @param {Array<object>} records - Records with startedAt (ISO string), repo,
 *   issueNumber, status, totalCostUsd, durationMs, reviewCycles.
 */
function printDetailed(records) {
  console.log(chalk4.bold("\nPipeline Runs"));
  console.log("\u2500".repeat(90));
  console.log(
    chalk4.dim(
      " Date".padEnd(14) + "Repo".padEnd(25) + "Issue".padEnd(8) + "Status".padEnd(12) + "Cost".padEnd(12) + "Duration".padEnd(12) + "Cycles"
    )
  );
  console.log("\u2500".repeat(90));
  // Fix: sort a copy — Array#sort mutates in place and `records` belongs to
  // the caller (the original sorted the caller's array directly).
  const newestFirst = [...records].sort((a, b) => b.startedAt.localeCompare(a.startedAt));
  for (const r of newestFirst) {
    const date = new Date(r.startedAt).toISOString().slice(0, 10);
    const status = r.status === "completed" ? chalk4.green("completed") : chalk4.red("failed");
    // Status pads to 21 while the header column is 12 — presumably to
    // compensate for invisible ANSI color codes in `status`; TODO confirm.
    console.log(
      ` ${date.padEnd(14)}${r.repo.padEnd(25)}#${String(r.issueNumber).padEnd(7)}${status.padEnd(21)}${formatCost(r.totalCostUsd).padEnd(12)}${formatDuration(r.durationMs).padEnd(12)}${r.reviewCycles}`
    );
  }
  console.log();
}
|
|
2673
|
+
/**
 * CLI `costs` command: show pipeline cost history.
 *
 * @param {object} options - repo?, since?, until?, json?, detailed?.
 *   With --json the summary (and records when --detailed) are emitted as JSON.
 */
async function costsCommand(options) {
  // Default reporting window: the last 30 days when no `since` was given.
  const thirtyDaysMs = 30 * 24 * 60 * 60 * 1e3;
  const filter = {
    repo: options.repo,
    since: options.since ? new Date(options.since) : new Date(Date.now() - thirtyDaysMs),
    until: options.until ? new Date(options.until) : void 0
  };
  const records = await readRecords(filter);
  if (options.json) {
    // Machine-readable output; individual records only with --detailed.
    const payload = {
      summary: summarize(records),
      records: options.detailed ? records : void 0
    };
    console.log(JSON.stringify(payload, null, 2));
    return;
  }
  if (records.length === 0) {
    console.log(chalk4.dim("\nNo pipeline runs found for the specified period.\n"));
    return;
  }
  printSummary(summarize(records));
  if (options.detailed) {
    printDetailed(records);
  }
}
|
|
2695
|
+
|
|
2696
|
+
// src/cli/commands/serve.ts
|
|
2697
|
+
import chalk5 from "chalk";
|
|
2698
|
+
|
|
2699
|
+
// src/server/webhook.ts
|
|
2700
|
+
import { createServer } from "http";
|
|
2701
|
+
import crypto from "crypto";
|
|
2702
|
+
|
|
2703
|
+
// src/server/queue.ts
|
|
2704
|
+
import { nanoid as nanoid2 } from "nanoid";
|
|
2705
|
+
// Logger scoped to the in-process job queue.
var log13 = createChildLogger("queue");
// Fallback queue limits; overridable via the JobQueue constructor options.
var DEFAULT_OPTIONS = {
  maxSize: 10,
  historySize: 50
};
|
|
2710
|
+
// In-memory FIFO queue that runs pipeline jobs strictly one at a time.
// Invariants maintained by processNext(): at most one job is active, and a
// finished job is moved to `completed` (bounded by options.historySize).
var JobQueue = class {
  // Jobs waiting to run, oldest first.
  pending = [];
  // The currently running job, or null when idle.
  active = null;
  // Most-recent-first history of finished jobs (capped at historySize).
  completed = [];
  // Total number of jobs that have finished, successful or failed.
  totalProcessed = 0;
  // Re-entrancy guard so only one processNext() drain loop runs at a time.
  processing = false;
  options;
  processor;
  // processor: async (job) => result, invoked once per job.
  // options: partial { maxSize, historySize }, merged over DEFAULT_OPTIONS.
  constructor(processor, options) {
    this.processor = processor;
    this.options = { ...DEFAULT_OPTIONS, ...options };
  }
  // Enqueue a job and kick off processing. Throws when the queue is full or
  // when the same owner/repo/issue is already pending or currently active.
  enqueue(params) {
    if (this.pending.length >= this.options.maxSize) {
      throw new Error(`Queue is full (max ${this.options.maxSize})`);
    }
    const isDuplicate = this.isDuplicateIn(this.pending, params) || this.active && this.active.owner === params.owner && this.active.repo === params.repo && this.active.issueNumber === params.issueNumber;
    if (isDuplicate) {
      throw new Error(
        `Issue ${params.owner}/${params.repo}#${params.issueNumber} is already queued or active`
      );
    }
    const job = {
      id: nanoid2(12),
      owner: params.owner,
      repo: params.repo,
      issueNumber: params.issueNumber,
      issueUrl: params.issueUrl,
      status: "pending",
      enqueuedAt: /* @__PURE__ */ new Date()
    };
    this.pending.push(job);
    log13.info(
      { jobId: job.id, owner: job.owner, repo: job.repo, issue: job.issueNumber },
      "Job enqueued"
    );
    // Fire-and-forget: processNext() handles its own errors per job.
    void this.processNext();
    return job;
  }
  // Snapshot of queue state; pending/completed are copied so callers cannot
  // mutate internal arrays.
  getStatus() {
    return {
      active: this.active,
      pending: [...this.pending],
      completed: [...this.completed],
      totalProcessed: this.totalProcessed
    };
  }
  // Look up a job by id across active, pending, and completed.
  getJob(id) {
    if (this.active?.id === id) return this.active;
    return this.pending.find((j) => j.id === id) ?? this.completed.find((j) => j.id === id);
  }
  // Number of pending (not yet started) jobs.
  get size() {
    return this.pending.length;
  }
  // True when enqueue() would throw due to capacity.
  get isFull() {
    return this.pending.length >= this.options.maxSize;
  }
  // True when `jobs` already contains the same owner/repo/issue triple.
  isDuplicateIn(jobs, params) {
    return jobs.some(
      (j) => j.owner === params.owner && j.repo === params.repo && j.issueNumber === params.issueNumber
    );
  }
  // Drain loop: run the next pending job to completion, then recurse.
  // The `processing` flag prevents concurrent drains when enqueue() is called
  // while a job is running.
  async processNext() {
    if (this.processing || this.pending.length === 0) return;
    this.processing = true;
    const job = this.pending.shift();
    job.status = "running";
    job.startedAt = /* @__PURE__ */ new Date();
    this.active = job;
    log13.info(
      { jobId: job.id, owner: job.owner, repo: job.repo, issue: job.issueNumber },
      "Processing job"
    );
    try {
      const result = await this.processor(job);
      job.status = "completed";
      job.result = result;
    } catch (error) {
      // A failed job is recorded on the job itself; the queue keeps going.
      job.status = "failed";
      job.error = error instanceof Error ? error.message : String(error);
      log13.error(
        { jobId: job.id, error: job.error },
        "Job failed"
      );
    } finally {
      job.completedAt = /* @__PURE__ */ new Date();
      this.active = null;
      this.totalProcessed++;
      this.completed.unshift(job);
      if (this.completed.length > this.options.historySize) {
        this.completed = this.completed.slice(0, this.options.historySize);
      }
      // Clear the guard BEFORE recursing so the next drain can start.
      this.processing = false;
      log13.info(
        { jobId: job.id, status: job.status, totalProcessed: this.totalProcessed },
        "Job finished"
      );
      void this.processNext();
    }
  }
};
|
|
2811
|
+
|
|
2812
|
+
// src/server/webhook.ts
|
|
2813
|
+
// Logger scoped to the webhook HTTP server.
var log14 = createChildLogger("webhook-server");
// Maximum accepted request body: 1 MiB (1048576 bytes); larger bodies cause
// readBody() to reject and the handler to answer 413.
var MAX_BODY_SIZE = 1048576;
|
|
2815
|
+
/**
 * Buffer an incoming HTTP request body.
 *
 * Rejects with "Payload too large" (after destroying the request) once the
 * accumulated size exceeds MAX_BODY_SIZE; rejects on stream errors.
 *
 * @param {import("http").IncomingMessage} req - Request stream.
 * @returns {Promise<Buffer>} The concatenated body.
 */
function readBody(req) {
  return new Promise((resolve4, reject) => {
    const received = [];
    let total = 0;
    req.on("data", (part) => {
      total += part.length;
      if (total > MAX_BODY_SIZE) {
        req.destroy();
        reject(new Error("Payload too large"));
      } else {
        received.push(part);
      }
    });
    req.on("end", () => {
      resolve4(Buffer.concat(received));
    });
    req.on("error", reject);
  });
}
|
|
2832
|
+
/**
 * Verify a GitHub webhook HMAC signature ("sha256=<hex>" header format).
 *
 * @param {Buffer} payload - Raw request body.
 * @param {string} signature - Value of the x-hub-signature-256 header.
 * @param {string} secret - Shared webhook secret.
 * @returns {boolean} true when the signature matches.
 */
function verifySignature(payload, signature, secret) {
  const digest = crypto.createHmac("sha256", secret).update(payload).digest("hex");
  const expected = `sha256=${digest}`;
  // timingSafeEqual throws on length mismatch, so guard the length first;
  // the comparison itself stays constant-time.
  return expected.length === signature.length && crypto.timingSafeEqual(Buffer.from(expected), Buffer.from(signature));
}
|
|
2837
|
+
/**
 * Write a JSON response and end the stream.
 *
 * @param {import("http").ServerResponse} res - Response to write to.
 * @param {number} statusCode - HTTP status code.
 * @param {object} body - Value serialized with JSON.stringify.
 */
function sendJson(res, statusCode, body) {
  const headers = { "Content-Type": "application/json" };
  res.writeHead(statusCode, headers);
  res.end(JSON.stringify(body));
}
|
|
2841
|
+
/**
 * Build the GitHub-webhook HTTP server.
 *
 * Wires a JobQueue (whose worker runs the pipeline for one issue) to three
 * HTTP endpoints — GET /health, GET /status, POST /webhook — and returns a
 * small facade with start/stop/getQueueStatus/address.
 *
 * @param config  loaded xforce config; reads `defaults.labels.ready` and `repos`
 * @param options `{ port, host, secret, maxQueueSize?, historySize? }`
 * @returns `{ start(), stop(), getQueueStatus(), address() }`
 */
function createWebhookServer(config, options) {
  const startTime = Date.now();
  const readyLabel = config.defaults.labels.ready;
  // Queue worker: resolve the per-repo config (for an optional local
  // checkout path) and run the full pipeline for the queued issue.
  const queue = new JobQueue(
    async (job) => {
      const repoConf = config.repos.find((r) => r.owner === job.owner && r.name === job.repo);
      return runPipeline({
        repoOwner: job.owner,
        repoName: job.repo,
        issueNumber: job.issueNumber,
        config,
        localDir: repoConf?.localPath
      });
    },
    {
      maxSize: options.maxQueueSize ?? 10,
      historySize: options.historySize ?? 50
    }
  );
  // GET /health — liveness probe with uptime in whole seconds.
  function handleHealth(_req, res) {
    sendJson(res, 200, {
      status: "ok",
      uptime: Math.floor((Date.now() - startTime) / 1e3)
    });
  }
  // GET /status — current queue snapshot, delegated to JobQueue.
  function handleStatus(_req, res) {
    sendJson(res, 200, queue.getStatus());
  }
  // POST /webhook — validate, filter, and enqueue a GitHub issue event.
  // Order matters: the raw body is read and the HMAC signature verified
  // BEFORE JSON parsing, so unauthenticated payloads are never parsed.
  async function handleWebhook(req, res) {
    let body;
    try {
      body = await readBody(req);
    } catch {
      // readBody presumably enforces a size cap — its only failure here is
      // mapped to 413. TODO confirm readBody's rejection conditions.
      sendJson(res, 413, { error: "Payload too large" });
      return;
    }
    const signature = req.headers["x-hub-signature-256"];
    if (!signature) {
      sendJson(res, 401, { error: "Missing signature" });
      return;
    }
    if (!verifySignature(body, signature, options.secret)) {
      sendJson(res, 401, { error: "Invalid signature" });
      return;
    }
    let payload;
    try {
      payload = JSON.parse(body.toString("utf-8"));
    } catch {
      sendJson(res, 400, { error: "Invalid JSON" });
      return;
    }
    const event = req.headers["x-github-event"];
    if (event !== "issues") {
      sendJson(res, 200, { ignored: true, reason: `Event type "${event}" not handled` });
      return;
    }
    // Only two actions trigger work:
    //  - "labeled" with the configured ready label, or
    //  - "reopened" on an issue that already carries the ready label.
    // Everything else is acknowledged with 200 so GitHub does not retry.
    if (payload.action === "labeled") {
      const labelName = payload.label?.name;
      if (labelName !== readyLabel) {
        sendJson(res, 200, { ignored: true, reason: `Label "${labelName}" does not match ready label "${readyLabel}"` });
        return;
      }
    } else if (payload.action === "reopened") {
      const labels = payload.issue?.labels;
      const hasReadyLabel = labels?.some((l) => l.name === readyLabel);
      if (!hasReadyLabel) {
        sendJson(res, 200, { ignored: true, reason: `Reopened issue does not have "${readyLabel}" label` });
        return;
      }
    } else {
      sendJson(res, 200, { ignored: true, reason: `Action "${payload.action}" not handled` });
      return;
    }
    const owner = payload.repository?.owner?.login;
    const repo = payload.repository?.name;
    const issueNumber = payload.issue?.number;
    const issueUrl = payload.issue?.html_url;
    if (!owner || !repo || !issueNumber || !issueUrl) {
      sendJson(res, 400, { error: "Missing required fields in payload" });
      return;
    }
    // Only repos listed in the config are eligible; 422 signals a
    // well-formed but unservable request.
    const repoConfigured = config.repos.some((r) => r.owner === owner && r.name === repo);
    if (!repoConfigured) {
      sendJson(res, 422, { error: `Repository ${owner}/${repo} not configured` });
      return;
    }
    try {
      const job = queue.enqueue({ owner, repo, issueNumber, issueUrl });
      log14.info(
        { jobId: job.id, owner, repo, issueNumber },
        "Webhook accepted, job enqueued"
      );
      sendJson(res, 202, { accepted: true, jobId: job.id, position: queue.size });
    } catch (error) {
      // enqueue signals capacity/duplicate conditions via thrown Errors;
      // map them to HTTP by message substring (fragile coupling to
      // JobQueue's wording — NOTE(review): consider error codes instead).
      const message = error instanceof Error ? error.message : String(error);
      if (message.includes("already queued")) {
        sendJson(res, 409, { error: message });
      } else if (message.includes("Queue is full")) {
        sendJson(res, 503, { error: message });
      } else {
        sendJson(res, 500, { error: message });
      }
    }
  }
  // Router: exact method+path matches only; anything else is 404.
  const server = createServer(async (req, res) => {
    const method = req.method ?? "GET";
    const url = req.url ?? "/";
    log14.debug({ method, url }, "Request received");
    try {
      if (method === "GET" && url === "/health") {
        handleHealth(req, res);
      } else if (method === "GET" && url === "/status") {
        handleStatus(req, res);
      } else if (method === "POST" && url === "/webhook") {
        await handleWebhook(req, res);
      } else {
        sendJson(res, 404, { error: "Not found" });
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      log14.error({ error: message }, "Request handler error");
      // Guard against double-send if a handler failed mid-response.
      if (!res.headersSent) {
        sendJson(res, 500, { error: "Internal server error" });
      }
    }
  });
  return {
    // Resolves once the listener is bound; rejects on listen errors
    // (e.g. port in use) via the server "error" event.
    start() {
      return new Promise((resolve4, reject) => {
        server.listen(options.port, options.host, () => {
          log14.info({ port: options.port, host: options.host }, "Webhook server started");
          resolve4();
        });
        server.on("error", reject);
      });
    },
    // Stops accepting connections; resolves when the server has closed.
    // Does not cancel jobs already handed to the queue worker.
    stop() {
      return new Promise((resolve4) => {
        server.close(() => {
          log14.info("Webhook server stopped");
          resolve4();
        });
      });
    },
    getQueueStatus() {
      return queue.getStatus();
    },
    // Bound address, or null before listen / for pipe-style addresses.
    address() {
      const addr = server.address();
      if (!addr || typeof addr === "string") return null;
      return { host: addr.address, port: addr.port };
    }
  };
}
|
|
2996
|
+
|
|
2997
|
+
// src/cli/commands/serve.ts
|
|
2998
|
+
/**
 * `xforce serve` — start the webhook server and print connection details.
 *
 * Resolves the webhook secret (flag first, then XFORCE_WEBHOOK_SECRET),
 * loads the config, starts the server, installs SIGINT/SIGTERM shutdown
 * handlers, and prints setup instructions. Exits the process with code 1
 * on a missing secret or a config-load failure.
 *
 * @param params `{ secret?, configPath?, port, host, maxQueueSize? }`
 */
async function serveCommand(params) {
  const secret = params.secret ?? process.env.XFORCE_WEBHOOK_SECRET;
  if (!secret) {
    console.error(
      chalk5.red("Webhook secret is required. Use --secret or set XFORCE_WEBHOOK_SECRET.")
    );
    process.exit(1);
  }
  let config;
  try {
    config = loadConfig(params.configPath);
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    console.error(chalk5.red(`Failed to load config: ${message}`));
    process.exit(1);
  }
  // historySize is not exposed on the CLI; the server falls back to its
  // own default for it.
  const server = createWebhookServer(config, {
    port: params.port,
    host: params.host,
    secret,
    maxQueueSize: params.maxQueueSize
  });
  // Graceful shutdown: close the listener, then exit. NOTE(review): a job
  // already running in the queue is not awaited here.
  const shutdown = async () => {
    console.log(chalk5.dim("\nShutting down..."));
    await server.stop();
    process.exit(0);
  };
  process.on("SIGINT", shutdown);
  process.on("SIGTERM", shutdown);
  await server.start();
  // Prefer the actually-bound address (covers port 0 / ephemeral ports);
  // fall back to the requested host/port if unavailable.
  const addr = server.address();
  const url = addr ? `http://${addr.host}:${addr.port}` : `http://${params.host}:${params.port}`;
  console.log();
  console.log(chalk5.bold("X-Force Webhook Server"));
  console.log();
  console.log(` ${chalk5.bold("URL:")} ${url}`);
  console.log(` ${chalk5.bold("Ready label:")} ${config.defaults.labels.ready}`);
  console.log(` ${chalk5.bold("Repos:")} ${config.repos.map((r) => `${r.owner}/${r.name}`).join(", ")}`);
  console.log();
  console.log(chalk5.dim("Endpoints:"));
  console.log(chalk5.dim(` GET  ${url}/health \u2014 health check`));
  console.log(chalk5.dim(` GET  ${url}/status \u2014 queue status`));
  console.log(chalk5.dim(` POST ${url}/webhook \u2014 GitHub webhook receiver`));
  console.log();
  console.log(chalk5.dim("Configure your GitHub repo webhook:"));
  console.log(chalk5.dim(` Payload URL: ${url}/webhook`));
  console.log(chalk5.dim(` Content type: application/json`));
  console.log(chalk5.dim(` Secret: (your webhook secret)`));
  console.log(chalk5.dim(` Events: Issues`));
  console.log();
  console.log(chalk5.green("Listening for webhook events... (Ctrl+C to stop)"));
}
|
|
3050
|
+
|
|
3051
|
+
// src/cli/index.ts
// CLI entry point: load .env.xforce into process.env (without overriding
// existing variables), then register and dispatch the yargs commands.
loadEnvFile();
yargs(hideBin(process.argv)).scriptName("xforce").usage("$0 <command> [options]").command(
  // `run` — execute the pipeline on one issue, identified either by full
  // URL (--issue) or by --repo + --issue-number; the .check() enforces
  // that at least one of the two forms is present.
  "run",
  "Run the X-Force pipeline on a GitHub issue",
  (y) => y.option("issue", {
    alias: "i",
    type: "string",
    description: "GitHub issue URL (e.g., https://github.com/org/repo/issues/42)"
  }).option("repo", {
    alias: "r",
    type: "string",
    description: "Repository in owner/name format"
  }).option("issue-number", {
    alias: "n",
    type: "number",
    description: "Issue number (used with --repo)"
  }).option("config", {
    alias: "c",
    type: "string",
    description: "Path to xforce.config.yaml"
  }).check((argv) => {
    if (!argv.issue && !(argv.repo && argv.issueNumber)) {
      throw new Error("Provide --issue <url> or --repo <owner/name> --issue-number <n>");
    }
    return true;
  }),
  async (argv) => {
    await runCommand2({
      issueUrl: argv.issue,
      repo: argv.repo,
      issueNumber: argv.issueNumber,
      configPath: argv.config
    });
  }
).command(
  // `init` — scaffold X-Force config/templates in a target repository.
  "init",
  "Initialize X-Force in a target repository",
  (y) => y.option("path", {
    type: "string",
    description: "Target repository path (defaults to current directory)"
  }).option("owner", {
    type: "string",
    description: "GitHub repo owner"
  }).option("name", {
    type: "string",
    description: "GitHub repo name"
  }).option("branch", {
    type: "string",
    description: "Default branch name",
    default: "main"
  }).option("test-command", {
    type: "string",
    description: "Test command for the repo",
    default: "npm test"
  }).option("build-command", {
    type: "string",
    description: "Build command for the repo"
  }).option("lint-command", {
    type: "string",
    description: "Lint command for the repo"
  }).option("run-command", {
    type: "string",
    description: "Run/verification command for the repo"
  }),
  async (argv) => {
    await initCommand({
      path: argv.path,
      owner: argv.owner,
      name: argv.name,
      branch: argv.branch,
      testCommand: argv.testCommand,
      buildCommand: argv.buildCommand,
      lintCommand: argv.lintCommand,
      runCommand: argv.runCommand
    });
  }
).command(
  // `review` — run the AI reviewer against an existing pull request.
  "review",
  "Run the AI reviewer on a pull request",
  (y) => y.option("pr", {
    alias: "p",
    type: "string",
    description: "GitHub PR URL (e.g., https://github.com/org/repo/pull/42)",
    demandOption: true
  }).option("config", {
    alias: "c",
    type: "string",
    description: "Path to xforce.config.yaml"
  }),
  async (argv) => {
    await reviewCommand({
      prUrl: argv.pr,
      configPath: argv.config
    });
  }
).command(
  // `costs` — cost-tracking dashboard with optional repo/date filters.
  "costs",
  "Show cost tracking dashboard",
  (y) => y.option("repo", { type: "string", description: "Filter by repo (owner/name)" }).option("since", { type: "string", description: "Start date (YYYY-MM-DD)" }).option("until", { type: "string", description: "End date (YYYY-MM-DD)" }).option("detailed", { type: "boolean", description: "Show individual runs", default: false }).option("json", { type: "boolean", description: "Output as JSON", default: false }),
  async (argv) => {
    await costsCommand({
      repo: argv.repo,
      since: argv.since,
      until: argv.until,
      detailed: argv.detailed,
      json: argv.json
    });
  }
).command(
  // `serve` — long-running webhook server; default port 3000 (3e3),
  // bound on all interfaces unless --host narrows it.
  "serve",
  "Start the webhook server to receive GitHub events",
  (y) => y.option("port", {
    alias: "p",
    type: "number",
    description: "Port to listen on",
    default: 3e3
  }).option("host", {
    type: "string",
    description: "Host to bind to",
    default: "0.0.0.0"
  }).option("secret", {
    alias: "s",
    type: "string",
    description: "GitHub webhook secret (or set XFORCE_WEBHOOK_SECRET)"
  }).option("config", {
    alias: "c",
    type: "string",
    description: "Path to xforce.config.yaml"
  }).option("max-queue-size", {
    type: "number",
    description: "Maximum pending jobs in queue",
    default: 10
  }),
  async (argv) => {
    await serveCommand({
      port: argv.port,
      host: argv.host,
      secret: argv.secret,
      configPath: argv.config,
      maxQueueSize: argv.maxQueueSize
    });
  }
  // strict(): reject unknown commands/flags; demandCommand(1): require a
  // subcommand instead of printing nothing.
).demandCommand(1, "Please specify a command").strict().help().version().parse();
//# sourceMappingURL=index.js.map
|