@jiraacp/cli 2026.405.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +283 -0
- package/dist/abort-GQE4OI5S.js +103 -0
- package/dist/abort-GQE4OI5S.js.map +1 -0
- package/dist/abort-VMRQOADY.js +96 -0
- package/dist/abort-VMRQOADY.js.map +1 -0
- package/dist/bot-WOTETAJY.js +13 -0
- package/dist/bot-WOTETAJY.js.map +1 -0
- package/dist/cancel-clarification-4G5S2HJZ.js +64 -0
- package/dist/cancel-clarification-4G5S2HJZ.js.map +1 -0
- package/dist/chunk-3U373M37.js +67 -0
- package/dist/chunk-3U373M37.js.map +1 -0
- package/dist/chunk-3YHD4SIN.js +97 -0
- package/dist/chunk-3YHD4SIN.js.map +1 -0
- package/dist/chunk-6IY6CRUJ.js +690 -0
- package/dist/chunk-6IY6CRUJ.js.map +1 -0
- package/dist/chunk-B6OA3XJK.js +1167 -0
- package/dist/chunk-B6OA3XJK.js.map +1 -0
- package/dist/chunk-BM4R6NST.js +191 -0
- package/dist/chunk-BM4R6NST.js.map +1 -0
- package/dist/chunk-FLPIU2QO.js +77 -0
- package/dist/chunk-FLPIU2QO.js.map +1 -0
- package/dist/chunk-H7YXX4UA.js +86 -0
- package/dist/chunk-H7YXX4UA.js.map +1 -0
- package/dist/chunk-IT74N3UH.js +19 -0
- package/dist/chunk-IT74N3UH.js.map +1 -0
- package/dist/chunk-JOT4UVSO.js +186 -0
- package/dist/chunk-JOT4UVSO.js.map +1 -0
- package/dist/chunk-KSJKCLEJ.js +222 -0
- package/dist/chunk-KSJKCLEJ.js.map +1 -0
- package/dist/chunk-LIEW4ULF.js +139 -0
- package/dist/chunk-LIEW4ULF.js.map +1 -0
- package/dist/chunk-M4V3YOCY.js +82 -0
- package/dist/chunk-M4V3YOCY.js.map +1 -0
- package/dist/chunk-MMWQHH25.js +207 -0
- package/dist/chunk-MMWQHH25.js.map +1 -0
- package/dist/chunk-OJ4CNF73.js +78 -0
- package/dist/chunk-OJ4CNF73.js.map +1 -0
- package/dist/chunk-PFJAC3RO.js +137 -0
- package/dist/chunk-PFJAC3RO.js.map +1 -0
- package/dist/chunk-PVKVCUNR.js +159 -0
- package/dist/chunk-PVKVCUNR.js.map +1 -0
- package/dist/chunk-RXT4WSIY.js +35 -0
- package/dist/chunk-RXT4WSIY.js.map +1 -0
- package/dist/chunk-RZK74PDF.js +34 -0
- package/dist/chunk-RZK74PDF.js.map +1 -0
- package/dist/chunk-UDTWVKRX.js +68 -0
- package/dist/chunk-UDTWVKRX.js.map +1 -0
- package/dist/chunk-VCEONSWJ.js +307 -0
- package/dist/chunk-VCEONSWJ.js.map +1 -0
- package/dist/chunk-VWBCDZWQ.js +119 -0
- package/dist/chunk-VWBCDZWQ.js.map +1 -0
- package/dist/chunk-WEJCTFQB.js +228 -0
- package/dist/chunk-WEJCTFQB.js.map +1 -0
- package/dist/chunk-YJK7IRPI.js +223 -0
- package/dist/chunk-YJK7IRPI.js.map +1 -0
- package/dist/claude-md-HQ6L4CRP.js +8 -0
- package/dist/claude-md-HQ6L4CRP.js.map +1 -0
- package/dist/cli.js +276 -0
- package/dist/cli.js.map +1 -0
- package/dist/commands-RG45VBTZ.js +407 -0
- package/dist/commands-RG45VBTZ.js.map +1 -0
- package/dist/commands-WYVRVE5Z.js +400 -0
- package/dist/commands-WYVRVE5Z.js.map +1 -0
- package/dist/config-edit-G7O56HXO.js +50 -0
- package/dist/config-edit-G7O56HXO.js.map +1 -0
- package/dist/config-set-QN3JRNZL.js +63 -0
- package/dist/config-set-QN3JRNZL.js.map +1 -0
- package/dist/daemon-CGBV55JK.js +104 -0
- package/dist/daemon-CGBV55JK.js.map +1 -0
- package/dist/dashboard-YVFJ5DXR.js +143 -0
- package/dist/dashboard-YVFJ5DXR.js.map +1 -0
- package/dist/doctor-BPTLVLTD.js +98 -0
- package/dist/doctor-BPTLVLTD.js.map +1 -0
- package/dist/human-loop-RBTA2TYK.js +16 -0
- package/dist/human-loop-RBTA2TYK.js.map +1 -0
- package/dist/human-loop-XGWXUNCS.js +18 -0
- package/dist/human-loop-XGWXUNCS.js.map +1 -0
- package/dist/index.d.ts +583 -0
- package/dist/index.js +28 -0
- package/dist/index.js.map +1 -0
- package/dist/loader-DGW7HCJ5.js +21 -0
- package/dist/loader-DGW7HCJ5.js.map +1 -0
- package/dist/logs-JUVQWN6C.js +93 -0
- package/dist/logs-JUVQWN6C.js.map +1 -0
- package/dist/mcp.js +132 -0
- package/dist/mcp.js.map +1 -0
- package/dist/orchestrator-3MGXX3QW.js +22 -0
- package/dist/orchestrator-3MGXX3QW.js.map +1 -0
- package/dist/orchestrator-BVUKN5N3.js +13 -0
- package/dist/orchestrator-BVUKN5N3.js.map +1 -0
- package/dist/pause-FLDZ3OD6.js +62 -0
- package/dist/pause-FLDZ3OD6.js.map +1 -0
- package/dist/projects-QMIGNW7U.js +129 -0
- package/dist/projects-QMIGNW7U.js.map +1 -0
- package/dist/replay-M4JEG4Z4.js +151 -0
- package/dist/replay-M4JEG4Z4.js.map +1 -0
- package/dist/schedule-CDHD77VZ.js +17 -0
- package/dist/schedule-CDHD77VZ.js.map +1 -0
- package/dist/serve-XI7JTIPZ.js +231 -0
- package/dist/serve-XI7JTIPZ.js.map +1 -0
- package/dist/sprint-KZZWVNK6.js +200 -0
- package/dist/sprint-KZZWVNK6.js.map +1 -0
- package/dist/status-I6GU2LWE.js +48 -0
- package/dist/status-I6GU2LWE.js.map +1 -0
- package/dist/topic-manager-4AMEPMFI.js +12 -0
- package/dist/topic-manager-4AMEPMFI.js.map +1 -0
- package/dist/triage-WNHGPVZQ.js +251 -0
- package/dist/triage-WNHGPVZQ.js.map +1 -0
- package/dist/usage-AWWBI37F.js +155 -0
- package/dist/usage-AWWBI37F.js.map +1 -0
- package/dist/wizard-CYEJJLNF.js +190 -0
- package/dist/wizard-CYEJJLNF.js.map +1 -0
- package/package.json +56 -0
|
@@ -0,0 +1,690 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import {
|
|
3
|
+
estimateCostUsd,
|
|
4
|
+
extractTokenUsage
|
|
5
|
+
} from "./chunk-RXT4WSIY.js";
|
|
6
|
+
import {
|
|
7
|
+
analyzeStage,
|
|
8
|
+
appendClarifications,
|
|
9
|
+
createGitHubClient,
|
|
10
|
+
detectComplexity,
|
|
11
|
+
fetchStage,
|
|
12
|
+
getContextFilesForStage,
|
|
13
|
+
readTicketContext,
|
|
14
|
+
runAgent,
|
|
15
|
+
runAgentsParallel,
|
|
16
|
+
writeReviewFeedback
|
|
17
|
+
} from "./chunk-VCEONSWJ.js";
|
|
18
|
+
import {
|
|
19
|
+
buildMinimalEnv,
|
|
20
|
+
spawnSafe
|
|
21
|
+
} from "./chunk-UDTWVKRX.js";
|
|
22
|
+
import {
|
|
23
|
+
addComment,
|
|
24
|
+
getClient,
|
|
25
|
+
reassign,
|
|
26
|
+
transitionTicket
|
|
27
|
+
} from "./chunk-JOT4UVSO.js";
|
|
28
|
+
import {
|
|
29
|
+
requestApproval,
|
|
30
|
+
requestClarification
|
|
31
|
+
} from "./chunk-YJK7IRPI.js";
|
|
32
|
+
import {
|
|
33
|
+
acquireLock
|
|
34
|
+
} from "./chunk-FLPIU2QO.js";
|
|
35
|
+
import {
|
|
36
|
+
createTelegramNotifier
|
|
37
|
+
} from "./chunk-PVKVCUNR.js";
|
|
38
|
+
import {
|
|
39
|
+
StateManager,
|
|
40
|
+
getEvents,
|
|
41
|
+
getLockPath,
|
|
42
|
+
getMemoryDir,
|
|
43
|
+
getRunDir
|
|
44
|
+
} from "./chunk-VWBCDZWQ.js";
|
|
45
|
+
import {
|
|
46
|
+
initBot
|
|
47
|
+
} from "./chunk-OJ4CNF73.js";
|
|
48
|
+
import {
|
|
49
|
+
createLogger
|
|
50
|
+
} from "./chunk-IT74N3UH.js";
|
|
51
|
+
|
|
52
|
+
// src/pipeline/cost-guard.ts
/**
 * Sum the estimated USD cost of all completed stages in this run and
 * decide whether the pipeline may continue.
 *
 * Reads STAGE_COMPLETED events from the run directory, extracts token
 * usage from each stage's output, and prices it with estimateCostUsd.
 * Sends a Telegram warning at >= 80% of the limit and an abort notice
 * when the limit itself is reached.
 *
 * @returns "abort" when the limit is reached, otherwise "continue".
 */
async function checkCostLimit(opts) {
  const { runDir, maxCostUsd, telegram, ticketKey } = opts;
  const spentUsd = getEvents(runDir)
    .filter((event) => event.type === "STAGE_COMPLETED")
    .reduce((sum, event) => {
      const usage = extractTokenUsage(event.output);
      if (!usage) return sum;
      return sum + estimateCostUsd(usage.inputTokens, usage.outputTokens, usage.model);
    }, 0);
  if (spentUsd >= maxCostUsd) {
    await telegram.send(
      `\u26D4 <b>${ticketKey}</b> \u2014 Cost limit reached ($${spentUsd.toFixed(4)} / $${maxCostUsd}). Aborting pipeline.`
    );
    return "abort";
  }
  if (spentUsd >= 0.8 * maxCostUsd) {
    await telegram.send(
      `\u26A0\uFE0F <b>${ticketKey}</b> \u2014 Cost at 80% of limit ($${spentUsd.toFixed(4)} / $${maxCostUsd}).`
    );
  }
  return "continue";
}
|
|
75
|
+
|
|
76
|
+
// src/pipeline/stages/3-clarify.ts
import path from "path";
/**
 * Clarify stage: when the analyze stage flagged the ticket as needing
 * clarification, asks the humans via Telegram and appends their answers
 * to the ticket memory directory.
 */
var clarifyStage = {
  id: "clarify",
  name: "Clarify",
  model: "haiku",
  // Skip unless analyze completed AND its most recent output asked for
  // clarification.
  async shouldSkip(ctx) {
    const analyzeDone = ctx.state.current.completedStages.includes("analyze");
    if (!analyzeDone) return true;
    const lastAnalyze = ctx.state.events.findLast(
      (e) => e.type === "STAGE_COMPLETED" && e.stage === "analyze"
    );
    return !lastAnalyze?.output?.needsClarification;
  },
  async run(ctx) {
    const { config, ticketKey, memoryDir } = ctx;
    const storeDir = path.join(config.workspace.rootDir, ".jira-acp");
    // Pull the questions from the latest analyze output; fall back to a
    // generic prompt if none were recorded.
    const lastAnalyze = ctx.state.events.findLast(
      (e) => e.type === "STAGE_COMPLETED" && e.stage === "analyze"
    );
    const questions = lastAnalyze?.output?.questions ?? ["Please clarify the ticket requirements."];
    ctx.logger.info(
      { ticketKey, questions },
      "Requesting clarification via Telegram"
    );
    const hil = config.telegram.humanInTheLoop;
    const answers = await requestClarification(
      config.telegram.botToken,
      config.telegram.chatId,
      ticketKey,
      questions,
      storeDir,
      {
        timeoutMs: hil?.clarificationTimeoutMs ?? 36e5,
        onTimeout: hil?.clarificationTimeoutAction ?? "skip"
      }
    );
    appendClarifications(memoryDir, answers);
    ctx.logger.info({ ticketKey }, "Clarifications received and saved");
    return { clarified: true, answers };
  }
};
|
|
121
|
+
|
|
122
|
+
// src/pipeline/stages/4-code.ts
/**
 * Code stage: drives the coding agent to implement the ticket on a
 * dedicated feature branch inside the configured workspace.
 */
var codeStage = {
  id: "code",
  name: "Code",
  model: "sonnet",
  async run(ctx) {
    const { config, ticketKey, memoryDir, projectDir } = ctx;
    const ticketCtx = readTicketContext(memoryDir);
    const contextFiles = getContextFilesForStage(projectDir, memoryDir, "code");
    // Pick the agent model based on how complex the ticket looks.
    const model = detectComplexity(ticketCtx);
    ctx.logger.info({ ticketKey, model }, "Starting code agent");
    const pattern = config.github.branchPattern ?? "feature/{ticketKey}-{slug}";
    const branchName = buildBranchName(pattern, ticketKey, ticketCtx);
    const prompt = `
Implement the following Jira ticket: ${ticketKey}

Read the ticket context from the provided context files.

Requirements:
1. Create branch: ${branchName}
2. Implement all acceptance criteria
3. Write tests for new functionality
4. Commit with message: "${ticketKey}: <short description>"
5. Do NOT push \u2014 pipeline will handle git operations

Branch naming: ${branchName}
Workspace: ${config.workspace.rootDir}
${config.workspace.buildCommand ? `Build command: ${config.workspace.buildCommand}` : ""}
`.trim();
    // Agent is skipped in dry runs; branch name is still reported so later
    // stages can be exercised.
    if (!ctx.dryRun) {
      await runAgent({
        prompt,
        workdir: config.workspace.rootDir,
        model,
        contextFiles,
        timeoutMs: config.pipeline?.stageTimeouts?.code ?? 18e5,
        stallTimeoutMs: config.pipeline?.agentStallTimeoutMs ?? 3e5
      });
    }
    ctx.logger.info({ ticketKey, branchName }, "Code agent completed");
    return { branchName };
  }
};
|
|
168
|
+
/**
 * Build a git branch name from a pattern like "feature/{ticketKey}-{slug}".
 *
 * The slug comes from the line immediately following the "## Summary"
 * heading in the ticket context markdown: lowercased, stripped of
 * characters outside [a-z0-9 -], whitespace collapsed to "-", and
 * truncated to 40 chars. Falls back to "implementation" when no usable
 * summary line exists.
 *
 * @param {string} pattern - Pattern with {ticketKey}/{prefix}/{slug} placeholders.
 * @param {string} ticketKey - Jira ticket key, e.g. "ABC-123".
 * @param {string} ticketCtx - Ticket context markdown.
 * @returns {string} The resolved branch name.
 */
function buildBranchName(pattern, ticketKey, ticketCtx) {
  // Split once instead of re-splitting the context for every lookup.
  const lines = ticketCtx.split("\n");
  const summaryIdx = lines.findIndex((l) => l.startsWith("## Summary"));
  const nextLine = summaryIdx >= 0 ? lines[summaryIdx + 1] ?? "" : "";
  const slug = nextLine
    .toLowerCase()
    .replace(/[^a-z0-9\s-]/g, "")
    .trim()
    .replace(/\s+/g, "-")
    .slice(0, 40);
  return pattern
    .replace("{ticketKey}", ticketKey)
    .replace("{prefix}", "feature")
    .replace("{slug}", slug || "implementation");
}
|
|
174
|
+
|
|
175
|
+
// src/pipeline/stages/5-git.ts
/**
 * Git & PR stage: pushes the branch produced by the code stage and
 * opens a pull request (optionally requesting reviewers).
 */
var gitStage = {
  id: "git",
  name: "Git & PR",
  model: "haiku",
  async run(ctx) {
    const { config, ticketKey, state } = ctx;
    const branchName = state.current.branchName;
    if (!branchName)
      throw new Error("No branch name in state \u2014 code stage must run first");
    const workdir = config.workspace.rootDir;
    const ticketCtx = readTicketContext(ctx.memoryDir);
    // PR title uses the line after the "## Summary" heading, falling back
    // to the ticket key when no summary is present.
    const lines = ticketCtx.split("\n");
    const summaryLine = lines.find((l) => l.startsWith("## Summary"));
    const summary = lines[lines.indexOf(summaryLine ?? "") + 1] ?? ticketKey;
    ctx.logger.info(
      { ticketKey, branchName },
      "Pushing branch and creating PR"
    );
    if (ctx.dryRun) return { branchName, prNumber: null };
    await spawnSafe("git", ["push", "origin", branchName], { cwd: workdir });
    const prNumber = await ctx.github.createPR({
      title: `[${ticketKey}] ${summary.trim()}`,
      body: buildPrBody(ticketKey, ticketCtx),
      head: branchName,
      base: config.github.defaultBranch ?? "main",
      draft: config.github.prDraftByDefault ?? false
    });
    if (config.github.reviewers?.length) {
      await ctx.github.addReviewers(prNumber, config.github.reviewers);
    }
    ctx.logger.info({ ticketKey, prNumber }, "PR created");
    return { branchName, prNumber };
  }
};
|
|
211
|
+
/**
 * Render the pull-request body: ticket heading, the full ticket context,
 * and a pipeline attribution footer.
 */
function buildPrBody(ticketKey, ticketCtx) {
  const sections = [
    `## ${ticketKey}`,
    "",
    ticketCtx,
    "",
    "---",
    "*Generated by jiraACP automated pipeline*"
  ];
  return sections.join("\n");
}
|
|
219
|
+
|
|
220
|
+
// src/pipeline/stages/6-review.ts
// Review stage: runs two review agents in parallel (logic vs quality),
// merges their issue lists, and either auto-merges the PR or escalates
// to a human via Telegram when too many "major" issues were found.
var reviewStage = {
  id: "review",
  name: "Review",
  model: "sonnet",
  async run(ctx) {
    const { config, ticketKey, state, memoryDir, projectDir } = ctx;
    const prNumber = state.current.prNumber;
    if (!prNumber)
      throw new Error("No PR number in state \u2014 git stage must run first");
    const contextFiles = getContextFilesForStage(
      projectDir,
      memoryDir,
      "review"
    );
    ctx.logger.info({ ticketKey, prNumber }, "Running parallel review agents");
    // Two independent review angles; both are asked to answer in the same
    // JSON shape so parseIssues can handle either response.
    const logicPrompt = `Review PR #${prNumber} for ticket ${ticketKey}.
Focus on: Does the implementation correctly satisfy the acceptance criteria?
List issues as JSON: { "issues": [{ "severity": "minor"|"major", "message": "..." }] }`;
    const qualityPrompt = `Review PR #${prNumber} for ticket ${ticketKey}.
Focus on: Missing tests, security issues, type safety, performance red flags.
List issues as JSON: { "issues": [{ "severity": "minor"|"major", "message": "..." }] }`;
    const agentOpts = {
      workdir: config.workspace.rootDir,
      model: "sonnet",
      contextFiles,
      // Each agent gets half the stage budget since they run in parallel.
      timeoutMs: (config.pipeline?.stageTimeouts?.review ?? 6e5) / 2,
      stallTimeoutMs: 12e4
    };
    // Dry runs synthesize empty results instead of invoking agents.
    const [logicRaw, qualityRaw] = ctx.dryRun ? ['{"issues":[]}', '{"issues":[]}'] : await runAgentsParallel(
      { ...agentOpts, prompt: logicPrompt },
      { ...agentOpts, prompt: qualityPrompt }
    );
    const issues = [...parseIssues(logicRaw), ...parseIssues(qualityRaw)];
    const majorCount = issues.filter((i) => i.severity === "major").length;
    // At or above this many major issues, a human must approve the merge.
    const threshold = config.github.majorIssueThreshold ?? 1;
    const needsHumanApproval = majorCount >= threshold;
    // Persist review feedback to memory regardless of outcome.
    writeReviewFeedback(memoryDir, {
      prNumber,
      issues,
      autoResolved: !needsHumanApproval
    });
    if (needsHumanApproval) {
      ctx.logger.warn(
        { ticketKey, majorCount },
        "Major issues found \u2014 requesting human approval via Telegram"
      );
      ctx.state.emit({
        type: "HUMAN_APPROVAL_REQUESTED",
        context: { prNumber, majorCount }
      });
      const hil = config.telegram.humanInTheLoop;
      // Dry runs auto-approve; otherwise block on the Telegram reply (with
      // a configurable timeout + timeout action, default 24h / abort).
      const approved = ctx.dryRun ? true : await requestApproval(
        config.telegram.botToken,
        config.telegram.chatId,
        ticketKey,
        issues,
        {
          timeoutMs: hil?.reviewApprovalTimeoutMs ?? 864e5,
          onTimeout: hil?.reviewApprovalTimeoutAction ?? "abort",
          topicId: config.telegram.topicId
        }
      );
      if (approved) {
        ctx.state.emit({ type: "HUMAN_APPROVED" });
        ctx.logger.info(
          { ticketKey, prNumber },
          "Human approved \u2014 proceeding to merge"
        );
      } else {
        ctx.state.emit({
          type: "HUMAN_REJECTED",
          reason: "Reviewer rejected PR"
        });
        // The "REVIEW_REJECTED:" prefix lets the orchestrator's error
        // handling distinguish this from an agent failure.
        throw new Error(`REVIEW_REJECTED:PR #${prNumber} rejected by reviewer`);
      }
    }
    if (!ctx.dryRun) {
      await ctx.github.mergePR(
        prNumber,
        config.github.autoMergeStrategy ?? "squash"
      );
      ctx.logger.info({ ticketKey, prNumber }, "PR merged");
    }
    return { prNumber, issues, merged: true };
  }
};
|
|
307
|
+
/**
 * Extract the review issue list from a raw agent response.
 *
 * Takes the first {...} JSON blob in the text; any issue whose severity
 * is not exactly "major" is normalized to "minor". Returns [] on any
 * missing or unparseable JSON.
 */
function parseIssues(raw) {
  try {
    const blob = raw.match(/\{[\s\S]*\}/)?.[0];
    if (blob === undefined) return [];
    const issues = JSON.parse(blob).issues ?? [];
    return issues.map((issue) => ({
      severity: issue.severity === "major" ? "major" : "minor",
      message: issue.message
    }));
  } catch {
    return [];
  }
}
|
|
320
|
+
|
|
321
|
+
// src/pipeline/stages/7-deploy.ts
/**
 * Deploy stage: runs the configured deploy command in the workspace and
 * optionally polls a health-check URL until it answers OK.
 * Skipped entirely unless deploy.enabled is set in config.
 */
var deployStage = {
  id: "deploy",
  name: "Deploy",
  model: "haiku",
  async shouldSkip(ctx) {
    return !ctx.config.deploy?.enabled;
  },
  async run(ctx) {
    const { config, ticketKey } = ctx;
    const deploy = config.deploy;
    if (!deploy?.command) throw new Error("deploy.command not configured");
    ctx.logger.info({ ticketKey }, "Deploying to dev server");
    if (ctx.dryRun) return { deployed: false, dryRun: true };
    // Run the deploy command with a minimal environment (plus any
    // configured extras) and a generous 20-minute default timeout.
    const proc = await spawnSafe(deploy.command, [], {
      cwd: config.workspace.rootDir,
      env: buildMinimalEnv(deploy.env ?? {}),
      timeoutMs: deploy.timeoutMs ?? 12e5
    });
    if (proc.exitCode !== 0) {
      throw new Error(
        `Deploy failed (exit ${proc.exitCode}):\n${proc.stderr}`
      );
    }
    // Wait for the service to come up before declaring success.
    if (deploy.healthCheckUrl) {
      await healthCheck(
        deploy.healthCheckUrl,
        deploy.healthCheckTimeoutMs ?? 3e4
      );
    }
    ctx.logger.info({ ticketKey }, "Deploy successful");
    return { deployed: true, deployUrl: deploy.healthCheckUrl };
  }
};
|
|
356
|
+
/**
 * Poll a URL until it returns an OK response or the overall deadline
 * passes. Each probe uses a 5s fetch timeout; probes are spaced 2s
 * apart. Throws when the deadline is exceeded.
 */
async function healthCheck(url, timeoutMs) {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const res = await fetch(url, { signal: AbortSignal.timeout(5e3) });
      if (res.ok) return;
    } catch {
      // Connection refused / probe timeout — server not up yet; keep polling.
    }
    await sleep(2e3);
  }
  throw new Error(`Health check timed out after ${timeoutMs}ms: ${url}`);
}
|
|
368
|
+
// Promise-based delay helper used by the health-check poll loop.
var sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
|
369
|
+
|
|
370
|
+
// src/pipeline/stages/8-test.ts
// Promise-based delay helper (per-module duplicate kept by the bundler).
var sleep2 = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
|
372
|
+
// UI Test stage: after an optional settle delay, asks an agent to run
// Playwright tests against the deployed base URL, retrying on failure.
// Unless pipeline.failOnTestFailure is set, a final failure only warns
// via Telegram and lets the pipeline continue.
var testStage = {
  id: "test",
  name: "UI Test",
  model: "sonnet",
  // Skipped unless UI testing is enabled in config.
  async shouldSkip(ctx) {
    return !ctx.config.test?.enabled;
  },
  async run(ctx) {
    const { config, ticketKey, memoryDir, projectDir } = ctx;
    const testConfig = config.test;
    if (!testConfig?.baseUrl) throw new Error("test.baseUrl not configured");
    // Give the freshly deployed service time to settle before testing.
    const waitMs = testConfig.waitBeforeTestMs ?? 5e3;
    ctx.logger.info({ ticketKey, waitMs }, "Waiting before UI tests");
    await sleep2(waitMs);
    const contextFiles = getContextFilesForStage(projectDir, memoryDir, "test");
    const retries = testConfig.retries ?? 2;
    const prompt = `
Run Playwright UI tests for ticket ${ticketKey} on ${testConfig.baseUrl}.

Test the acceptance criteria from the ticket context.
Spec pattern: ${testConfig.specPattern ?? "e2e/**/*.spec.ts"}

Report results as JSON: { "passed": boolean, "summary": "...", "failures": ["..."] }
`.trim();
    // lastError carries the most recent failure (test failures and agent
    // errors alike) out of the retry loop for reporting.
    let lastError;
    for (let attempt = 0; attempt <= retries; attempt++) {
      try {
        ctx.logger.info(
          { ticketKey, attempt: attempt + 1 },
          "Running UI tests"
        );
        // Dry runs synthesize a passing result instead of invoking the agent.
        const raw = ctx.dryRun ? '{"passed":true,"summary":"dry-run","failures":[]}' : await runAgent({
          prompt,
          workdir: config.workspace.rootDir,
          model: "sonnet",
          contextFiles,
          timeoutMs: testConfig.timeoutMs ?? 3e5,
          stallTimeoutMs: 6e4
        });
        // Extract the first {...} JSON blob from the agent's free-form output.
        const match = raw.match(/\{[\s\S]*\}/);
        if (!match) throw new Error("Could not parse test results JSON");
        const result = JSON.parse(match[0]);
        if (result.passed) {
          ctx.logger.info({ ticketKey }, "UI tests passed");
          return { passed: true, summary: result.summary };
        }
        lastError = new Error(`Tests failed: ${result.failures.join(", ")}`);
        ctx.logger.warn(
          { ticketKey, attempt: attempt + 1, failures: result.failures },
          "Tests failed, retrying"
        );
      } catch (err) {
        // Agent/parse errors also count as a failed attempt and are retried.
        lastError = err instanceof Error ? err : new Error(String(err));
      }
    }
    // All attempts exhausted: either fail the pipeline or warn and continue.
    const failOnTest = config.pipeline?.failOnTestFailure ?? false;
    if (failOnTest)
      throw lastError ?? new Error("Tests failed after all retries");
    await ctx.telegram.send(
      `\u26A0\uFE0F <b>${ticketKey}</b> UI tests failed after ${retries + 1} attempts. Continuing pipeline.
${lastError?.message}`
    );
    return { passed: false, summary: lastError?.message ?? "Tests failed" };
  }
};
|
|
437
|
+
|
|
438
|
+
// src/pipeline/stages/9-notify.ts
// Notify stage: closes the loop — transitions the Jira ticket, comments
// with PR/branch details, optionally reassigns it, and sends the final
// Telegram "done" message. All external calls are skipped in dry runs.
var notifyStage = {
  id: "notify",
  name: "Notify",
  model: "haiku",
  async run(ctx) {
    const { config, ticketKey, state } = ctx;
    const current = state.current;
    ctx.logger.info({ ticketKey }, "Notifying: Jira + Telegram");
    if (!ctx.dryRun) {
      // Move the ticket to its done state (transition name configurable).
      await transitionTicket({
        instance: config.jira.instance,
        ticket_key: ticketKey,
        transition_name: config.jira.doneTransition ?? "Done"
      });
      // Build optional PR/branch suffix lines for the Jira comment.
      const prInfo = current.prNumber ? `
PR: #${current.prNumber}` : "";
      const branchInfo = current.branchName ? `
Branch: ${current.branchName}` : "";
      await addComment({
        instance: config.jira.instance,
        ticket_key: ticketKey,
        comment: `\u2705 Implemented via jiraACP automated pipeline.${prInfo}${branchInfo}

All stages completed: fetch \u2192 analyze \u2192 code \u2192 review \u2192 deploy \u2192 test`
      });
      // Hand the ticket back to a human when configured.
      if (config.jira.reassignTo) {
        await reassign({
          instance: config.jira.instance,
          ticket_key: ticketKey,
          account_id: config.jira.reassignTo
        });
      }
      await ctx.telegram.sendDone(ticketKey, {
        summary: "Pipeline completed successfully",
        prNumber: current.prNumber ?? void 0
      });
    }
    ctx.logger.info({ ticketKey }, "Notifications sent");
    return { notified: true };
  }
};
|
|
480
|
+
|
|
481
|
+
// src/pipeline/hooks.ts
/**
 * Error thrown when a configured lifecycle hook exits non-zero.
 * Carries the hook name and process exit code so the orchestrator can
 * distinguish hook failures from stage failures.
 */
var HookError = class extends Error {
  hookName;
  exitCode;
  constructor(hookName, exitCode) {
    super(`Hook '${hookName}' failed with exit code ${exitCode}`);
    this.name = "HookError";
    this.hookName = hookName;
    this.exitCode = exitCode;
  }
};
|
|
492
|
+
/**
 * Execute a user-configured lifecycle hook command.
 *
 * The command string is whitespace-split into binary + args (no shell is
 * involved, so quoted arguments are not supported) and run with a
 * minimal environment exposing the ticket key as JIRA_ACP_TICKET.
 * No-op when the command is empty/missing; throws HookError on a
 * non-zero exit code.
 */
async function runHook(name, command, ctx) {
  const trimmed = command?.trim();
  if (!trimmed) return;
  const { logger } = ctx;
  const [bin, ...args] = trimmed.split(/\s+/);
  logger.info({ hookName: name, command }, "Running hook");
  const result = await spawnSafe(bin, args, {
    env: buildMinimalEnv({ JIRA_ACP_TICKET: ctx.ticketKey })
  });
  if (result.exitCode === 0) {
    logger.info({ hookName: name }, "Hook completed");
    return;
  }
  logger.error(
    { hookName: name, exitCode: result.exitCode, stderr: result.stderr },
    "Hook failed"
  );
  throw new HookError(name, result.exitCode);
}
|
|
509
|
+
|
|
510
|
+
// src/pipeline/orchestrator.ts
// Full stage sequence in execution order; filterStages() slices this
// list for partial (fromStage/toStage) and resumed runs.
var ALL_STAGES = [
  fetchStage,
  analyzeStage,
  clarifyStage,
  codeStage,
  gitStage,
  reviewStage,
  deployStage,
  testStage,
  notifyStage
];
|
|
522
|
+
// Orchestrator entry point: runs the (optionally sliced) stage sequence
// for one ticket under a per-ticket lock, emitting state events, firing
// lifecycle hooks, enforcing the cost limit, and notifying Telegram.
// opts: { dryRun?, fromStage?, toStage? }.
async function runPipeline(ticketKey, config, opts = {}) {
  const projectDir = config.workspace.rootDir;
  const runDir = getRunDir(config.name, ticketKey);
  const lockPath = getLockPath(config.name, ticketKey);
  const memoryDir = getMemoryDir(config.name, ticketKey);
  const logger = createLogger(`pipeline:${ticketKey}`);
  const state = new StateManager(runDir);
  if (config.telegram?.botToken) {
    await initBot(config.telegram.botToken);
  }
  // Per-ticket lock prevents concurrent runs; released in finally.
  const lock = await acquireLock(lockPath);
  // Shared context handed to every stage and hook.
  const ctx = {
    config,
    ticketKey,
    projectDir,
    state,
    memoryDir,
    dryRun: opts.dryRun ?? false,
    logger,
    jira: getClient(config.jira.instance),
    github: createGitHubClient(
      config.github.token,
      config.github.owner,
      config.github.repo
    ),
    telegram: createTelegramNotifier(
      config.telegram.botToken,
      config.telegram.chatId,
      ticketKey,
      config.name,
      config.telegram.topicId
    )
  };
  state.emit({ type: "STARTED", ticketKey });
  const stages = filterStages(ALL_STAGES, opts.fromStage, opts.toStage);
  const hooksConfig = config.pipeline?.hooks;
  try {
    await runHook("beforePipeline", hooksConfig?.beforePipeline, {
      ticketKey,
      logger
    });
    for (const stage of stages) {
      // Stage-declared skip (e.g. deploy/test disabled, no clarification needed).
      if (await stage.shouldSkip?.(ctx)) {
        state.emit({
          type: "STAGE_SKIPPED",
          stage: stage.id,
          reason: "shouldSkip returned true"
        });
        logger.info({ stage: stage.id }, "Stage skipped");
        // Telegram notifications are best-effort; never fail the run on them.
        await ctx.telegram.notifyStageSkipped(stage.id).catch(() => void 0);
        continue;
      }
      if (stage.id === "code") {
        await runHook("beforeCode", hooksConfig?.beforeCode, {
          ticketKey,
          logger
        });
      }
      state.emit({ type: "STAGE_STARTED", stage: stage.id });
      logger.info({ stage: stage.id }, `\u25B6 ${stage.name}`);
      await ctx.telegram.notifyStageStarted(stage.id).catch(() => void 0);
      try {
        // Optional per-stage timeout wraps the stage's run() promise.
        const timeout = config.pipeline?.stageTimeouts?.[stage.id];
        const output = timeout ? await withTimeout(stage.run(ctx), timeout) : await stage.run(ctx);
        state.emit({ type: "STAGE_COMPLETED", stage: stage.id, output });
        logger.info({ stage: stage.id }, `\u2713 ${stage.name}`);
        await ctx.telegram.notifyStageCompleted(stage.id).catch(() => void 0);
        // Cost guard: re-evaluated after each completed stage.
        if (config.pipeline?.maxCostUsdPerRun) {
          const decision = await checkCostLimit({
            runDir,
            maxCostUsd: config.pipeline.maxCostUsdPerRun,
            telegram: ctx.telegram,
            ticketKey
          });
          if (decision === "abort") {
            state.emit({
              type: "PIPELINE_ABORTED",
              reason: "Cost limit exceeded"
            });
            throw new Error("Cost limit exceeded");
          }
        }
        if (stage.id === "code") {
          await runHook("afterCode", hooksConfig?.afterCode, {
            ticketKey,
            logger
          });
        }
        if (stage.id === "deploy") {
          await runHook("afterDeploy", hooksConfig?.afterDeploy, {
            ticketKey,
            logger
          });
        }
      } catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        // "SKIP:"-prefixed errors end the run quietly (no failure event);
        // the finally block still runs afterPipeline + releases the lock.
        if (message.startsWith("SKIP:")) {
          state.emit({
            type: "STAGE_SKIPPED",
            stage: stage.id,
            reason: message
          });
          logger.warn({ stage: stage.id }, `Skipped: ${message}`);
          return;
        }
        // Hook failures abort the pipeline without a STAGE_FAILED event.
        if (err instanceof HookError) {
          state.emit({ type: "PIPELINE_ABORTED", reason: message });
          throw err;
        }
        state.emit({ type: "STAGE_FAILED", stage: stage.id, error: message });
        await ctx.telegram.notifyStageFailed(stage.id, message).catch(() => void 0);
        throw err;
      }
    }
    state.emit({ type: "PIPELINE_COMPLETED" });
    logger.info({ ticketKey }, "Pipeline completed");
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    // HookError already emitted PIPELINE_ABORTED above; avoid a duplicate.
    if (!(err instanceof HookError)) {
      state.emit({ type: "PIPELINE_ABORTED", reason });
    }
    await ctx.telegram.sendError(ticketKey, err);
    logger.error({ ticketKey, reason }, "Pipeline aborted");
    // Signal failure to the CLI without killing in-flight cleanup.
    process.exitCode = 1;
  } finally {
    await runHook("afterPipeline", hooksConfig?.afterPipeline, {
      ticketKey,
      logger
    });
    lock.release();
  }
}
|
|
654
|
+
/**
 * Resume an interrupted pipeline run from its last known stage.
 * No-op (with a console notice) when the run already completed.
 */
async function resumePipeline(ticketKey, config) {
  const logger = createLogger(`pipeline:${ticketKey}`);
  const state = new StateManager(getRunDir(config.name, ticketKey));
  const current = state.current;
  if (current.isCompleted) {
    logger.info({ ticketKey }, "Pipeline already completed");
    process.stdout.write(`Pipeline for ${ticketKey} already completed.\n`);
    return;
  }
  // Prefer the in-flight stage, then the failed one; start over otherwise.
  const fromStage = current.currentStage ?? current.failedStage ?? "fetch";
  logger.info({ ticketKey, fromStage }, "Resuming pipeline");
  await runPipeline(ticketKey, config, { fromStage });
}
|
|
668
|
+
/**
 * Slice the stage list to the inclusive [from, to] window.
 * Unknown ids degrade gracefully: an unknown "from" starts at the
 * beginning, an unknown "to" runs to the end.
 */
function filterStages(stages, from, to) {
  const start = Math.max(0, from ? stages.findIndex((s) => s.id === from) : 0);
  const endIdx = to ? stages.findIndex((s) => s.id === to) : stages.length - 1;
  const end = endIdx < 0 ? stages.length : endIdx + 1;
  return stages.slice(start, end);
}
|
|
676
|
+
/**
 * Race a promise against a timeout.
 *
 * Unlike the previous AbortSignal.timeout-based version, the timer is
 * cleared as soon as the race settles, so a stage that finishes early
 * does not leave a live timer keeping the event loop alive for the
 * remainder of the window.
 *
 * @param {Promise} promise - The stage's in-flight work.
 * @param {number} ms - Timeout in milliseconds.
 * @returns {Promise} Settles with `promise`, or rejects with a timeout
 *   Error after `ms` milliseconds.
 */
async function withTimeout(promise, ms) {
  let timer;
  const aborted = new Promise((_, reject) => {
    timer = setTimeout(
      () => reject(new Error(`Stage timed out after ${ms}ms`)),
      ms
    );
  });
  try {
    return await Promise.race([promise, aborted]);
  } finally {
    // Always disarm the timer, whether the work won or the timeout fired.
    clearTimeout(timer);
  }
}
|
|
685
|
+
|
|
686
|
+
// Public API of this chunk: full pipeline run and resume-from-last-stage.
export {
  runPipeline,
  resumePipeline
};
|
|
690
|
+
//# sourceMappingURL=chunk-6IY6CRUJ.js.map
|