@redwoodjs/agent-ci 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +110 -0
- package/README.md +79 -0
- package/dist/cli.js +628 -0
- package/dist/config.js +63 -0
- package/dist/docker/container-config.js +178 -0
- package/dist/docker/container-config.test.js +156 -0
- package/dist/docker/service-containers.js +205 -0
- package/dist/docker/service-containers.test.js +236 -0
- package/dist/docker/shutdown.js +120 -0
- package/dist/docker/shutdown.test.js +148 -0
- package/dist/output/agent-mode.js +7 -0
- package/dist/output/agent-mode.test.js +36 -0
- package/dist/output/cleanup.js +218 -0
- package/dist/output/cleanup.test.js +241 -0
- package/dist/output/concurrency.js +57 -0
- package/dist/output/concurrency.test.js +88 -0
- package/dist/output/debug.js +36 -0
- package/dist/output/logger.js +57 -0
- package/dist/output/logger.test.js +82 -0
- package/dist/output/reporter.js +67 -0
- package/dist/output/run-state.js +126 -0
- package/dist/output/run-state.test.js +169 -0
- package/dist/output/state-renderer.js +149 -0
- package/dist/output/state-renderer.test.js +488 -0
- package/dist/output/tree-renderer.js +52 -0
- package/dist/output/tree-renderer.test.js +105 -0
- package/dist/output/working-directory.js +20 -0
- package/dist/runner/directory-setup.js +98 -0
- package/dist/runner/directory-setup.test.js +31 -0
- package/dist/runner/git-shim.js +92 -0
- package/dist/runner/git-shim.test.js +57 -0
- package/dist/runner/local-job.js +691 -0
- package/dist/runner/metadata.js +90 -0
- package/dist/runner/metadata.test.js +127 -0
- package/dist/runner/result-builder.js +119 -0
- package/dist/runner/result-builder.test.js +177 -0
- package/dist/runner/step-wrapper.js +82 -0
- package/dist/runner/step-wrapper.test.js +77 -0
- package/dist/runner/sync.js +80 -0
- package/dist/runner/workspace.js +66 -0
- package/dist/types.js +1 -0
- package/dist/workflow/job-scheduler.js +62 -0
- package/dist/workflow/job-scheduler.test.js +130 -0
- package/dist/workflow/workflow-parser.js +556 -0
- package/dist/workflow/workflow-parser.test.js +642 -0
- package/package.json +39 -0
- package/shim.sh +11 -0
package/dist/cli.js
ADDED
|
@@ -0,0 +1,628 @@
|
|
|
1
|
+
import { execSync } from "child_process";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import fs from "fs";
|
|
4
|
+
import { config, loadMachineSecrets } from "./config.js";
|
|
5
|
+
import { getNextLogNum } from "./output/logger.js";
|
|
6
|
+
import { setWorkingDirectory, DEFAULT_WORKING_DIR, PROJECT_ROOT, } from "./output/working-directory.js";
|
|
7
|
+
import { debugCli } from "./output/debug.js";
|
|
8
|
+
import { executeLocalJob } from "./runner/local-job.js";
|
|
9
|
+
import { getWorkflowTemplate, parseWorkflowSteps, parseWorkflowServices, parseWorkflowContainer, validateSecrets, parseMatrixDef, expandMatrixCombinations, isWorkflowRelevant, getChangedFiles, } from "./workflow/workflow-parser.js";
|
|
10
|
+
import { createConcurrencyLimiter, getDefaultMaxConcurrentJobs } from "./output/concurrency.js";
|
|
11
|
+
import { isWarmNodeModules, computeLockfileHash } from "./output/cleanup.js";
|
|
12
|
+
import { getWorkingDirectory } from "./output/working-directory.js";
|
|
13
|
+
import { pruneOrphanedDockerResources } from "./docker/shutdown.js";
|
|
14
|
+
import { parseJobDependencies, topoSort } from "./workflow/job-scheduler.js";
|
|
15
|
+
import { printSummary } from "./output/reporter.js";
|
|
16
|
+
import { syncWorkspaceForRetry } from "./runner/sync.js";
|
|
17
|
+
import { RunStateStore } from "./output/run-state.js";
|
|
18
|
+
import { renderRunState } from "./output/state-renderer.js";
|
|
19
|
+
import { isAgentMode, setQuietMode } from "./output/agent-mode.js";
|
|
20
|
+
import logUpdate from "log-update";
|
|
21
|
+
// ─── Signal helpers for retry / abort commands ────────────────────────────────
/**
 * Locate the "signals" directory for a named runner under <workDir>/runs.
 * A run directory matches when its name equals or ends with `runnerName`.
 * Returns the first matching signals dir (readdir order), or null when the
 * runs directory is missing or no match has a signals dir.
 */
function findSignalsDir(runnerName) {
    const runsDir = path.resolve(getWorkingDirectory(), "runs");
    if (!fs.existsSync(runsDir)) {
        return null;
    }
    const found = fs
        .readdirSync(runsDir)
        .filter((entry) => entry === runnerName || entry.endsWith(runnerName))
        .map((entry) => path.join(runsDir, entry, "signals"))
        .find((candidate) => fs.existsSync(candidate));
    return found ?? null;
}
|
|
38
|
+
// CLI entry point: parses process.argv and dispatches to one of the commands:
//   run   — execute one workflow (--workflow) or all relevant ones (--all)
//   retry — signal a paused runner to re-run (optionally from a given step)
//   abort — signal a paused runner to give up
// Always terminates via process.exit(); the exit code reflects success/failure.
async function run() {
    const args = process.argv.slice(2);
    const command = args[0];
    if (command === "run") {
        // ── Argument parsing for `run` ──────────────────────────────────────
        let sha;
        let workflow;
        let pauseOnFailure = false;
        let runAll = false;
        for (let i = 1; i < args.length; i++) {
            if ((args[i] === "--workflow" || args[i] === "-w") && args[i + 1]) {
                workflow = args[i + 1];
                i++;
            }
            else if (args[i] === "--pause-on-failure" || args[i] === "-p") {
                pauseOnFailure = true;
            }
            else if (args[i] === "--all" || args[i] === "-a") {
                runAll = true;
            }
            else if (args[i] === "--quiet" || args[i] === "-q") {
                setQuietMode(true);
            }
            else if (!args[i].startsWith("-")) {
                // Bare positional argument is treated as the git ref to run.
                sha = args[i];
            }
        }
        // Optional override of the working directory via env; relative paths
        // are resolved against the agent-ci project root.
        let workingDir = process.env.AGENT_CI_WORKING_DIR;
        if (workingDir) {
            if (!path.isAbsolute(workingDir)) {
                workingDir = path.resolve(PROJECT_ROOT, workingDir);
            }
            setWorkingDirectory(workingDir);
        }
        if (runAll) {
            // Discover all relevant workflows for the current branch
            const repoRoot = resolveRepoRoot();
            const workflowsDir = path.resolve(repoRoot, ".github", "workflows");
            if (!fs.existsSync(workflowsDir)) {
                console.error(`[Agent CI] No .github/workflows directory found in ${repoRoot}`);
                process.exit(1);
            }
            const branch = execSync("git rev-parse --abbrev-ref HEAD", { cwd: repoRoot })
                .toString()
                .trim();
            const changedFiles = getChangedFiles(repoRoot);
            const files = fs
                .readdirSync(workflowsDir)
                .filter((f) => f.endsWith(".yml") || f.endsWith(".yaml"))
                .map((f) => path.join(workflowsDir, f));
            const relevant = [];
            for (const file of files) {
                try {
                    const { parse: parseYaml } = await import("yaml");
                    const raw = parseYaml(fs.readFileSync(file, "utf8"));
                    // NOTE: YAML parses a bare `on:` key as boolean true, so the
                    // trigger definition may live under `raw.true` instead of `raw.on`.
                    const onDef = raw?.on || raw?.true;
                    if (!onDef) {
                        continue;
                    }
                    // Normalize the `on` definition (array / string / object forms)
                    // into an `events` map keyed by event name.
                    const events = {};
                    if (Array.isArray(onDef)) {
                        for (const e of onDef) {
                            events[e] = {};
                        }
                    }
                    else if (typeof onDef === "string") {
                        events[onDef] = {};
                    }
                    else {
                        Object.assign(events, onDef);
                    }
                    if (isWorkflowRelevant({ events }, branch, changedFiles)) {
                        relevant.push(file);
                    }
                }
                catch {
                    // Skip unparsable workflows
                }
            }
            if (relevant.length === 0) {
                console.log(`[Agent CI] No relevant workflows found for branch '${branch}'.`);
                process.exit(0);
            }
            const results = await runWorkflows({ workflowPaths: relevant, sha, pauseOnFailure });
            printSummary(results);
            const anyFailed = results.some((r) => !r.succeeded);
            process.exit(anyFailed ? 1 : 0);
        }
        if (!workflow) {
            console.error("[Agent CI] Error: You must specify --workflow <path> or --all");
            console.log("");
            printUsage();
            process.exit(1);
        }
        // Resolve workflow path before calling runWorkflows
        let workflowPath;
        if (path.isAbsolute(workflow)) {
            workflowPath = workflow;
        }
        else {
            // Try cwd, repo root, then .github/workflows; if none exists, fall
            // back to the repo-root candidate so the later error names a sane path.
            const cwd = process.cwd();
            const repoRootFallback = resolveRepoRoot();
            const workflowsDir = path.resolve(repoRootFallback, ".github", "workflows");
            const pathsToTry = [
                path.resolve(cwd, workflow),
                path.resolve(repoRootFallback, workflow),
                path.resolve(workflowsDir, workflow),
            ];
            workflowPath = pathsToTry.find((p) => fs.existsSync(p)) || pathsToTry[1];
        }
        const results = await runWorkflows({ workflowPaths: [workflowPath], sha, pauseOnFailure });
        printSummary(results);
        if (results.some((r) => !r.succeeded)) {
            process.exit(1);
        }
        process.exit(0);
    }
    else if (command === "retry" || command === "abort") {
        // ── Argument parsing for `retry` / `abort` ──────────────────────────
        let runnerName;
        let fromStep;
        for (let i = 1; i < args.length; i++) {
            if ((args[i] === "--name" || args[i] === "-n" || args[i] === "--runner") && args[i + 1]) {
                runnerName = args[i + 1];
                i++;
            }
            else if (args[i] === "--from-step" && args[i + 1]) {
                fromStep = args[i + 1];
                i++;
            }
            else if (args[i] === "--from-start") {
                // "*" is the sentinel meaning "restart from the first step".
                fromStep = "*";
            }
        }
        if (!runnerName) {
            console.error(`[Agent CI] Error: --name <name> is required for '${command}'`);
            process.exit(1);
        }
        if (fromStep && fromStep !== "*" && (isNaN(Number(fromStep)) || Number(fromStep) < 1)) {
            console.error(`[Agent CI] Error: --from-step must be a positive step number`);
            process.exit(1);
        }
        const signalsDir = findSignalsDir(runnerName);
        if (!signalsDir) {
            console.error(`[Agent CI] Error: No runner '${runnerName}' found. It may have already exited.`);
            process.exit(1);
        }
        // A runner only consumes signals while paused; otherwise clean up the
        // stale signals dir and report the error.
        const pausedFile = path.join(signalsDir, "paused");
        if (!fs.existsSync(pausedFile)) {
            fs.rmSync(signalsDir, { recursive: true, force: true });
            console.error(`[Agent CI] Error: Runner '${runnerName}' is not currently paused. It may have already exited.`);
            process.exit(1);
        }
        // Verify the docker container is actually still running before signaling.
        try {
            const { execSync } = await import("node:child_process");
            const status = execSync(`docker inspect -f '{{.State.Running}}' ${runnerName} 2>/dev/null`, {
                encoding: "utf-8",
            }).trim();
            if (status !== "true") {
                throw new Error("not running");
            }
        }
        catch {
            fs.rmSync(signalsDir, { recursive: true, force: true });
            console.error(`[Agent CI] Error: Runner '${runnerName}' is no longer running.`);
            process.exit(1);
        }
        if (command === "retry") {
            // Sync the host workspace into the run dir so the retry sees current files.
            const runDir = path.dirname(signalsDir);
            syncWorkspaceForRetry(runDir);
            if (fromStep) {
                fs.writeFileSync(path.join(signalsDir, "from-step"), fromStep);
            }
        }
        // The signal itself is just an empty file named after the command.
        fs.writeFileSync(path.join(signalsDir, command), "");
        const extra = fromStep ? ` (from step ${fromStep === "*" ? "1" : fromStep})` : "";
        console.log(`[Agent CI] Sent '${command}' signal to ${runnerName}${extra}`);
        process.exit(0);
    }
    else {
        printUsage();
        process.exit(1);
    }
}
|
|
220
|
+
// ─── runWorkflows ──────────────────────────────────────────────────────────────
// Single entry point for both `--workflow` and `--all`.
// One workflow = --all with a single entry.
// Owns the shared RunStateStore and the terminal render loop; the actual
// job execution is delegated to handleWorkflow(). Returns the flat list of
// per-job results (jobs whose promise rejected are silently omitted).
async function runWorkflows(options) {
    const { workflowPaths, sha, pauseOnFailure } = options;
    // Create the run state store — single source of truth for all progress
    const runId = `run-${Date.now()}`;
    const storeFilePath = path.join(getWorkingDirectory(), "runs", runId, "run-state.json");
    const store = new RunStateStore(runId, storeFilePath);
    // Start the render loop — reads from store, never touches execution logic
    // In agent mode (AI_AGENT=1 or --quiet), skip animated rendering to avoid token waste
    // but register a synchronous callback for important state changes.
    let renderInterval = null;
    if (isAgentMode()) {
        // De-duplication sets so each runner/step/pause is announced only once.
        const reportedPauses = new Set();
        const reportedRunners = new Set();
        const reportedSteps = new Map();
        const emit = (msg) => process.stderr.write(msg + "\n");
        store.onUpdate((state) => {
            for (const wf of state.workflows) {
                for (const job of wf.jobs) {
                    // First time a job leaves "queued": announce the runner.
                    if (job.status !== "queued" && !reportedRunners.has(job.runnerId)) {
                        reportedRunners.add(job.runnerId);
                        emit(`[Agent CI] Starting runner ${job.runnerId} (${wf.id} > ${job.id})`);
                        if (job.logDir) {
                            emit(`  Logs: ${job.logDir}`);
                        }
                    }
                    if (!reportedSteps.has(job.runnerId)) {
                        reportedSteps.set(job.runnerId, new Set());
                    }
                    const seen = reportedSteps.get(job.runnerId);
                    // Emit one line per (step, status) transition.
                    for (const step of job.steps) {
                        const key = `${step.index}:${step.status}`;
                        if (seen.has(key)) {
                            continue;
                        }
                        if (step.status === "completed") {
                            seen.add(key);
                            const dur = step.durationMs != null ? ` (${(step.durationMs / 1000).toFixed(1)}s)` : "";
                            emit(`  ✓ ${step.name}${dur}`);
                        }
                        else if (step.status === "failed") {
                            seen.add(key);
                            emit(`  ✗ ${step.name}`);
                        }
                        else if (step.status === "running") {
                            seen.add(key);
                            emit(`  ▸ ${step.name}`);
                        }
                    }
                    // Announce a pause (with last output + retry hint) exactly once
                    // per pause; clear the marker when the job resumes so a later
                    // pause is announced again.
                    if (job.status === "paused" && !reportedPauses.has(job.runnerId)) {
                        reportedPauses.add(job.runnerId);
                        const lines = [];
                        lines.push(`\n[Agent CI] Step failed: "${job.pausedAtStep}" (${wf.id} > ${job.id})`);
                        if (job.attempt && job.attempt > 1) {
                            lines.push(`  Attempt: ${job.attempt}`);
                        }
                        if (job.lastOutputLines && job.lastOutputLines.length > 0) {
                            lines.push("  Last output:");
                            for (const l of job.lastOutputLines) {
                                lines.push(`    ${l}`);
                            }
                        }
                        lines.push(`  To retry: agent-ci retry --name ${job.runnerId}`);
                        emit(lines.join("\n"));
                    }
                    else if (job.status !== "paused" && reportedPauses.has(job.runnerId)) {
                        reportedPauses.delete(job.runnerId);
                    }
                }
            }
        });
    }
    else {
        // Interactive mode: repaint the full tree at ~12fps via log-update.
        renderInterval = setInterval(() => {
            const state = store.getState();
            if (state.workflows.length > 0) {
                logUpdate(renderRunState(state));
            }
        }, 80);
    }
    try {
        const allResults = [];
        if (workflowPaths.length === 1) {
            // Single workflow — no cross-workflow warm-cache serialization needed
            const results = await handleWorkflow({
                workflowPath: workflowPaths[0],
                sha,
                pauseOnFailure,
                store,
            });
            allResults.push(...results);
        }
        else {
            // Multiple workflows (--all mode)
            // Determine warm-cache status from the first workflow's repo root
            const firstRepoRoot = resolveRepoRootFromWorkflow(workflowPaths[0]);
            const repoSlug = resolveRepoInfo(firstRepoRoot).replace("/", "-");
            let lockfileHash = "no-lockfile";
            try {
                lockfileHash = computeLockfileHash(firstRepoRoot);
            }
            catch { }
            const warmModulesDir = path.resolve(getWorkingDirectory(), "cache", "warm-modules", repoSlug, lockfileHash);
            const warm = isWarmNodeModules(warmModulesDir);
            if (!warm && workflowPaths.length > 1) {
                // Cold cache — run first workflow serially to populate warm modules,
                // then launch the rest in parallel.
                const firstResults = await handleWorkflow({
                    workflowPath: workflowPaths[0],
                    sha,
                    pauseOnFailure,
                    store,
                });
                allResults.push(...firstResults);
                const settled = await Promise.allSettled(workflowPaths
                    .slice(1)
                    .map((wf) => handleWorkflow({ workflowPath: wf, sha, pauseOnFailure, store })));
                for (const s of settled) {
                    if (s.status === "fulfilled") {
                        allResults.push(...s.value);
                    }
                }
            }
            else {
                // Warm cache — all workflows can run in parallel immediately.
                const settled = await Promise.allSettled(workflowPaths.map((wf) => handleWorkflow({ workflowPath: wf, sha, pauseOnFailure, store })));
                for (const s of settled) {
                    if (s.status === "fulfilled") {
                        allResults.push(...s.value);
                    }
                }
            }
        }
        store.complete(allResults.some((r) => !r.succeeded) ? "failed" : "completed");
        return allResults;
    }
    finally {
        // Always stop the repaint timer and leave the final frame on screen.
        if (renderInterval) {
            clearInterval(renderInterval);
        }
        if (!isAgentMode()) {
            // Final render — show the completed state
            const finalState = store.getState();
            if (finalState.workflows.length > 0) {
                logUpdate(renderRunState(finalState));
            }
            logUpdate.done();
        }
    }
}
|
|
371
|
+
// ─── handleWorkflow ───────────────────────────────────────────────────────────
// Processes a single workflow file: parses jobs, handles matrix expansion,
// wave scheduling, warm-cache serialization, and concurrency limiting.
// Returns an array of job results; on any setup error it logs and returns [].
async function handleWorkflow(options) {
    const { sha, pauseOnFailure, store } = options;
    let workflowPath = options.workflowPath;
    try {
        if (!fs.existsSync(workflowPath)) {
            throw new Error(`Workflow file not found: ${workflowPath}`);
        }
        const repoRoot = resolveRepoRootFromWorkflow(workflowPath);
        if (!process.env.AGENT_CI_WORKING_DIR) {
            setWorkingDirectory(DEFAULT_WORKING_DIR);
        }
        // Resolve the ref to a concrete SHA only when the caller provided one.
        const { headSha, shaRef } = sha
            ? resolveHeadSha(repoRoot, sha)
            : { headSha: undefined, shaRef: undefined };
        const githubRepo = resolveRepoInfo(repoRoot);
        const [owner, name] = githubRepo.split("/");
        const template = await getWorkflowTemplate(workflowPath);
        const jobs = template.jobs.filter((j) => j.type === "job");
        if (jobs.length === 0) {
            debugCli(`[Agent CI] No jobs found in workflow: ${path.basename(workflowPath)}`);
            return [];
        }
        // ── Matrix expansion: one entry per (job × matrix combination) ─────
        const expandedJobs = [];
        for (const job of jobs) {
            const id = job.id.toString();
            const matrixDef = await parseMatrixDef(workflowPath, id);
            if (matrixDef) {
                const combos = expandMatrixCombinations(matrixDef);
                const total = combos.length;
                for (let ci = 0; ci < combos.length; ci++) {
                    expandedJobs.push({
                        workflowPath,
                        taskName: id,
                        // __job_total/__job_index are internal shard markers,
                        // filtered out of user-visible matrix context.
                        matrixContext: {
                            ...combos[ci],
                            __job_total: String(total),
                            __job_index: String(ci),
                        },
                    });
                }
            }
            else {
                expandedJobs.push({ workflowPath, taskName: id });
            }
        }
        // For single-job workflows, run directly without extra orchestration
        if (expandedJobs.length === 1) {
            const ej = expandedJobs[0];
            const secrets = loadMachineSecrets(repoRoot);
            const secretsFilePath = path.join(repoRoot, ".env.agent-ci");
            validateSecrets(workflowPath, ej.taskName, secrets, secretsFilePath);
            const steps = await parseWorkflowSteps(workflowPath, ej.taskName, secrets, ej.matrixContext);
            const services = await parseWorkflowServices(workflowPath, ej.taskName);
            const container = await parseWorkflowContainer(workflowPath, ej.taskName);
            // Synthesize a local "workflow_job" payload mirroring GitHub's shape.
            const job = {
                deliveryId: `run-${Date.now()}`,
                eventType: "workflow_job",
                githubJobId: `local-${Date.now()}-${Math.floor(Math.random() * 100000)}`,
                githubRepo: githubRepo,
                githubToken: "mock_token",
                headSha: headSha,
                shaRef: shaRef,
                env: { AGENT_CI_LOCAL: "true" },
                repository: {
                    name: name,
                    full_name: githubRepo,
                    owner: { login: owner },
                    default_branch: "main",
                },
                steps,
                services,
                container: container ?? undefined,
                workflowPath,
                taskId: ej.taskName,
            };
            const result = await executeLocalJob(job, { pauseOnFailure, store });
            return [result];
        }
        // ── Multi-job orchestration ────────────────────────────────────────────────
        const maxJobs = getDefaultMaxConcurrentJobs();
        // ── Warm-cache check ───────────────────────────────────────────────────────
        const repoSlug = githubRepo.replace("/", "-");
        let lockfileHash = "no-lockfile";
        try {
            lockfileHash = computeLockfileHash(repoRoot);
        }
        catch { }
        const warmModulesDir = path.resolve(getWorkingDirectory(), "cache", "warm-modules", repoSlug, lockfileHash);
        let warm = isWarmNodeModules(warmModulesDir);
        // Naming convention: agent-ci-<N>[-j<idx>][-m<shardIdx>]
        const baseRunNum = getNextLogNum("agent-ci");
        let globalIdx = 0;
        // Build the job payload (without steps/services/container, which are
        // parsed lazily in runJob just before execution).
        const buildJob = (ej) => {
            const secrets = loadMachineSecrets(repoRoot);
            const secretsFilePath = path.join(repoRoot, ".env.agent-ci");
            validateSecrets(workflowPath, ej.taskName, secrets, secretsFilePath);
            const idx = globalIdx++;
            let suffix = `-j${idx + 1}`;
            if (ej.matrixContext) {
                const shardIdx = parseInt(ej.matrixContext.__job_index ?? "0", 10) + 1;
                suffix += `-m${shardIdx}`;
            }
            const derivedRunnerName = `agent-ci-${baseRunNum}${suffix}`;
            return {
                deliveryId: `run-${Date.now()}`,
                eventType: "workflow_job",
                githubJobId: Math.floor(Math.random() * 1000000).toString(),
                githubRepo: githubRepo,
                githubToken: "mock_token",
                headSha: headSha,
                shaRef: shaRef,
                env: { AGENT_CI_LOCAL: "true" },
                repository: {
                    name: name,
                    full_name: githubRepo,
                    owner: { login: owner },
                    default_branch: "main",
                },
                runnerName: derivedRunnerName,
                steps: undefined,
                services: undefined,
                container: undefined,
                workflowPath,
                taskId: ej.taskName,
            };
        };
        // Parse this job's steps/services/container and execute it locally.
        const runJob = async (ej) => {
            const { taskName, matrixContext } = ej;
            debugCli(`Running: ${path.basename(workflowPath)} | Task: ${taskName}${matrixContext ? ` | Matrix: ${JSON.stringify(Object.fromEntries(Object.entries(matrixContext).filter(([k]) => !k.startsWith("__"))))}` : ""}`);
            const secrets = loadMachineSecrets(repoRoot);
            const secretsFilePath = path.join(repoRoot, ".env.agent-ci");
            validateSecrets(workflowPath, taskName, secrets, secretsFilePath);
            const steps = await parseWorkflowSteps(workflowPath, taskName, secrets, matrixContext);
            const services = await parseWorkflowServices(workflowPath, taskName);
            const container = await parseWorkflowContainer(workflowPath, taskName);
            const job = buildJob(ej);
            job.steps = steps;
            job.services = services;
            job.container = container ?? undefined;
            return executeLocalJob(job, { pauseOnFailure, store });
        };
        pruneOrphanedDockerResources();
        const limiter = createConcurrencyLimiter(maxJobs);
        const allResults = [];
        // ── Dependency-aware wave scheduling ──────────────────────────────────────
        const deps = parseJobDependencies(workflowPath);
        const waves = topoSort(deps);
        const taskNamesInWf = new Set(expandedJobs.map((j) => j.taskName));
        // Keep only job ids present in this workflow; drop empty waves.
        const filteredWaves = waves
            .map((wave) => wave.filter((jobId) => taskNamesInWf.has(jobId)))
            .filter((wave) => wave.length > 0);
        if (filteredWaves.length === 0) {
            // No dependency info — run everything as a single wave.
            filteredWaves.push(Array.from(taskNamesInWf));
        }
        for (let wi = 0; wi < filteredWaves.length; wi++) {
            const waveJobIds = new Set(filteredWaves[wi]);
            const waveJobs = expandedJobs.filter((j) => waveJobIds.has(j.taskName));
            if (waveJobs.length === 0) {
                continue;
            }
            // ── Warm-cache serialization for the first wave ────────────────────────
            if (!warm && wi === 0 && waveJobs.length > 1) {
                debugCli("Cold cache — running first job to populate warm modules...");
                const firstResult = await runJob(waveJobs[0]);
                allResults.push(firstResult);
                const results = await Promise.allSettled(waveJobs.slice(1).map((ej) => limiter.run(() => runJob(ej))));
                for (const r of results) {
                    if (r.status === "fulfilled") {
                        allResults.push(r.value);
                    }
                }
                // Later waves can now run fully parallel.
                warm = true;
            }
            else {
                const results = await Promise.allSettled(waveJobs.map((ej) => limiter.run(() => runJob(ej))));
                for (const r of results) {
                    if (r.status === "fulfilled") {
                        allResults.push(r.value);
                    }
                }
            }
            // Abort remaining waves if this wave had failures
            if (allResults.some((r) => !r.succeeded) && wi < filteredWaves.length - 1) {
                debugCli(`Wave ${wi + 1} had failures — aborting remaining waves for ${path.basename(workflowPath)}`);
                break;
            }
        }
        return allResults;
    }
    catch (error) {
        console.error(`[Agent CI] Failed to trigger run: ${error.message}`);
        return [];
    }
}
|
|
568
|
+
// ─── Utilities ────────────────────────────────────────────────────────────────
|
|
569
|
+
// Print CLI usage help to stdout (shown on bad invocation or unknown command).
function printUsage() {
    console.log("Usage: agent-ci <command> [args]");
    console.log("");
    console.log("Commands:");
    console.log(" run [sha] --workflow <path> Run all jobs in a workflow file (defaults to HEAD)");
    console.log(" run --all Run all relevant PR/Push workflows for current branch");
    console.log(" retry --name <name> Send retry signal to a paused runner");
    console.log(" --from-step <N> Re-run from step N (skips earlier steps)");
    console.log(" --from-start Re-run all run: steps from the beginning");
    console.log(" abort --name <name> Send abort signal to a paused runner");
    console.log("");
    console.log("Options:");
    console.log(" -w, --workflow <path> Path to the workflow file");
    console.log(" -a, --all Discover and run all relevant workflows");
    console.log(" -p, --pause-on-failure Pause on step failure for interactive debugging");
    console.log(" -q, --quiet Suppress animated rendering (also enabled by AI_AGENT=1)");
}
|
|
586
|
+
/**
 * Find the git repository root by walking upward from process.cwd()
 * until a `.git` entry is found. Falls back to the current working
 * directory when the walk reaches "/" without finding one.
 */
function resolveRepoRoot() {
    const start = process.cwd();
    let dir = start;
    while (dir !== "/") {
        if (fs.existsSync(path.join(dir, ".git"))) {
            return dir;
        }
        dir = path.dirname(dir);
    }
    return start;
}
|
|
593
|
+
/**
 * Find the git repository root for a workflow file by walking upward from
 * the file's directory. Delegates to resolveRepoRoot() (cwd-based search)
 * when no `.git` entry is found before reaching "/".
 */
function resolveRepoRootFromWorkflow(workflowPath) {
    for (let dir = path.dirname(workflowPath); dir !== "/"; dir = path.dirname(dir)) {
        if (fs.existsSync(path.join(dir, ".git"))) {
            return dir;
        }
    }
    return resolveRepoRoot();
}
|
|
600
|
+
/**
 * Resolve the `owner/repo` slug for a repository.
 * Prefers the `origin` remote URL; falls back to `config.GITHUB_REPO`
 * when git is unavailable or no remote is configured.
 *
 * @param {string} repoRoot - Path to the repository root.
 * @returns {string} The `owner/repo` slug.
 */
function resolveRepoInfo(repoRoot) {
    let githubRepo = config.GITHUB_REPO;
    try {
        const remoteUrl = execSync("git remote get-url origin", { cwd: repoRoot }).toString().trim();
        // Handles SSH (git@host:owner/repo[.git]) and HTTPS URLs. The ".git"
        // suffix is optional — HTTPS remotes often lack it. Same pattern as
        // deriveGithubRepo() in config.js (the old regex required ".git" and
        // silently fell back to the config default for suffix-less remotes).
        const match = remoteUrl.match(/[/:]([^/]+\/[^/]+?)(?:\.git)?$/);
        if (match) {
            githubRepo = match[1];
        }
    }
    catch {
        debugCli("Could not detect remote 'origin', using config default.");
    }
    return githubRepo;
}
|
|
614
|
+
/**
 * Resolve a user-supplied git ref to a full commit SHA.
 *
 * @param {string} repoRoot - Repository to resolve the ref in.
 * @param {string} sha - Ref to resolve (branch, tag, short SHA, ...).
 * @returns {{ headSha: string, shaRef: string }} Resolved SHA plus the original ref.
 * @throws {Error} When the ref cannot be resolved; the underlying git
 *   failure is preserved on `error.cause`.
 */
function resolveHeadSha(repoRoot, sha) {
    try {
        // NOTE: `sha` is interpolated into a shell command; it comes from the
        // local CLI user, but should not be fed untrusted input.
        return {
            headSha: execSync(`git rev-parse ${sha}`, { cwd: repoRoot }).toString().trim(),
            shaRef: sha,
        };
    }
    catch (err) {
        // Wrap instead of swallowing so callers can inspect the git error.
        throw new Error(`Failed to resolve ref: ${sha}`, { cause: err });
    }
}
|
|
625
|
+
// Entry point: execute the CLI; any unhandled rejection is reported and
// converted into a non-zero exit code.
run().catch((err) => {
    console.error("[Agent CI] Fatal error:", err);
    process.exit(1);
});
|
package/dist/config.js
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
import { execSync } from "child_process";
|
|
2
|
+
import fs from "fs";
|
|
3
|
+
import path from "path";
|
|
4
|
+
import { PROJECT_ROOT } from "./output/working-directory.js";
|
|
5
|
+
/**
 * Derive `owner/repo` from the git remote URL.
 * Falls back to "unknown/unknown" if detection fails.
 */
function deriveGithubRepo() {
    const FALLBACK = "unknown/unknown";
    try {
        const output = execSync("git remote get-url origin", {
            cwd: PROJECT_ROOT,
            encoding: "utf-8",
        });
        // Handles both SSH (git@github.com:owner/repo.git) and HTTPS URLs;
        // the trailing ".git" is optional.
        const match = output.trim().match(/[/:]([^/]+\/[^/]+?)(?:\.git)?$/);
        return match ? match[1] : FALLBACK;
    }
    catch {
        // git not available or no remote configured
        return FALLBACK;
    }
}
|
|
26
|
+
// Runtime configuration. Environment variables take precedence; otherwise
// values are derived locally at module load time.
export const config = {
    // `owner/repo` slug — GITHUB_REPO env var wins, else derived from the git remote.
    GITHUB_REPO: process.env.GITHUB_REPO || deriveGithubRepo(),
    // Base URL for the (local mock) GitHub API endpoint.
    GITHUB_API_URL: process.env.GITHUB_API_URL || "http://localhost:8910",
};
|
|
30
|
+
/**
 * Load machine-local secrets from `.env.agent-ci` in `baseDir` (defaults to
 * the agent-ci project root).
 * The file uses KEY=VALUE syntax (lines starting with # are ignored).
 * Returns an empty object if the file doesn't exist.
 *
 * @param {string} [baseDir] - Directory containing the secrets file.
 * @returns {Record<string, string>} Parsed key/value pairs.
 */
export function loadMachineSecrets(baseDir) {
    const envMachinePath = path.join(baseDir ?? PROJECT_ROOT, ".env.agent-ci");
    if (!fs.existsSync(envMachinePath)) {
        return {};
    }
    const secrets = {};
    const lines = fs.readFileSync(envMachinePath, "utf-8").split("\n");
    for (const line of lines) {
        const trimmed = line.trim();
        // Skip blanks and comments.
        if (!trimmed || trimmed.startsWith("#")) {
            continue;
        }
        // Require a non-empty key before '='.
        const eqIdx = trimmed.indexOf("=");
        if (eqIdx < 1) {
            continue;
        }
        const key = trimmed.slice(0, eqIdx).trim();
        let value = trimmed.slice(eqIdx + 1).trim();
        // Strip optional surrounding quotes. The length guard prevents a lone
        // quote character (e.g. `KEY="`) from being collapsed to "" by
        // slice(1, -1), which the unguarded startsWith/endsWith check allowed.
        if (value.length >= 2 &&
            ((value.startsWith('"') && value.endsWith('"')) ||
                (value.startsWith("'") && value.endsWith("'")))) {
            value = value.slice(1, -1);
        }
        if (key) {
            secrets[key] = value;
        }
    }
    return secrets;
}
|