@redwoodjs/agent-ci 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +110 -0
- package/README.md +79 -0
- package/dist/cli.js +628 -0
- package/dist/config.js +63 -0
- package/dist/docker/container-config.js +178 -0
- package/dist/docker/container-config.test.js +156 -0
- package/dist/docker/service-containers.js +205 -0
- package/dist/docker/service-containers.test.js +236 -0
- package/dist/docker/shutdown.js +120 -0
- package/dist/docker/shutdown.test.js +148 -0
- package/dist/output/agent-mode.js +7 -0
- package/dist/output/agent-mode.test.js +36 -0
- package/dist/output/cleanup.js +218 -0
- package/dist/output/cleanup.test.js +241 -0
- package/dist/output/concurrency.js +57 -0
- package/dist/output/concurrency.test.js +88 -0
- package/dist/output/debug.js +36 -0
- package/dist/output/logger.js +57 -0
- package/dist/output/logger.test.js +82 -0
- package/dist/output/reporter.js +67 -0
- package/dist/output/run-state.js +126 -0
- package/dist/output/run-state.test.js +169 -0
- package/dist/output/state-renderer.js +149 -0
- package/dist/output/state-renderer.test.js +488 -0
- package/dist/output/tree-renderer.js +52 -0
- package/dist/output/tree-renderer.test.js +105 -0
- package/dist/output/working-directory.js +20 -0
- package/dist/runner/directory-setup.js +98 -0
- package/dist/runner/directory-setup.test.js +31 -0
- package/dist/runner/git-shim.js +92 -0
- package/dist/runner/git-shim.test.js +57 -0
- package/dist/runner/local-job.js +691 -0
- package/dist/runner/metadata.js +90 -0
- package/dist/runner/metadata.test.js +127 -0
- package/dist/runner/result-builder.js +119 -0
- package/dist/runner/result-builder.test.js +177 -0
- package/dist/runner/step-wrapper.js +82 -0
- package/dist/runner/step-wrapper.test.js +77 -0
- package/dist/runner/sync.js +80 -0
- package/dist/runner/workspace.js +66 -0
- package/dist/types.js +1 -0
- package/dist/workflow/job-scheduler.js +62 -0
- package/dist/workflow/job-scheduler.test.js +130 -0
- package/dist/workflow/workflow-parser.js +556 -0
- package/dist/workflow/workflow-parser.test.js +642 -0
- package/package.json +39 -0
- package/shim.sh +11 -0
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import path from "path";
|
|
2
|
+
import fs from "fs";
|
|
3
|
+
import { getWorkingDirectory } from "./working-directory.js";
|
|
4
|
+
/** Root of all run directories: `<workingDir>/runs/` */
function getRunsDir() {
  const base = getWorkingDirectory();
  return path.join(base, "runs");
}
|
|
8
|
+
/** Create the runs/ root (including missing parents) if it does not exist. */
export function ensureLogDirs() {
  const runsDir = getRunsDir();
  fs.mkdirSync(runsDir, { recursive: true });
}
|
|
11
|
+
/**
 * Determine the next free run number for `prefix` by scanning `runs/`.
 *
 * Directory names carry job/matrix/retry suffixes that must be ignored:
 *   agent-ci-redwoodjssdk-14        → 14
 *   agent-ci-redwoodjssdk-15-j1     → 15
 *   agent-ci-redwoodjssdk-15-j1-m2  → 15
 * Strategy: strip any -j<N>, -m<N>, -r<N> suffixes first, then read the
 * trailing number. Returns (highest found) + 1, or 1 when nothing matches.
 */
export function getNextLogNum(prefix) {
  const runsDir = getRunsDir();
  if (!fs.existsSync(runsDir)) {
    return 1;
  }
  let highest = 0;
  for (const entry of fs.readdirSync(runsDir, { withFileTypes: true })) {
    if (!entry.isDirectory() || !entry.name.startsWith(`${prefix}-`)) {
      continue;
    }
    // Peel off job/matrix/retry suffixes so only the base counter remains.
    const baseName = entry.name
      .replace(/-j\d+(-m\d+)?(-r\d+)?$/, "")
      .replace(/-m\d+(-r\d+)?$/, "")
      .replace(/-r\d+$/, "");
    const match = baseName.match(/-(\d+)$/);
    const num = match ? parseInt(match[1], 10) : 0;
    if (num > highest) {
      highest = num;
    }
  }
  return highest + 1;
}
|
|
34
|
+
/**
 * Create (and mkdir) the directory layout for a new run.
 *
 * When `preferredName` is given it is used verbatim and `num` stays 0;
 * otherwise the name is `${prefix}-${n}` with an auto-incremented `n`.
 * Returns { num, name, runDir, logDir, outputLogPath, debugLogPath }.
 */
export function createLogContext(prefix, preferredName) {
  ensureLogDirs();
  let num = 0;
  let name;
  if (preferredName) {
    name = preferredName;
  } else {
    num = getNextLogNum(prefix);
    name = `${prefix}-${num}`;
  }
  const runDir = path.join(getRunsDir(), name);
  const logDir = path.join(runDir, "logs");
  fs.mkdirSync(logDir, { recursive: true });
  return {
    num,
    name,
    runDir,
    logDir,
    outputLogPath: path.join(logDir, "output.log"),
    debugLogPath: path.join(logDir, "debug.log"),
  };
}
|
|
54
|
+
/**
 * Finalize a log file after a run completes.
 * The log stays where it was written; the path is returned unchanged.
 * The extra parameters exist for interface compatibility and are ignored.
 */
export function finalizeLog(logPath, _exitCode, _commitSha, _preferredName) {
  return logPath;
}
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import os from "node:os";
|
|
5
|
+
// Unit tests for logger.js. Every test gets a fresh temp working directory and
// re-imports the modules under test after vi.resetModules(), so the
// module-level state held by working-directory.js is isolated per test.
describe("Logger utilities", () => {
  // Scratch directory used as the working directory for each test.
  let tmpDir;
  beforeEach(() => {
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "agent-ci-logger-test-"));
    // Drop the module cache so each test sees pristine module state.
    vi.resetModules();
  });
  afterEach(() => {
    fs.rmSync(tmpDir, { recursive: true, force: true });
    vi.restoreAllMocks();
  });
  describe("ensureLogDirs", () => {
    it("creates the runs/ directory", async () => {
      const { setWorkingDirectory } = await import("./working-directory.js");
      const { ensureLogDirs } = await import("./logger.js");
      setWorkingDirectory(tmpDir);
      ensureLogDirs();
      expect(fs.existsSync(path.join(tmpDir, "runs"))).toBe(true);
    });
  });
  describe("getNextLogNum", () => {
    it("returns 1 when runs/ dir is empty or absent", async () => {
      const { setWorkingDirectory } = await import("./working-directory.js");
      const { getNextLogNum } = await import("./logger.js");
      setWorkingDirectory(tmpDir);
      expect(getNextLogNum("agent-ci")).toBe(1);
    });
    it("returns next number after existing agent-ci-* entries", async () => {
      const { setWorkingDirectory } = await import("./working-directory.js");
      const { getNextLogNum } = await import("./logger.js");
      setWorkingDirectory(tmpDir);
      fs.mkdirSync(path.join(tmpDir, "runs", "agent-ci-1"), { recursive: true });
      fs.mkdirSync(path.join(tmpDir, "runs", "agent-ci-2"), { recursive: true });
      expect(getNextLogNum("agent-ci")).toBe(3);
    });
    it("counts only the base run number from multi-job names", async () => {
      const { setWorkingDirectory } = await import("./working-directory.js");
      const { getNextLogNum } = await import("./logger.js");
      setWorkingDirectory(tmpDir);
      // Multi-job run: agent-ci-15 with -j1-m2 suffix — base is 15
      fs.mkdirSync(path.join(tmpDir, "runs", "agent-ci-redwoodjssdk-14"), { recursive: true });
      fs.mkdirSync(path.join(tmpDir, "runs", "agent-ci-redwoodjssdk-15-j1"), { recursive: true });
      fs.mkdirSync(path.join(tmpDir, "runs", "agent-ci-redwoodjssdk-15-j2-m1"), {
        recursive: true,
      });
      expect(getNextLogNum("agent-ci")).toBe(16);
    });
  });
  describe("createLogContext", () => {
    it("creates runDir/logs/ and returns correct paths", async () => {
      const { setWorkingDirectory } = await import("./working-directory.js");
      const { createLogContext } = await import("./logger.js");
      setWorkingDirectory(tmpDir);
      const ctx = createLogContext("agent-ci");
      expect(ctx.name).toMatch(/^agent-ci-\d+$/);
      expect(fs.existsSync(ctx.runDir)).toBe(true);
      expect(fs.existsSync(ctx.logDir)).toBe(true);
      expect(ctx.outputLogPath).toBe(path.join(ctx.logDir, "output.log"));
      expect(ctx.debugLogPath).toBe(path.join(ctx.logDir, "debug.log"));
    });
    it("uses preferredName when provided", async () => {
      const { setWorkingDirectory } = await import("./working-directory.js");
      const { createLogContext } = await import("./logger.js");
      setWorkingDirectory(tmpDir);
      const ctx = createLogContext("agent-ci", "agent-ci-redwoodjssdk-42");
      expect(ctx.name).toBe("agent-ci-redwoodjssdk-42");
      expect(ctx.runDir).toBe(path.join(tmpDir, "runs", "agent-ci-redwoodjssdk-42"));
      expect(ctx.logDir).toBe(path.join(tmpDir, "runs", "agent-ci-redwoodjssdk-42", "logs"));
    });
    it("auto-increments when no preferredName given", async () => {
      const { setWorkingDirectory } = await import("./working-directory.js");
      const { createLogContext } = await import("./logger.js");
      setWorkingDirectory(tmpDir);
      const first = createLogContext("agent-ci");
      const second = createLogContext("agent-ci");
      expect(second.num).toBe(first.num + 1);
    });
  });
});
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import fs from "fs";
|
|
2
|
+
// ─── Formatting helpers ───────────────────────────────────────────────────────
/**
 * Format a millisecond duration as a short human string:
 * "45s", "2m 30s", or "3m" for whole minutes.
 */
function formatDuration(ms) {
  const totalSeconds = Math.round(ms / 1000);
  if (totalSeconds < 60) {
    return `${totalSeconds}s`;
  }
  const minutes = Math.floor(totalSeconds / 60);
  const seconds = totalSeconds % 60;
  if (seconds > 0) {
    return `${minutes}m ${seconds}s`;
  }
  return `${minutes}m`;
}
// ─── Failures-first summary (emitted after all jobs complete) ─────────────────
/**
 * Print a human-readable end-of-run summary to stdout.
 *
 * Failed jobs are listed first — each with its failing step (when recorded),
 * exit code, and the tail of its output — followed by an overall
 * status/duration section.
 *
 * @param {Array<object>} results - per-job results; each carries `succeeded`,
 *   `durationMs`, `workflow`, `taskId`, and optionally `failedStep`,
 *   `failedExitCode`, `lastOutputLines`.
 * @param {string} [runDir] - run root directory, echoed when provided.
 */
export function printSummary(results, runDir) {
  const write = (text) => process.stdout.write(text);
  const failures = [];
  const passes = [];
  for (const result of results) {
    (result.succeeded ? passes : failures).push(result);
  }
  let totalMs = 0;
  for (const result of results) {
    totalMs += result.durationMs;
  }
  if (failures.length > 0) {
    write("\n━━━ FAILURES ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n\n");
    for (const f of failures) {
      // Only show the step name when the job recorded which step failed.
      const stepPart = f.failedStep ? ` > "${f.failedStep}"` : "";
      write(` ✗ ${f.workflow} > ${f.taskId}${stepPart}\n`);
      if (f.failedExitCode !== undefined) {
        write(` Exit code: ${f.failedExitCode}\n`);
      }
      if (f.lastOutputLines && f.lastOutputLines.length > 0) {
        write(` Last output:\n`);
        for (const line of f.lastOutputLines) {
          write(` ${line}\n`);
        }
      }
      write("\n");
    }
  }
  write("\n━━━ SUMMARY ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n\n");
  const status = failures.length > 0
    ? `✗ ${failures.length} failed, ${passes.length} passed`
    : `✓ ${passes.length} passed`;
  write(` Status: ${status} (${results.length} total)\n`);
  write(` Duration: ${formatDuration(totalMs)}\n`);
  if (runDir) {
    write(` Root: ${runDir}\n`);
  }
  write("\n");
}
|
|
49
|
+
// ─── Tail helper ──────────────────────────────────────────────────────────────
/**
 * Read the last `lineCount` lines of a text file.
 *
 * Trailing blank lines are dropped before counting. Returns [] when the file
 * is missing or unreadable (best-effort: callers only use this for
 * diagnostic context).
 *
 * @param {string} filePath - file to read
 * @param {number} [lineCount=20] - maximum number of lines to return
 * @returns {string[]} the trailing lines, oldest first
 */
export function tailLogFile(filePath, lineCount = 20) {
  try {
    if (!fs.existsSync(filePath)) {
      return [];
    }
    const lines = fs.readFileSync(filePath, "utf-8").split("\n");
    // Trim trailing empty lines
    let end = lines.length;
    while (end > 0 && lines[end - 1].trim() === "") {
      end -= 1;
    }
    // Fix: the previous `lines.slice(-lineCount)` mishandled lineCount === 0,
    // because slice(-0) === slice(0) returns ALL lines instead of none.
    const start = Math.max(0, end - lineCount);
    return lines.slice(start, end);
  } catch {
    return [];
  }
}
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
import fs from "fs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
/**
 * Mutable run-state container with synchronous listener notification and
 * best-effort atomic JSON persistence.
 *
 * The in-memory `state` object is the source of truth; `save()` mirrors it to
 * disk via write-tmp-then-rename so external readers never see a torn file.
 */
export class RunStateStore {
  // Current run state: { runId, status, startedAt, workflows, [completedAt] }.
  state;
  // Destination of the persisted JSON snapshot.
  filePath;
  // Callbacks invoked synchronously after each state change.
  listeners = [];
  constructor(runId, filePath) {
    this.filePath = filePath;
    this.state = {
      runId,
      status: "running",
      startedAt: new Date().toISOString(),
      workflows: [],
    };
    // Ensure the parent directory exists before the first save().
    fs.mkdirSync(path.dirname(filePath), { recursive: true });
  }
  /** Register a callback that fires synchronously on every state change. */
  onUpdate(listener) {
    this.listeners.push(listener);
  }
  /** The live (mutable) state object. */
  getState() {
    return this.state;
  }
  /**
   * Register a job under a workflow (creating the workflow entry if needed).
   * Call this before executing the job so the render loop can show it
   * immediately. A runnerId that is already registered is ignored.
   */
  addJob(workflowPath, jobId, runnerId, options) {
    let workflow = this.state.workflows.find((w) => w.path === workflowPath);
    if (workflow === undefined) {
      workflow = {
        id: path.basename(workflowPath),
        path: workflowPath,
        status: "queued",
        jobs: [],
      };
      this.state.workflows.push(workflow);
    }
    const alreadyRegistered = workflow.jobs.some((j) => j.runnerId === runnerId);
    if (!alreadyRegistered) {
      workflow.jobs.push({
        id: jobId,
        runnerId,
        status: "queued",
        steps: [],
        ...options,
      });
    }
    this.notify();
  }
  /**
   * Update fields on a job (matched by runnerId), re-derive the parent
   * workflow's status, then persist and notify listeners.
   */
  updateJob(runnerId, updates) {
    const owner = this.state.workflows.find((w) =>
      w.jobs.some((j) => j.runnerId === runnerId)
    );
    if (owner) {
      const job = owner.jobs.find((j) => j.runnerId === runnerId);
      Object.assign(job, updates);
      this.syncWorkflowStatus(owner);
    }
    this.save();
    this.notify();
  }
  /** Mark the overall run complete and persist. */
  complete(status) {
    this.state.status = status;
    this.state.completedAt = new Date().toISOString();
    this.save();
  }
  /**
   * Atomically write state to disk (write-tmp-then-rename) so concurrent
   * readers never observe a partially-written file.
   */
  save() {
    const tmpPath = `${this.filePath}.tmp`;
    try {
      fs.writeFileSync(tmpPath, JSON.stringify(this.state, null, 2));
      fs.renameSync(tmpPath, this.filePath);
    } catch {
      // Best-effort — rendering uses in-memory state, not disk
    }
  }
  /**
   * Load a previously-written RunState from disk, falling back to the `.tmp`
   * sibling when the main file is missing or unreadable.
   */
  static load(filePath) {
    try {
      return JSON.parse(fs.readFileSync(filePath, "utf-8"));
    } catch {
      return JSON.parse(fs.readFileSync(`${filePath}.tmp`, "utf-8"));
    }
  }
  /** Invoke every registered listener; listener errors are isolated. */
  notify() {
    for (const listener of this.listeners) {
      try {
        listener(this.state);
      } catch {
        // Best-effort — don't let listener errors break state updates
      }
    }
  }
  /**
   * Derive a workflow's status from its jobs: all completed → completed
   * (stamping completedAt once); any failed → failed; any active
   * (running/booting/paused) → running (stamping startedAt once).
   */
  syncWorkflowStatus(wf) {
    const statuses = wf.jobs.map((j) => j.status);
    if (statuses.length === 0) {
      return;
    }
    if (statuses.every((s) => s === "completed")) {
      wf.status = "completed";
      if (!wf.completedAt) {
        wf.completedAt = new Date().toISOString();
      }
      return;
    }
    if (statuses.some((s) => s === "failed")) {
      wf.status = "failed";
      return;
    }
    const anyActive = statuses.some(
      (s) => s === "running" || s === "booting" || s === "paused"
    );
    if (anyActive) {
      wf.status = "running";
      if (!wf.startedAt) {
        wf.startedAt = new Date().toISOString();
      }
    }
  }
}
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
|
2
|
+
import fs from "fs";
|
|
3
|
+
import os from "os";
|
|
4
|
+
import path from "path";
|
|
5
|
+
import { RunStateStore } from "./run-state.js";
|
|
6
|
+
// Per-test scratch directory that holds each store's state file.
let tmpDir;
beforeEach(() => {
  tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "run-state-test-"));
});
afterEach(() => {
  fs.rmSync(tmpDir, { recursive: true, force: true });
});
// Convenience factory: a store persisting to <tmpDir>/<runId>/run-state.json.
function makeStore(runId = "test-run-1") {
  return new RunStateStore(runId, path.join(tmpDir, runId, "run-state.json"));
}
describe("RunStateStore", () => {
  it("initialises with an empty running state", () => {
    const store = makeStore();
    const state = store.getState();
    expect(state.status).toBe("running");
    expect(state.workflows).toEqual([]);
    expect(state.runId).toBe("test-run-1");
    expect(state.startedAt).toMatch(/^\d{4}-/); // ISO date
  });
  it("addJob creates a workflow entry and adds a job", () => {
    const store = makeStore();
    store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1");
    const state = store.getState();
    expect(state.workflows).toHaveLength(1);
    expect(state.workflows[0].id).toBe("ci.yml");
    expect(state.workflows[0].jobs).toHaveLength(1);
    expect(state.workflows[0].jobs[0].id).toBe("test");
    expect(state.workflows[0].jobs[0].runnerId).toBe("agent-ci-1");
    expect(state.workflows[0].jobs[0].status).toBe("queued");
  });
  it("addJob appends to an existing workflow", () => {
    const store = makeStore();
    store.addJob("/repo/.github/workflows/ci.yml", "lint", "agent-ci-1-j1");
    store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1-j2");
    const wf = store.getState().workflows[0];
    expect(wf.jobs).toHaveLength(2);
    expect(wf.jobs[0].id).toBe("lint");
    expect(wf.jobs[1].id).toBe("test");
  });
  it("addJob ignores duplicate runnerId", () => {
    const store = makeStore();
    store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1");
    store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1");
    expect(store.getState().workflows[0].jobs).toHaveLength(1);
  });
  it("updateJob updates the correct job", () => {
    const store = makeStore();
    store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1");
    store.updateJob("agent-ci-1", { status: "booting", startedAt: "2024-01-01T00:00:00Z" });
    const job = store.getState().workflows[0].jobs[0];
    expect(job.status).toBe("booting");
    expect(job.startedAt).toBe("2024-01-01T00:00:00Z");
  });
  it("updateJob syncs workflow status to running when a job boots", () => {
    const store = makeStore();
    store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1");
    store.updateJob("agent-ci-1", { status: "booting" });
    expect(store.getState().workflows[0].status).toBe("running");
  });
  it("updateJob syncs workflow status to completed when all jobs complete", () => {
    const store = makeStore();
    store.addJob("/repo/.github/workflows/ci.yml", "lint", "agent-ci-1-j1");
    store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1-j2");
    store.updateJob("agent-ci-1-j1", { status: "completed" });
    store.updateJob("agent-ci-1-j2", { status: "completed" });
    expect(store.getState().workflows[0].status).toBe("completed");
  });
  it("updateJob syncs workflow status to failed when any job fails", () => {
    const store = makeStore();
    store.addJob("/repo/.github/workflows/ci.yml", "lint", "agent-ci-1-j1");
    store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1-j2");
    store.updateJob("agent-ci-1-j1", { status: "failed" });
    store.updateJob("agent-ci-1-j2", { status: "completed" });
    expect(store.getState().workflows[0].status).toBe("failed");
  });
  it("updateJob handles pause state", () => {
    const store = makeStore();
    store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1");
    store.updateJob("agent-ci-1", {
      status: "paused",
      pausedAtStep: "Run tests",
      pausedAtMs: "2024-01-01T00:01:00Z",
      attempt: 1,
      lastOutputLines: ["Error: test failed"],
    });
    const job = store.getState().workflows[0].jobs[0];
    expect(job.status).toBe("paused");
    expect(job.pausedAtStep).toBe("Run tests");
    expect(job.attempt).toBe(1);
    expect(job.lastOutputLines).toEqual(["Error: test failed"]);
  });
  it("complete marks the run as completed", () => {
    const store = makeStore();
    store.complete("completed");
    const state = store.getState();
    expect(state.status).toBe("completed");
    expect(state.completedAt).toBeDefined();
  });
  it("complete marks the run as failed", () => {
    const store = makeStore();
    store.complete("failed");
    expect(store.getState().status).toBe("failed");
  });
  describe("atomic persistence", () => {
    it("save writes a valid JSON file", () => {
      const store = makeStore("persist-test");
      store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1");
      store.save();
      const filePath = path.join(tmpDir, "persist-test", "run-state.json");
      expect(fs.existsSync(filePath)).toBe(true);
      const parsed = JSON.parse(fs.readFileSync(filePath, "utf-8"));
      expect(parsed.runId).toBe("persist-test");
      expect(parsed.workflows).toHaveLength(1);
    });
    it("load round-trips the state from disk", () => {
      const store = makeStore("roundtrip");
      store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1");
      store.updateJob("agent-ci-1", { status: "running" });
      store.save();
      const filePath = path.join(tmpDir, "roundtrip", "run-state.json");
      const loaded = RunStateStore.load(filePath);
      expect(loaded.runId).toBe("roundtrip");
      expect(loaded.workflows[0].jobs[0].status).toBe("running");
    });
    it("save does not leave .tmp files behind", () => {
      // The tmp file must be renamed over the real file, never left in place.
      const store = makeStore("no-tmp");
      store.save();
      const dir = path.join(tmpDir, "no-tmp");
      const files = fs.readdirSync(dir);
      expect(files.some((f) => f.endsWith(".tmp"))).toBe(false);
    });
  });
  describe("matrix jobs", () => {
    it("supports multiple matrix combinations under one workflow", () => {
      const store = makeStore();
      store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1-j1-m1", {
        matrixValues: { node: "18" },
      });
      store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1-j1-m2", {
        matrixValues: { node: "20" },
      });
      const wf = store.getState().workflows[0];
      expect(wf.jobs).toHaveLength(2);
      expect(wf.jobs[0].matrixValues).toEqual({ node: "18" });
      expect(wf.jobs[1].matrixValues).toEqual({ node: "20" });
    });
  });
  describe("step state", () => {
    it("can update job with steps array", () => {
      const store = makeStore();
      store.addJob("/repo/.github/workflows/ci.yml", "test", "agent-ci-1");
      store.updateJob("agent-ci-1", {
        steps: [
          { name: "Set up job", index: 1, status: "completed", durationMs: 1000 },
          { name: "Run tests", index: 2, status: "running", startedAt: new Date().toISOString() },
        ],
      });
      const job = store.getState().workflows[0].jobs[0];
      expect(job.steps).toHaveLength(2);
      expect(job.steps[0].status).toBe("completed");
      expect(job.steps[1].status).toBe("running");
    });
  });
});
|
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
// ─── State Renderer ───────────────────────────────────────────────────────────
|
|
2
|
+
// Pure function: RunState → string.
|
|
3
|
+
// The render loop in cli.ts calls this on every tick and passes the result to
|
|
4
|
+
// logUpdate. No side effects, no I/O — fully testable in isolation.
|
|
5
|
+
import path from "path";
|
|
6
|
+
import { renderTree } from "./tree-renderer.js";
|
|
7
|
+
// ─── ANSI helpers ─────────────────────────────────────────────────────────────
// Escape character built via fromCharCode so the source stays ASCII-only.
const ESC = String.fromCharCode(27);
const YELLOW = `${ESC}[33m`;
const DIM = `${ESC}[2m`;
const RESET = `${ESC}[0m`;
// ─── Spinner ──────────────────────────────────────────────────────────────────
// Braille spinner; the frame advances every 80ms of wall-clock time.
const SPINNER_FRAMES = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"];
/** Current spinner frame, derived purely from Date.now(). */
function getSpinnerFrame() {
  const tick = Math.floor(Date.now() / 80);
  return SPINNER_FRAMES[tick % SPINNER_FRAMES.length];
}
|
|
16
|
+
// ─── Formatting ───────────────────────────────────────────────────────────────
/** Format a duration: sub-second as "Nms", otherwise one-decimal seconds "N.Ns". */
function fmtMs(ms) {
  if (ms >= 1000) {
    return `${(ms / 1000).toFixed(1)}s`;
  }
  return `${ms}ms`;
}
|
|
20
|
+
// ─── Step node builder ────────────────────────────────────────────────────────
/**
 * Build the TreeNode for a single step.
 *
 * `padW` is the width the step index is right-padded to so columns line up.
 * Running/paused steps show live or frozen elapsed seconds; terminal states
 * show the recorded duration when available.
 */
function buildStepNode(step, job, padW) {
  const num = String(step.index).padStart(padW);
  const dur = step.durationMs !== undefined ? ` (${Math.round(step.durationMs / 1000)}s)` : "";
  const secondsSince = (iso) => Math.round((Date.now() - new Date(iso).getTime()) / 1000);
  if (step.status === "running") {
    const elapsed = step.startedAt ? secondsSince(step.startedAt) : 0;
    const frame = getSpinnerFrame();
    // Retrying (was paused, now running again on same step)
    const isRetry = (job.attempt ?? 0) > 0 && job.pausedAtStep === step.name;
    const suffix = isRetry ? ` — retrying (${elapsed}s...)` : ` (${elapsed}s...)`;
    return { label: `${frame} ${num}. ${step.name}${suffix}` };
  }
  if (step.status === "paused") {
    // Freeze the elapsed display at the pause timestamp when we have one.
    let frozenElapsed = 0;
    if (job.pausedAtMs && step.startedAt) {
      frozenElapsed = Math.round(
        (new Date(job.pausedAtMs).getTime() - new Date(step.startedAt).getTime()) / 1000
      );
    } else if (step.startedAt) {
      frozenElapsed = secondsSince(step.startedAt);
    }
    return {
      label: `⏸ ${num}. ${step.name} (${frozenElapsed}s)`,
      children: [{ label: `${YELLOW}Step failed attempt #${job.attempt ?? 1}${RESET}` }],
    };
  }
  if (step.status === "failed") {
    return { label: `✗ ${num}. ${step.name}${dur}` };
  }
  if (step.status === "skipped") {
    return { label: `⊘ ${num}. ${step.name}${dur}` };
  }
  if (step.status === "completed") {
    return { label: `✓ ${num}. ${step.name}${dur}` };
  }
  // "pending" and any unrecognised status render as a not-yet-run step.
  return { label: `○ ${num}. ${step.name}` };
}
|
|
58
|
+
// ─── Job node builder ─────────────────────────────────────────────────────────
/**
 * Build the TreeNode(s) for a job.
 *
 * - `singleJobMode`: true when there is exactly one job across all workflows.
 *   In this mode the "Starting runner" node is shown alongside the job node,
 *   matching the pre-refactor single-workflow rendering.
 */
function buildJobNodes(job, singleJobMode) {
  // ── Booting (container starting, no timeline yet) ──────────────────────────
  if (job.status === "booting") {
    const elapsed = job.startedAt
      ? Math.round((Date.now() - new Date(job.startedAt).getTime()) / 1000)
      : 0;
    const bootNode = {
      label: `${getSpinnerFrame()} Starting runner ${job.runnerId} (${elapsed}s)`,
    };
    if (job.logDir) {
      bootNode.children = [{ label: `${DIM}Logs: ${job.logDir}${RESET}` }];
    }
    return [bootNode];
  }
  // ── Completed / failed in multi-job mode → collapse to one line ────────────
  if (!singleJobMode && (job.status === "completed" || job.status === "failed")) {
    // Fix: derive the icon from the job status itself. Keying off `failedStep`
    // mislabelled failed jobs that never recorded a failing step (e.g. boot or
    // infrastructure failures) with a ✓.
    const icon = job.status === "failed" ? "✗" : "✓";
    const dur = job.durationMs !== undefined ? ` (${Math.round(job.durationMs / 1000)}s)` : "";
    return [{ label: `${icon} ${job.id} ${DIM}${job.runnerId}${RESET}${dur}` }];
  }
  // ── Build step nodes ───────────────────────────────────────────────────────
  const padW = String(job.steps.length).length;
  const stepNodes = job.steps.map((step) => buildStepNode(step, job, padW));
  // Retry hint in multi-job paused mode (shown as a child node)
  if (!singleJobMode && job.status === "paused" && job.pausedAtStep) {
    stepNodes.push({
      label: `${YELLOW}↻ retry: agent-ci retry --runner ${job.runnerId}${RESET}`,
    });
  }
  // ── Single-job mode: show "Starting runner" alongside job node ─────────────
  if (singleJobMode) {
    const bootLabel = job.bootDurationMs !== undefined
      ? `Starting runner ${job.runnerId} (${fmtMs(job.bootDurationMs)})`
      : `Starting runner ${job.runnerId}`;
    const bootNode = { label: bootLabel };
    if (job.logDir) {
      bootNode.children = [{ label: `${DIM}Logs: ${job.logDir}${RESET}` }];
    }
    return [bootNode, { label: job.id, children: stepNodes }];
  }
  // ── Multi-job mode: show job name with steps as children ──────────────────
  return [{ label: `${job.id} ${DIM}${job.runnerId}${RESET}`, children: stepNodes }];
}
|
|
109
|
+
// ─── Main renderer ────────────────────────────────────────────────────────────
/**
 * Render the full run state into a string for display via logUpdate.
 *
 * Pure function of (state, wall-clock time): spinner frames are derived from
 * Date.now(), and no I/O is performed. In single-job mode, a paused job gets
 * its last output plus retry/abort hints appended below the tree.
 */
export function renderRunState(state) {
  const totalJobs = state.workflows.reduce((count, wf) => count + wf.jobs.length, 0);
  const singleJobMode = state.workflows.length === 1 && totalJobs === 1;
  // First paused job, captured for the single-job trailing output.
  let pausedSingleJob;
  const roots = state.workflows.map((wf) => {
    const children = [];
    for (const job of wf.jobs) {
      children.push(...buildJobNodes(job, singleJobMode));
      if (singleJobMode && job.status === "paused" && pausedSingleJob === undefined) {
        pausedSingleJob = job;
      }
    }
    return { label: path.basename(wf.path), children };
  });
  let output = renderTree(roots);
  // ── Single-job pause: append last output + retry/abort hints below tree ────
  if (pausedSingleJob) {
    const { lastOutputLines, runnerId } = pausedSingleJob;
    if (lastOutputLines && lastOutputLines.length > 0) {
      output += `\n\n ${DIM}Last output:${RESET}`;
      for (const line of lastOutputLines) {
        const trimmed = line.trimEnd();
        if (trimmed) {
          output += `\n ${DIM}${trimmed}${RESET}`;
        }
      }
    }
    output += `\n\n ${YELLOW}↻ To retry: agent-ci retry --runner ${runnerId} [enter]${RESET}`;
    output += `\n ${YELLOW}■ To abort: agent-ci abort --runner ${runnerId}${RESET}`;
  }
  return output;
}
|