@redwoodjs/agent-ci 0.7.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +450 -302
- package/dist/commit-status.js +1 -1
- package/dist/config.js +42 -15
- package/dist/config.test.js +157 -0
- package/dist/docker/container-config.js +8 -5
- package/dist/docker/container-config.test.js +60 -3
- package/dist/docker/docker-socket.js +119 -0
- package/dist/docker/docker-socket.test.js +117 -0
- package/dist/docker/repro-126.test.js +72 -0
- package/dist/docker/shutdown.js +21 -4
- package/dist/output/cleanup.js +42 -6
- package/dist/output/cleanup.test.js +15 -0
- package/dist/output/logger.js +35 -6
- package/dist/output/logger.test.js +40 -0
- package/dist/runner/directory-setup.js +2 -3
- package/dist/runner/local-job.js +88 -48
- package/dist/runner/local-job.test.js +43 -0
- package/dist/runner/result-builder.js +14 -1
- package/dist/runner/result-builder.test.js +21 -0
- package/dist/runner/workspace.js +3 -2
- package/dist/workflow/remote-workflow-fetch.js +131 -0
- package/dist/workflow/remote-workflow-fetch.test.js +233 -0
- package/dist/workflow/reusable-workflow.js +134 -0
- package/dist/workflow/reusable-workflow.test.js +655 -0
- package/dist/workflow/workflow-parser.js +51 -21
- package/dist/workflow/workflow-parser.test.js +116 -2
- package/package.json +2 -2
|
@@ -189,6 +189,18 @@ export function resolveJobOutputs(outputDefs, stepOutputs) {
|
|
|
189
189
|
}
|
|
190
190
|
return result;
|
|
191
191
|
}
|
|
192
|
+
// ─── Job success determination ────────────────────────────────────────────────
/**
 * Determine whether a job succeeded based on container exit state and
 * whether the runner ever contacted the DTU.
 *
 * `isBooting` stays `true` when the runner never sent any timeline entries —
 * it started but couldn't reach the DTU or crashed before executing any steps.
 * That must be treated as a failure regardless of exit code.
 */
export function isJobSuccessful(opts) {
    // Any one of these conditions alone marks the job as failed.
    if (opts.isBooting) {
        return false;
    }
    if (opts.lastFailedStep !== null) {
        return false;
    }
    return opts.containerExitCode === 0;
}
|
|
192
204
|
/**
|
|
193
205
|
* Build the structured `JobResult` from container exit state and timeline data.
|
|
194
206
|
*/
|
|
@@ -218,7 +230,8 @@ export function buildJobResult(opts) {
|
|
|
218
230
|
result.lastOutputLines = failure.tailLines ?? [];
|
|
219
231
|
}
|
|
220
232
|
else {
|
|
221
|
-
|
|
233
|
+
// Boot failure — no timeline, so fall back to debug.log for error context
|
|
234
|
+
result.lastOutputLines = tailLogFile(debugLogPath);
|
|
222
235
|
}
|
|
223
236
|
}
|
|
224
237
|
// Attach raw step outputs (will be resolved to job outputs by cli.ts)
|
|
@@ -113,6 +113,27 @@ describe("extractFailureDetails", () => {
|
|
|
113
113
|
expect(details).toEqual({});
|
|
114
114
|
});
|
|
115
115
|
});
|
|
116
|
+
// ── isJobSuccessful ──────────────────────────────────────────────────────────
describe("isJobSuccessful", () => {
    // Lazily import the module under test per case, matching the file's pattern.
    const load = async () => (await import("./result-builder.js")).isJobSuccessful;
    it("succeeds when no failed step, exit code 0, and not booting", async () => {
        const isJobSuccessful = await load();
        expect(isJobSuccessful({ lastFailedStep: null, containerExitCode: 0, isBooting: false })).toBe(true);
    });
    it("fails when a step failed", async () => {
        const isJobSuccessful = await load();
        expect(isJobSuccessful({ lastFailedStep: "Build", containerExitCode: 0, isBooting: false })).toBe(false);
    });
    it("fails when container exit code is non-zero", async () => {
        const isJobSuccessful = await load();
        expect(isJobSuccessful({ lastFailedStep: null, containerExitCode: 1, isBooting: false })).toBe(false);
    });
    it("fails when runner never contacted DTU (isBooting=true)", async () => {
        const isJobSuccessful = await load();
        // Regression for #102: the container exits 0 with no failed steps, but
        // the runner never sent any timeline entries (isBooting stayed true).
        expect(isJobSuccessful({ lastFailedStep: null, containerExitCode: 0, isBooting: true })).toBe(false);
    });
});
|
|
116
137
|
// ── buildJobResult ────────────────────────────────────────────────────────────
|
|
117
138
|
describe("buildJobResult", () => {
|
|
118
139
|
let tmpDir;
|
package/dist/runner/workspace.js
CHANGED
|
@@ -1,7 +1,6 @@
|
|
|
1
1
|
import { execSync } from "child_process";
|
|
2
2
|
import { copyWorkspace } from "../output/cleanup.js";
|
|
3
3
|
import { findRepoRoot } from "./metadata.js";
|
|
4
|
-
import { config } from "../config.js";
|
|
5
4
|
/**
|
|
6
5
|
* Copy source files into the workspace directory, then initialise a fake
|
|
7
6
|
* git repo so `actions/checkout` finds a valid workspace.
|
|
@@ -31,7 +30,9 @@ export function prepareWorkspace(opts) {
|
|
|
31
30
|
// On macOS: per-file APFS CoW clones. On Linux: rsync. Fallback: fs.cpSync.
|
|
32
31
|
copyWorkspace(repoRoot, workspaceDir);
|
|
33
32
|
}
|
|
34
|
-
|
|
33
|
+
if (githubRepo) {
|
|
34
|
+
initFakeGitRepo(workspaceDir, githubRepo);
|
|
35
|
+
}
|
|
35
36
|
}
|
|
36
37
|
// ─── Fake git init ────────────────────────────────────────────────────────────
|
|
37
38
|
/**
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { execSync } from "node:child_process";
|
|
4
|
+
import { parse as parseYaml } from "yaml";
|
|
5
|
+
/**
 * Parse a remote reusable workflow ref string.
 * Format: owner/repo/path/to/file.yml@ref
 *
 * Returns `{ owner, repo, path, ref, raw }` on success, or `null` when the
 * string has no `@ref` suffix, an empty ref, or fewer than three `/`-separated
 * segments before the `@`.
 */
export function parseRemoteRef(uses) {
    const at = uses.lastIndexOf("@");
    if (at < 0) {
        return null;
    }
    const ref = uses.slice(at + 1);
    if (ref === "") {
        return null;
    }
    // Everything before the last "@" is owner/repo/path-to-file.
    const [owner, repo, ...fileSegments] = uses.slice(0, at).split("/");
    if (fileSegments.length === 0) {
        return null;
    }
    return {
        owner,
        repo,
        path: fileSegments.join("/"),
        ref,
        raw: uses,
    };
}
|
|
31
|
+
/** Returns true for 40-character hex SHA refs (immutable, safe to cache forever). */
export function isShaRef(ref) {
    if (ref.length !== 40) {
        return false;
    }
    return /^[0-9a-f]*$/i.test(ref);
}
|
|
35
|
+
/** Build the local cache path for a remote workflow file. */
export function remoteCachePath(cacheDir, ref) {
    // Refs may contain path separators (e.g. refs/heads/main); flatten anything
    // unsafe so the cache directory name stays a single path segment.
    const safeRef = ref.ref.replace(/[^a-zA-Z0-9._-]/g, "-");
    const repoDirName = `${ref.owner}__${ref.repo}@${safeRef}`;
    return path.join(cacheDir, repoDirName, ref.path);
}
|
|
40
|
+
/**
 * Resolve a GitHub token for API access.
 * Tries `gh auth token` first (real user credentials), then falls back to
 * GITHUB_TOKEN env var. This ordering matters because agent-ci injects a
 * fake token as GITHUB_TOKEN for the runner context.
 */
function resolveGitHubToken() {
    let ghToken = "";
    try {
        ghToken = execSync("gh auth token", {
            encoding: "utf-8",
            stdio: ["pipe", "pipe", "pipe"],
        }).trim();
    }
    catch {
        // gh not installed or not authenticated — fall through to the env var.
    }
    if (ghToken) {
        return ghToken;
    }
    return process.env.GITHUB_TOKEN || null;
}
|
|
61
|
+
/**
 * Scan a workflow YAML and prefetch all remote reusable workflow refs.
 * Downloaded files are written to cacheDir.
 *
 * - SHA refs: cached forever (immutable)
 * - Tag/branch refs: always re-fetched (mutable)
 *
 * Throws on fetch failures (404, auth errors, network errors).
 *
 * @param {string} workflowPath - Local workflow YAML file to scan for `uses:` refs.
 * @param {string} cacheDir - Directory that downloaded workflow files are written under.
 * @returns {Promise<Map<string, string>>} Map from raw `uses:` string to cached file path.
 * @throws {Error} Aggregated message listing every ref that failed to fetch.
 */
export async function prefetchRemoteWorkflows(workflowPath, cacheDir) {
    const resolved = new Map();
    const raw = parseYaml(fs.readFileSync(workflowPath, "utf-8"));
    const jobs = raw?.jobs ?? {};
    const remoteRefs = [];
    for (const [, jobDef] of Object.entries(jobs)) {
        const uses = jobDef?.uses;
        // Local refs ("./...") are resolved on disk elsewhere; only remote refs are fetched.
        if (typeof uses === "string" && !uses.startsWith("./")) {
            const ref = parseRemoteRef(uses);
            if (ref) {
                remoteRefs.push(ref);
            }
        }
    }
    if (remoteRefs.length === 0) {
        return resolved;
    }
    const token = resolveGitHubToken();
    const errors = [];
    // Fetch all refs in parallel; failures are collected and thrown together.
    await Promise.all(remoteRefs.map(async (ref) => {
        const dest = remoteCachePath(cacheDir, ref);
        // Cache hit for SHA refs (immutable — safe to skip)
        if (isShaRef(ref.ref) && fs.existsSync(dest)) {
            resolved.set(ref.raw, dest);
            return;
        }
        try {
            // Encode the path segments and the ref so branch names or file paths
            // containing characters like "#", "?", or spaces can't corrupt the URL.
            const encodedPath = ref.path.split("/").map(encodeURIComponent).join("/");
            const url = `https://api.github.com/repos/${ref.owner}/${ref.repo}/contents/${encodedPath}?ref=${encodeURIComponent(ref.ref)}`;
            const headers = {
                Accept: "application/vnd.github.v3+json",
                "User-Agent": "agent-ci/1.0",
            };
            if (token) {
                headers["Authorization"] = `token ${token}`;
            }
            const response = await fetch(url, { headers });
            if (!response.ok) {
                const hint = response.status === 401 || response.status === 403
                    ? " Ensure GITHUB_TOKEN is set or run `gh auth login`."
                    : "";
                errors.push(`Failed to fetch remote workflow ${ref.raw} (HTTP ${response.status}).${hint}`);
                return;
            }
            // Contents API returns the file body base64-encoded in `content`.
            const data = (await response.json());
            if (!data.content || data.encoding !== "base64") {
                errors.push(`Unexpected response format for remote workflow ${ref.raw}`);
                return;
            }
            const content = Buffer.from(data.content, "base64").toString("utf-8");
            fs.mkdirSync(path.dirname(dest), { recursive: true });
            fs.writeFileSync(dest, content, "utf-8");
            resolved.set(ref.raw, dest);
        }
        catch (err) {
            // Non-Error throwables (e.g. strings) would otherwise read "undefined".
            const message = err instanceof Error ? err.message : String(err);
            errors.push(`Error fetching remote workflow ${ref.raw}: ${message}`);
        }
    }));
    if (errors.length > 0) {
        throw new Error(`[Agent CI] Remote workflow fetch failed:\n ${errors.join("\n ")}`);
    }
    return resolved;
}
|
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
import { describe, it, expect, vi, afterEach, beforeEach } from "vitest";
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import os from "node:os";
|
|
5
|
+
import { parseRemoteRef, isShaRef, remoteCachePath, prefetchRemoteWorkflows, } from "./remote-workflow-fetch.js";
|
|
6
|
+
// Covers the happy path, SHA/tag refs, nested paths, and every rejection case
// (local ref, missing @ref, too-short path, empty ref after @).
describe("parseRemoteRef", () => {
    it("parses owner/repo/path@ref", () => {
        const ref = parseRemoteRef("redwoodjs/actions/.github/workflows/lint.yml@main");
        expect(ref).toEqual({
            owner: "redwoodjs",
            repo: "actions",
            path: ".github/workflows/lint.yml",
            ref: "main",
            raw: "redwoodjs/actions/.github/workflows/lint.yml@main",
        });
    });
    it("parses SHA refs", () => {
        const ref = parseRemoteRef("org/repo/.github/workflows/ci.yml@abc123def456abc123def456abc123def456abc1");
        expect(ref).not.toBeNull();
        expect(ref.ref).toBe("abc123def456abc123def456abc123def456abc1");
    });
    it("parses deeply nested paths", () => {
        // Path segments beyond owner/repo are rejoined into a single path string.
        const ref = parseRemoteRef("org/repo/some/deep/path/workflow.yml@v1");
        expect(ref).not.toBeNull();
        expect(ref.path).toBe("some/deep/path/workflow.yml");
    });
    it("returns null for local refs", () => {
        // Local refs have no "@" suffix, so parsing falls through to null.
        expect(parseRemoteRef("./.github/workflows/lint.yml")).toBeNull();
    });
    it("returns null for missing @ref", () => {
        expect(parseRemoteRef("org/repo/.github/workflows/lint.yml")).toBeNull();
    });
    it("returns null for owner/repo@ref (no path)", () => {
        expect(parseRemoteRef("org/repo@v1")).toBeNull();
    });
    it("returns null for empty ref after @", () => {
        expect(parseRemoteRef("org/repo/path@")).toBeNull();
    });
});
|
|
40
|
+
// isShaRef must accept exactly 40 hex characters (either case) and reject
// everything else — tags and branch names are mutable and must not be cached.
describe("isShaRef", () => {
    it("returns true for 40-char hex", () => {
        expect(isShaRef("abc123def456abc123def456abc123def456abc1")).toBe(true);
    });
    it("returns true for uppercase hex", () => {
        expect(isShaRef("ABC123DEF456ABC123DEF456ABC123DEF456ABC1")).toBe(true);
    });
    it("returns false for short strings", () => {
        expect(isShaRef("abc123")).toBe(false);
    });
    it("returns false for tags", () => {
        expect(isShaRef("v1.0.0")).toBe(false);
    });
    it("returns false for branch names", () => {
        expect(isShaRef("main")).toBe(false);
    });
});
|
|
57
|
+
// remoteCachePath layout: <cacheDir>/<owner>__<repo>@<sanitized-ref>/<file-path>.
describe("remoteCachePath", () => {
    it("builds expected path", () => {
        const ref = parseRemoteRef("org/repo/.github/workflows/lint.yml@v1");
        const result = remoteCachePath("/cache", ref);
        expect(result).toBe("/cache/org__repo@v1/.github/workflows/lint.yml");
    });
    it("sanitizes special characters in ref", () => {
        // Slashes in refs (refs/heads/main) must not create extra directories.
        const ref = parseRemoteRef("org/repo/.github/workflows/ci.yml@refs/heads/main");
        const result = remoteCachePath("/cache", ref);
        expect(result).toContain("org__repo@refs-heads-main");
    });
});
|
|
69
|
+
// End-to-end tests for prefetchRemoteWorkflows with a mocked global fetch and a
// real temp-dir cache. Each test writes a caller workflow to disk, stubs the
// GitHub contents API response, and inspects the returned ref→path map.
describe("prefetchRemoteWorkflows", () => {
    let tmpDir;
    let cacheDir;
    // Saved so the real fetch can be restored after each test.
    const originalFetch = globalThis.fetch;
    beforeEach(() => {
        tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "remote-wf-test-"));
        cacheDir = path.join(tmpDir, "cache");
        fs.mkdirSync(cacheDir, { recursive: true });
    });
    afterEach(() => {
        globalThis.fetch = originalFetch;
        vi.restoreAllMocks();
        if (tmpDir) {
            fs.rmSync(tmpDir, { recursive: true, force: true });
        }
    });
    // Write a caller workflow YAML into the temp dir and return its path.
    function writeWorkflow(content) {
        const wf = path.join(tmpDir, "workflow.yml");
        fs.writeFileSync(wf, content);
        return wf;
    }
    // Stub fetch to return a successful contents-API payload for yamlContent.
    function mockFetchSuccess(yamlContent) {
        const base64Content = Buffer.from(yamlContent).toString("base64");
        globalThis.fetch = vi.fn().mockResolvedValue({
            ok: true,
            json: () => Promise.resolve({ content: base64Content, encoding: "base64" }),
        });
    }
    it("returns empty map when no remote refs", async () => {
        const wf = writeWorkflow(`
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo build
`);
        const result = await prefetchRemoteWorkflows(wf, cacheDir);
        expect(result.size).toBe(0);
    });
    it("fetches remote workflow and writes to cache", async () => {
        const remoteYaml = `
on: workflow_call
jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - run: echo lint
`;
        mockFetchSuccess(remoteYaml);
        const wf = writeWorkflow(`
jobs:
  lint:
    uses: org/repo/.github/workflows/lint.yml@v1
`);
        const result = await prefetchRemoteWorkflows(wf, cacheDir);
        expect(result.size).toBe(1);
        expect(result.has("org/repo/.github/workflows/lint.yml@v1")).toBe(true);
        // Verify cached file was written
        const cachedPath = result.get("org/repo/.github/workflows/lint.yml@v1");
        expect(fs.existsSync(cachedPath)).toBe(true);
        expect(fs.readFileSync(cachedPath, "utf-8")).toBe(remoteYaml);
    });
    it("uses cache for SHA refs on subsequent calls", async () => {
        const sha = "abc123def456abc123def456abc123def456abc1";
        const remoteYaml = `
on: workflow_call
jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - run: echo lint
`;
        mockFetchSuccess(remoteYaml);
        const wf = writeWorkflow(`
jobs:
  lint:
    uses: org/repo/.github/workflows/lint.yml@${sha}
`);
        // First call fetches
        await prefetchRemoteWorkflows(wf, cacheDir);
        expect(globalThis.fetch).toHaveBeenCalledTimes(1);
        // Second call uses cache (SHA ref is immutable)
        const result = await prefetchRemoteWorkflows(wf, cacheDir);
        expect(globalThis.fetch).toHaveBeenCalledTimes(1); // not called again
        expect(result.size).toBe(1);
    });
    it("re-fetches for tag/branch refs even when cached", async () => {
        const remoteYaml = `
on: workflow_call
jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - run: echo lint
`;
        mockFetchSuccess(remoteYaml);
        const wf = writeWorkflow(`
jobs:
  lint:
    uses: org/repo/.github/workflows/lint.yml@main
`);
        // First call fetches
        await prefetchRemoteWorkflows(wf, cacheDir);
        expect(globalThis.fetch).toHaveBeenCalledTimes(1);
        // Second call also fetches (branch ref is mutable)
        await prefetchRemoteWorkflows(wf, cacheDir);
        expect(globalThis.fetch).toHaveBeenCalledTimes(2);
    });
    it("throws on 404 response", async () => {
        globalThis.fetch = vi.fn().mockResolvedValue({
            ok: false,
            status: 404,
        });
        const wf = writeWorkflow(`
jobs:
  lint:
    uses: org/repo/.github/workflows/nonexistent.yml@v1
`);
        await expect(prefetchRemoteWorkflows(wf, cacheDir)).rejects.toThrow(/Remote workflow fetch failed/);
    });
    it("throws on 401 with auth hint", async () => {
        globalThis.fetch = vi.fn().mockResolvedValue({
            ok: false,
            status: 401,
        });
        const wf = writeWorkflow(`
jobs:
  lint:
    uses: org/private-repo/.github/workflows/lint.yml@v1
`);
        await expect(prefetchRemoteWorkflows(wf, cacheDir)).rejects.toThrow(/gh auth login/);
    });
    it("fetches multiple remote refs in parallel", async () => {
        const remoteYaml = `
on: workflow_call
jobs:
  job:
    runs-on: ubuntu-latest
    steps:
      - run: echo hello
`;
        mockFetchSuccess(remoteYaml);
        const wf = writeWorkflow(`
jobs:
  lint:
    uses: org/repo/.github/workflows/lint.yml@v1
  test:
    uses: org/repo/.github/workflows/test.yml@v1
`);
        const result = await prefetchRemoteWorkflows(wf, cacheDir);
        expect(result.size).toBe(2);
        expect(globalThis.fetch).toHaveBeenCalledTimes(2);
    });
    it("skips local refs", async () => {
        mockFetchSuccess("unused");
        const wf = writeWorkflow(`
jobs:
  lint:
    uses: ./.github/workflows/lint.yml
`);
        const result = await prefetchRemoteWorkflows(wf, cacheDir);
        expect(result.size).toBe(0);
        expect(globalThis.fetch).not.toHaveBeenCalled();
    });
});
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { parse as parseYaml } from "yaml";
|
|
4
|
+
const MAX_REUSABLE_DEPTH = 4;
/**
 * Expand reusable workflow jobs (`uses: ./.github/workflows/...`) into concrete
 * job entries that can be scheduled alongside regular jobs.
 *
 * Local refs (starting with `./`) are resolved relative to repoRoot.
 * Remote refs are resolved via the remoteCache map (pre-fetched by
 * prefetchRemoteWorkflows). Nesting is supported up to 4 levels deep
 * (matching GitHub Actions' limit). Cycles are detected and rejected.
 */
export function expandReusableJobs(workflowPath, repoRoot, remoteCache) {
    // Fresh call-chain set; the recursion seeds depth at 0.
    const callChain = new Set();
    return expandReusableJobsInternal(workflowPath, repoRoot, remoteCache, 0, callChain);
}
|
|
17
|
+
/**
 * Recursive worker behind expandReusableJobs.
 *
 * Walks one workflow file's `jobs:` map: regular jobs become flat entries;
 * `uses:` jobs are recursively expanded, their entry IDs prefixed with the
 * caller job ID, and downstream `needs:` references rewired to the inlined
 * sub-graph's terminal jobs.
 *
 * @param {string} workflowPath - Workflow file currently being expanded.
 * @param {string} repoRoot - Base for resolving local `./` refs.
 * @param {Map<string,string>|undefined} remoteCache - Raw `uses:` string → cached file path.
 * @param {number} depth - Current nesting depth (0 at the entry workflow).
 * @param {Set<string>} visitedPaths - Resolved paths on the current call chain (cycle guard).
 * @returns {Array<object>} Flat list of schedulable job entries.
 * @throws {Error} On excessive nesting, cycles, unresolved remote refs, or missing files.
 */
function expandReusableJobsInternal(workflowPath, repoRoot, remoteCache, depth, visitedPaths) {
    if (depth > MAX_REUSABLE_DEPTH) {
        throw new Error(`Reusable workflow nesting depth exceeds maximum of ${MAX_REUSABLE_DEPTH}: ${workflowPath}`);
    }
    const resolvedPath = path.resolve(workflowPath);
    if (visitedPaths.has(resolvedPath)) {
        throw new Error(`Cycle detected in reusable workflows: ${resolvedPath} is already in the call chain`);
    }
    visitedPaths.add(resolvedPath);
    const raw = parseYaml(fs.readFileSync(workflowPath, "utf-8"));
    const jobs = raw?.jobs ?? {};
    const entries = [];
    // Track which caller job IDs map to which inlined terminal job IDs,
    // so we can rewire downstream `needs:` references.
    const callerToTerminals = new Map();
    for (const [jobId, jobDef] of Object.entries(jobs)) {
        const uses = jobDef?.uses;
        if (typeof uses === "string") {
            // This is a reusable workflow call
            let calledPath;
            if (uses.startsWith("./")) {
                calledPath = path.resolve(repoRoot, uses);
            }
            else {
                // Remote ref: must already be in the prefetch cache.
                const cached = remoteCache?.get(uses);
                if (!cached) {
                    throw new Error(`Remote reusable workflow not resolved: job "${jobId}" uses ${uses}`);
                }
                calledPath = cached;
            }
            if (!fs.existsSync(calledPath)) {
                throw new Error(`Reusable workflow file not found: ${calledPath} (referenced by job "${jobId}")`);
            }
            // Extract caller inputs (raw `with:` values), stringified
            const callerWith = jobDef.with
                ? Object.fromEntries(Object.entries(jobDef.with).map(([k, v]) => [k, String(v)]))
                : undefined;
            // Extract input defaults and output defs from the called workflow's on.workflow_call
            // NOTE(review): `calledRaw.true` presumably guards against YAML 1.1
            // parsers that read the `on:` key as boolean true — confirm.
            const calledRaw = parseYaml(fs.readFileSync(calledPath, "utf-8"));
            const wcInputs = (calledRaw.on || calledRaw.true)?.workflow_call?.inputs;
            const inputDefaults = wcInputs && typeof wcInputs === "object"
                ? Object.fromEntries(Object.entries(wcInputs)
                    .filter(([, def]) => def?.default != null)
                    .map(([k, def]) => [k, String(def.default)]))
                : undefined;
            const wcOutputs = (calledRaw.on || calledRaw.true)?.workflow_call?.outputs;
            const workflowCallOutputDefs = wcOutputs && typeof wcOutputs === "object"
                ? Object.fromEntries(Object.entries(wcOutputs)
                    .filter(([, def]) => def?.value != null)
                    .map(([k, def]) => [k, String(def.value)]))
                : undefined;
            // Recursively expand the called workflow
            const calledEntries = expandReusableJobsInternal(calledPath, repoRoot, remoteCache, depth + 1, visitedPaths);
            const callerNeeds = parseNeeds(jobDef?.needs);
            // Prefix all entry IDs and needs with the caller job ID,
            // and attach inputs/outputs metadata. Entries with no internal needs
            // are roots of the sub-graph and inherit the caller's own needs.
            const prefixed = calledEntries.map((entry) => ({
                id: `${jobId}/${entry.id}`,
                workflowPath: entry.workflowPath,
                sourceTaskName: entry.sourceTaskName,
                needs: entry.needs.length === 0 ? callerNeeds : entry.needs.map((n) => `${jobId}/${n}`),
                inputs: callerWith,
                inputDefaults: inputDefaults && Object.keys(inputDefaults).length > 0 ? inputDefaults : undefined,
                workflowCallOutputDefs: workflowCallOutputDefs && Object.keys(workflowCallOutputDefs).length > 0
                    ? workflowCallOutputDefs
                    : undefined,
                callerJobId: jobId,
            }));
            // Compute terminals among the prefixed entries: those no sibling depends on.
            const prefixedIds = new Set(prefixed.map((e) => e.id));
            const depended = new Set();
            for (const entry of prefixed) {
                for (const n of entry.needs) {
                    if (prefixedIds.has(n)) {
                        depended.add(n);
                    }
                }
            }
            const terminals = prefixed.filter((e) => !depended.has(e.id)).map((e) => e.id);
            callerToTerminals.set(jobId, terminals);
            entries.push(...prefixed);
        }
        else {
            // Regular job — has `steps:` or `runs-on:`
            entries.push({
                id: jobId,
                workflowPath,
                sourceTaskName: jobId,
                needs: parseNeeds(jobDef?.needs),
            });
        }
    }
    // Rewire downstream dependencies: any job that `needs: [callerJobId]`
    // should now depend on the terminal jobs of the inlined sub-graph
    for (const entry of entries) {
        entry.needs = entry.needs.flatMap((dep) => {
            const terminals = callerToTerminals.get(dep);
            if (terminals && terminals.length > 0) {
                return terminals;
            }
            return [dep];
        });
    }
    // Pop this file off the call chain so sibling branches may reference it again.
    visitedPaths.delete(resolvedPath);
    return entries;
}
|
|
123
|
+
/**
 * Normalise a job's `needs:` value into an array of job-ID strings.
 * Accepts a single string, an array, or nothing; anything else yields [].
 */
function parseNeeds(needs) {
    if (!needs) {
        return [];
    }
    if (typeof needs === "string") {
        return [needs];
    }
    if (!Array.isArray(needs)) {
        return [];
    }
    return needs.map((n) => String(n));
}
|