@redwoodjs/agent-ci 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/LICENSE +110 -0
  2. package/README.md +79 -0
  3. package/dist/cli.js +628 -0
  4. package/dist/config.js +63 -0
  5. package/dist/docker/container-config.js +178 -0
  6. package/dist/docker/container-config.test.js +156 -0
  7. package/dist/docker/service-containers.js +205 -0
  8. package/dist/docker/service-containers.test.js +236 -0
  9. package/dist/docker/shutdown.js +120 -0
  10. package/dist/docker/shutdown.test.js +148 -0
  11. package/dist/output/agent-mode.js +7 -0
  12. package/dist/output/agent-mode.test.js +36 -0
  13. package/dist/output/cleanup.js +218 -0
  14. package/dist/output/cleanup.test.js +241 -0
  15. package/dist/output/concurrency.js +57 -0
  16. package/dist/output/concurrency.test.js +88 -0
  17. package/dist/output/debug.js +36 -0
  18. package/dist/output/logger.js +57 -0
  19. package/dist/output/logger.test.js +82 -0
  20. package/dist/output/reporter.js +67 -0
  21. package/dist/output/run-state.js +126 -0
  22. package/dist/output/run-state.test.js +169 -0
  23. package/dist/output/state-renderer.js +149 -0
  24. package/dist/output/state-renderer.test.js +488 -0
  25. package/dist/output/tree-renderer.js +52 -0
  26. package/dist/output/tree-renderer.test.js +105 -0
  27. package/dist/output/working-directory.js +20 -0
  28. package/dist/runner/directory-setup.js +98 -0
  29. package/dist/runner/directory-setup.test.js +31 -0
  30. package/dist/runner/git-shim.js +92 -0
  31. package/dist/runner/git-shim.test.js +57 -0
  32. package/dist/runner/local-job.js +691 -0
  33. package/dist/runner/metadata.js +90 -0
  34. package/dist/runner/metadata.test.js +127 -0
  35. package/dist/runner/result-builder.js +119 -0
  36. package/dist/runner/result-builder.test.js +177 -0
  37. package/dist/runner/step-wrapper.js +82 -0
  38. package/dist/runner/step-wrapper.test.js +77 -0
  39. package/dist/runner/sync.js +80 -0
  40. package/dist/runner/workspace.js +66 -0
  41. package/dist/types.js +1 -0
  42. package/dist/workflow/job-scheduler.js +62 -0
  43. package/dist/workflow/job-scheduler.test.js +130 -0
  44. package/dist/workflow/workflow-parser.js +556 -0
  45. package/dist/workflow/workflow-parser.test.js +642 -0
  46. package/package.json +39 -0
  47. package/shim.sh +11 -0
@@ -0,0 +1,80 @@
1
+ import path from "path";
2
+ import fs from "fs";
3
+ import { execSync, spawnSync } from "child_process";
4
+ // ─── Retry workspace sync ─────────────────────────────────────────────────────
5
/**
 * Resolve the repo root by walking up from `cwd` to find a `.git` entry.
 *
 * @param {string} [cwd=process.cwd()] - Directory to start the search from.
 * @returns {string} The closest ancestor (or `cwd` itself) containing `.git`,
 *   or `cwd` unchanged when no repository is found.
 */
function resolveRepoRoot(cwd = process.cwd()) {
  let dir = cwd;
  // Walk upward until `.git` is found. Terminate when dirname(dir) === dir —
  // the filesystem root on every platform. The previous `dir !== "/"` check
  // never terminates on Windows drive roots (e.g. "C:\\"), looping forever,
  // and also ignored a `.git` sitting at "/" itself.
  while (!fs.existsSync(path.join(dir, ".git"))) {
    const parent = path.dirname(dir);
    if (parent === dir) {
      // Reached the filesystem root without finding a repo — fall back to cwd.
      return cwd;
    }
    dir = parent;
  }
  return dir;
}
15
/**
 * Locate the workspace directory inside a run directory.
 *
 * Runs lay their checkout out as `<runDir>/work/<repoName>/<repoName>/`;
 * return the first such doubly-nested directory, or null when none exists.
 */
function findWorkspaceDir(runDir) {
  const workDir = path.join(runDir, "work");
  if (!fs.existsSync(workDir)) {
    return null;
  }
  const entries = fs.readdirSync(workDir);
  for (const name of entries) {
    const candidate = path.join(workDir, name, name);
    const isWorkspace = fs.existsSync(candidate) && fs.statSync(candidate).isDirectory();
    if (isWorkspace) {
      return candidate;
    }
  }
  return null;
}
33
/**
 * Mirror local source files into a run's workspace before a retry.
 *
 * Builds the file list via `git ls-files --cached --others --exclude-standard`
 * (tracked + untracked, .gitignore respected — the same source of truth that
 * copyWorkspace uses for the initial clone) and mirrors it with
 * `rsync -a --delete` so deletions propagate while `node_modules` and the
 * fake `.git` repo inside the workspace stay intact. If rsync fails, files
 * are copied one-by-one as a best effort (without delete semantics).
 *
 * Called before sending the `retry` signal so the container sees local edits.
 */
export function syncWorkspaceForRetry(runDir) {
  const workspaceDir = findWorkspaceDir(runDir);
  if (!workspaceDir) {
    return;
  }
  const repoRoot = resolveRepoRoot();
  // NUL-separated listing avoids any trouble with newlines in file names.
  const listing = execSync("git ls-files --cached --others --exclude-standard -z", {
    stdio: "pipe",
    cwd: repoRoot,
  });
  const files = listing.toString().split("\0").filter(Boolean);
  // rsync on all platforms — we need --delete semantics. The file list is fed
  // over stdin (--files-from=- with --from0) to avoid shell injection.
  const rsync = spawnSync(
    "rsync",
    ["-a", "--delete", "--files-from=-", "--from0", "./", `${workspaceDir}/`],
    {
      input: files.join("\0"),
      stdio: ["pipe", "pipe", "pipe"],
      cwd: repoRoot,
    },
  );
  if (rsync.status !== 0) {
    // rsync unavailable or failed — degrade to per-file copies.
    for (const relPath of files) {
      try {
        const target = path.join(workspaceDir, relPath);
        fs.mkdirSync(path.dirname(target), { recursive: true });
        fs.copyFileSync(path.join(repoRoot, relPath), target);
      } catch {
        // Skip files that can't be copied
      }
    }
  }
  console.log(`[Agent CI] Synced workspace from ${repoRoot}`);
}
@@ -0,0 +1,66 @@
1
+ import { execSync } from "child_process";
2
+ import { copyWorkspace } from "../output/cleanup.js";
3
+ import { findRepoRoot } from "./metadata.js";
4
+ import { config } from "../config.js";
5
/**
 * Copy source files into the workspace directory, then initialise a fake
 * git repo so `actions/checkout` finds a valid workspace.
 *
 * @param {object} opts
 * @param {string} [opts.workflowPath] - Workflow file inside the target repo;
 *   used to locate that repo's root.
 * @param {string} [opts.headSha] - Specific commit to snapshot; "HEAD" or
 *   empty means "copy the working tree as-is".
 * @param {string} [opts.githubRepo] - "owner/name" used for the fake origin.
 * @param {string} opts.workspaceDir - Destination directory for the copy.
 */
export function prepareWorkspace(opts) {
  const { workflowPath, headSha, githubRepo, workspaceDir } = opts;
  // Resolve repo root — needed for both archive and rsync paths.
  // Derive from the workflow path (which lives inside the target repo) so we
  // copy from the correct repo, not from the CLI's CWD (which is agent-ci).
  let repoRoot;
  if (workflowPath) {
    repoRoot = findRepoRoot(workflowPath);
  }
  if (!repoRoot) {
    repoRoot = execSync(`git rev-parse --show-toplevel`).toString().trim();
  }
  if (headSha && headSha !== "HEAD") {
    // Specific SHA requested — use git archive (clean snapshot).
    // Quote the interpolated values: unquoted, a workspace path containing
    // spaces or shell metacharacters breaks the pipeline (or injects commands).
    execSync(
      `git archive ${JSON.stringify(headSha)} | tar -x -C ${JSON.stringify(workspaceDir)}`,
      {
        stdio: "pipe",
        cwd: repoRoot,
      },
    );
  }
  else {
    // Default: copy the working directory as-is, including dirty/untracked files.
    // Uses git ls-files to respect .gitignore (avoids copying node_modules, _/, etc.)
    // On macOS: per-file APFS CoW clones. On Linux: rsync. Fallback: fs.cpSync.
    copyWorkspace(repoRoot, workspaceDir);
  }
  initFakeGitRepo(workspaceDir, githubRepo || config.GITHUB_REPO);
}
36
// ─── Fake git init ────────────────────────────────────────────────────────────
/**
 * Initialise a fake git repository in `dir` so that `actions/checkout`
 * finds a valid workspace with a remote origin and detached HEAD.
 */
export function initFakeGitRepo(dir, githubRepo) {
  // All commands run inside the workspace with output suppressed.
  const run = (cmd) => execSync(cmd, { cwd: dir, stdio: "pipe" });
  run(`git init`);
  run(`git config user.name "agent-ci"`);
  run(`git config user.email "agent-ci@example.com"`);
  // The remote URL must exactly match what actions/checkout computes via
  // URL.origin. Node.js URL.origin strips the default port (80), so we must
  // NOT include :80.
  run(`git remote add origin http://127.0.0.1/${githubRepo}`);
  run(`git add . && git commit -m "workspace" || true`);
  // Create main and refs/remotes/origin/main pointing to this commit
  run(`git branch -M main`);
  run(`git update-ref refs/remotes/origin/main HEAD`);
  // Detach HEAD so checkout can freely delete ALL branches (it can't delete
  // the current branch)
  run(`git checkout --detach HEAD`);
}
package/dist/types.js ADDED
@@ -0,0 +1 @@
1
// NOTE(review): appears to be the compiled output of a types-only TS module —
// the empty export keeps this file a valid ES module with no runtime exports.
export {};
@@ -0,0 +1,62 @@
1
+ import fs from "node:fs";
2
+ import { parse as parseYaml } from "yaml";
3
/**
 * Parse job `needs:` dependencies from raw workflow YAML.
 *
 * @param {string} workflowPath - Path to the workflow file on disk.
 * @returns {Map<string, string[]>} Upstream job IDs each job depends on;
 *   empty when the file is missing or unparseable.
 */
export function parseJobDependencies(workflowPath) {
  const deps = new Map();
  try {
    const doc = parseYaml(fs.readFileSync(workflowPath, "utf-8"));
    const jobs = doc?.jobs ?? {};
    for (const [jobId, jobDef] of Object.entries(jobs)) {
      const needs = jobDef?.needs;
      // Normalise: absent/falsy → [], scalar → [scalar], list → strings.
      const resolved = !needs
        ? []
        : typeof needs === "string"
          ? [needs]
          : Array.isArray(needs)
            ? needs.map(String)
            : [];
      deps.set(jobId, resolved);
    }
  }
  catch {
    // Can't parse — return empty deps
  }
  return deps;
}
33
/**
 * Topological sort of job IDs by their dependencies.
 *
 * @param {Map<string, string[]>} deps - jobId → upstream job IDs.
 * @returns {string[][]} An array of waves; each wave's jobs can run in
 *   parallel. When a cycle (or a dependency on an unknown job) blocks
 *   progress, all remaining jobs are dumped into one final wave instead of
 *   looping forever.
 */
export function topoSort(deps) {
  const waves = [];
  const pending = new Map(deps);
  const done = new Set();
  while (pending.size > 0) {
    // Jobs whose every dependency has already completed are ready to run.
    const ready = [...pending.entries()]
      .filter(([, needs]) => needs.every((need) => done.has(need)))
      .map(([jobId]) => jobId);
    if (ready.length === 0) {
      // Cycle detected or unresolvable dependency — run remaining in one wave
      waves.push([...pending.keys()]);
      break;
    }
    for (const jobId of ready) {
      pending.delete(jobId);
      done.add(jobId);
    }
    waves.push(ready);
  }
  return waves;
}
@@ -0,0 +1,130 @@
1
+ import { describe, it, expect } from "vitest";
2
+ import fs from "node:fs";
3
+ import path from "node:path";
4
+ import os from "node:os";
5
+ import { parseJobDependencies, topoSort } from "./job-scheduler.js";
6
describe("parseJobDependencies", () => {
  let tmpDir;

  // Write `content` to a workflow.yml inside the current temp dir.
  function writeWorkflow(content) {
    const filePath = path.join(tmpDir, "workflow.yml");
    fs.writeFileSync(filePath, content, "utf-8");
    return filePath;
  }

  // Run `fn` with a fresh temp dir and remove it in `finally`, so a failing
  // expectation no longer leaks the directory (previously cleanup ran only
  // after the assertions, so any test failure left the temp dir behind).
  function withTmpDir(fn) {
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sched-test-"));
    try {
      fn();
    } finally {
      fs.rmSync(tmpDir, { recursive: true, force: true });
    }
  }

  it("returns empty deps for jobs without needs", () => {
    withTmpDir(() => {
      const wf = writeWorkflow(`
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo hello
  test:
    runs-on: ubuntu-latest
    steps:
      - run: echo world
`);
      const deps = parseJobDependencies(wf);
      expect(deps.get("build")).toEqual([]);
      expect(deps.get("test")).toEqual([]);
    });
  });

  it("parses string needs", () => {
    withTmpDir(() => {
      const wf = writeWorkflow(`
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo build
  test:
    needs: build
    runs-on: ubuntu-latest
    steps:
      - run: echo test
`);
      const deps = parseJobDependencies(wf);
      expect(deps.get("build")).toEqual([]);
      expect(deps.get("test")).toEqual(["build"]);
    });
  });

  it("parses array needs", () => {
    withTmpDir(() => {
      const wf = writeWorkflow(`
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo build
  lint:
    runs-on: ubuntu-latest
    steps:
      - run: echo lint
  deploy:
    needs: [build, lint]
    runs-on: ubuntu-latest
    steps:
      - run: echo deploy
`);
      const deps = parseJobDependencies(wf);
      expect(deps.get("build")).toEqual([]);
      expect(deps.get("lint")).toEqual([]);
      expect(deps.get("deploy")).toEqual(["build", "lint"]);
    });
  });

  it("returns empty map for non-existent file", () => {
    const deps = parseJobDependencies("/tmp/nonexistent.yml");
    expect(deps.size).toBe(0);
  });
});
79
describe("topoSort", () => {
  // Helper: build the deps Map from entry pairs and run the sort.
  const sortEntries = (entries) => topoSort(new Map(entries));

  it("puts all independent jobs in one wave", () => {
    const waves = sortEntries([["a", []], ["b", []], ["c", []]]);
    expect(waves).toEqual([["a", "b", "c"]]);
  });

  it("creates two waves for a simple dependency chain", () => {
    const waves = sortEntries([["build", []], ["test", ["build"]]]);
    expect(waves).toEqual([["build"], ["test"]]);
  });

  it("creates correct waves for mixed dependencies", () => {
    const waves = sortEntries([
      ["build", []],
      ["lint", []],
      ["test", ["build"]],
      ["deploy", ["build", "lint"]],
    ]);
    // Wave 1: build, lint (no deps); wave 2: test, deploy (deps in wave 1).
    expect(waves[0]).toEqual(expect.arrayContaining(["build", "lint"]));
    expect(waves[1]).toEqual(expect.arrayContaining(["test", "deploy"]));
    expect(waves.length).toBe(2);
  });

  it("creates three waves for a chain: build -> test -> deploy", () => {
    const waves = sortEntries([
      ["build", []],
      ["test", ["build"]],
      ["deploy", ["test"]],
    ]);
    expect(waves).toEqual([["build"], ["test"], ["deploy"]]);
  });

  it("handles cycles gracefully by dumping remaining into one wave", () => {
    const waves = sortEntries([["a", ["b"]], ["b", ["a"]]]);
    // Should still produce output (fallback to one wave)
    expect(waves.length).toBe(1);
    expect(waves[0]).toEqual(expect.arrayContaining(["a", "b"]));
  });
});