@redwoodjs/agent-ci 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +110 -0
- package/README.md +79 -0
- package/dist/cli.js +628 -0
- package/dist/config.js +63 -0
- package/dist/docker/container-config.js +178 -0
- package/dist/docker/container-config.test.js +156 -0
- package/dist/docker/service-containers.js +205 -0
- package/dist/docker/service-containers.test.js +236 -0
- package/dist/docker/shutdown.js +120 -0
- package/dist/docker/shutdown.test.js +148 -0
- package/dist/output/agent-mode.js +7 -0
- package/dist/output/agent-mode.test.js +36 -0
- package/dist/output/cleanup.js +218 -0
- package/dist/output/cleanup.test.js +241 -0
- package/dist/output/concurrency.js +57 -0
- package/dist/output/concurrency.test.js +88 -0
- package/dist/output/debug.js +36 -0
- package/dist/output/logger.js +57 -0
- package/dist/output/logger.test.js +82 -0
- package/dist/output/reporter.js +67 -0
- package/dist/output/run-state.js +126 -0
- package/dist/output/run-state.test.js +169 -0
- package/dist/output/state-renderer.js +149 -0
- package/dist/output/state-renderer.test.js +488 -0
- package/dist/output/tree-renderer.js +52 -0
- package/dist/output/tree-renderer.test.js +105 -0
- package/dist/output/working-directory.js +20 -0
- package/dist/runner/directory-setup.js +98 -0
- package/dist/runner/directory-setup.test.js +31 -0
- package/dist/runner/git-shim.js +92 -0
- package/dist/runner/git-shim.test.js +57 -0
- package/dist/runner/local-job.js +691 -0
- package/dist/runner/metadata.js +90 -0
- package/dist/runner/metadata.test.js +127 -0
- package/dist/runner/result-builder.js +119 -0
- package/dist/runner/result-builder.test.js +177 -0
- package/dist/runner/step-wrapper.js +82 -0
- package/dist/runner/step-wrapper.test.js +77 -0
- package/dist/runner/sync.js +80 -0
- package/dist/runner/workspace.js +66 -0
- package/dist/types.js +1 -0
- package/dist/workflow/job-scheduler.js +62 -0
- package/dist/workflow/job-scheduler.test.js +130 -0
- package/dist/workflow/workflow-parser.js +556 -0
- package/dist/workflow/workflow-parser.test.js +642 -0
- package/package.json +39 -0
- package/shim.sh +11 -0
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import crypto from "node:crypto";
|
|
4
|
+
import { execFileSync, execSync, spawnSync } from "node:child_process";
|
|
5
|
+
/**
 * Copy workspace files from a git repo root to dest using git ls-files.
 * On macOS: uses per-file `cp -c` (APFS CoW clones) for zero-disk copies.
 * On Linux: uses rsync with file list from git ls-files.
 * Fallback: Node.js fs.cpSync when neither is available.
 *
 * Only copies tracked + untracked-but-not-gitignored files (respects .gitignore).
 * File paths are never interpolated into shell strings — arguments are always
 * passed as arrays to avoid shell injection.
 *
 * @param {string} repoRoot - Absolute path to the git repository root.
 * @param {string} dest - Destination directory for the copy.
 */
export function copyWorkspace(repoRoot, dest) {
  // Ask git for the copy list, NUL-delimited so paths containing spaces or
  // other special characters survive the round trip intact. The command
  // itself is a fixed string — no user input is interpolated.
  const listing = execSync("git ls-files --cached --others --exclude-standard -z", {
    stdio: "pipe",
    cwd: repoRoot,
  }).toString();
  const files = listing.split("\0").filter(Boolean);

  if (process.platform === "darwin") {
    // macOS/APFS path: clone each file with `cp -c`. spawnSync/execFileSync
    // receive each path as an argv element, so no shell ever sees it.
    for (const relPath of files) {
      const from = path.join(repoRoot, relPath);
      const to = path.join(dest, relPath);
      try {
        fs.mkdirSync(path.dirname(to), { recursive: true });
        // Attempt the CoW clone first; on failure, retry as a plain copy.
        const clone = spawnSync("cp", ["-c", from, to], { stdio: "pipe" });
        if (clone.status !== 0) {
          execFileSync("cp", [from, to], { stdio: "pipe" });
        }
      } catch {
        // Skip files that can't be copied (e.g. broken symlinks)
      }
    }
    return;
  }

  // Linux/other: stream the NUL-delimited list to rsync via --files-from=-
  // with --from0. dest is a positional argv element, never shell-interpolated.
  const rsync = spawnSync("rsync", ["-a", "--files-from=-", "--from0", "./", dest + "/"], {
    input: files.join("\0"),
    stdio: ["pipe", "pipe", "pipe"],
    cwd: repoRoot,
  });
  if (rsync.status !== 0) {
    // rsync missing or failed — use the pure-Node fallback instead.
    copyViaNodeFs(repoRoot, dest, files);
  }
}
|
|
60
|
+
/**
 * Node.js fallback: copy each file individually using fs.cpSync.
 *
 * @param {string} repoRoot - Source repository root.
 * @param {string} dest - Destination directory.
 * @param {string[]} files - Repo-relative paths to copy.
 */
function copyViaNodeFs(repoRoot, dest, files) {
  for (const relPath of files) {
    const target = path.join(dest, relPath);
    try {
      // Ensure the parent directory chain exists before copying.
      fs.mkdirSync(path.dirname(target), { recursive: true });
      fs.cpSync(path.join(repoRoot, relPath), target, { force: true, recursive: true });
    } catch {
      // Skip files that can't be copied (e.g. broken symlinks)
    }
  }
}
|
|
74
|
+
/**
 * All supported lockfile names, in priority order.
 * The first one found is used as the cache key source.
 */
const LOCKFILE_NAMES = [
  "pnpm-lock.yaml",
  "package-lock.json",
  "yarn.lock",
  "bun.lock",
  "bun.lockb",
];
/**
 * Compute a short SHA-256 hash of lockfiles tracked in the repo.
 * Searches for all known lockfile types (pnpm, npm, yarn, bun) and hashes
 * whichever are found. Used as a cache key for the warm node_modules directory
 * so the snapshot is automatically invalidated when dependencies change.
 *
 * Returns "no-lockfile" if no lockfile is found.
 *
 * @param {string} repoRoot - Absolute path to the git repository root.
 * @returns {string} 16-character hex digest, or "no-lockfile".
 */
export function computeLockfileHash(repoRoot) {
  // Match each lockfile name both at the repo root and in any subdirectory.
  // Patterns are passed as argv elements via execFileSync — never through a
  // shell — so no quoting is needed and behavior is identical on Windows,
  // where the previous single-quoted execSync string would not have been
  // dequoted by cmd.exe. This also matches the module's stated rule that
  // arguments are always passed as arrays.
  const patterns = LOCKFILE_NAMES.flatMap((name) => [`**/${name}`, name]);
  let lockfiles;
  try {
    lockfiles = execFileSync("git", ["ls-files", "--cached", "--", ...patterns], {
      stdio: "pipe",
      cwd: repoRoot,
    })
      .toString()
      .split("\n")
      .map((f) => f.trim())
      .filter(Boolean);
  } catch {
    // git missing or repoRoot is not a repo — fall back to the fs probe below.
    lockfiles = [];
  }
  if (lockfiles.length === 0) {
    // Also try a direct filesystem check for untracked lockfiles
    for (const name of LOCKFILE_NAMES) {
      const rootLockfile = path.join(repoRoot, name);
      if (fs.existsSync(rootLockfile)) {
        lockfiles = [name];
        break;
      }
    }
    if (lockfiles.length === 0) {
      return "no-lockfile";
    }
  }
  // Hash in sorted order so the digest is independent of git's listing order.
  const hash = crypto.createHash("sha256");
  for (const file of lockfiles.sort()) {
    try {
      hash.update(fs.readFileSync(path.join(repoRoot, file)));
    } catch {
      // Skip unreadable files
    }
  }
  return hash.digest("hex").slice(0, 16);
}
|
|
134
|
+
/**
 * Sentinel files written by each package manager after a successful install.
 * If at least one exists, the cache is considered intact.
 */
const INSTALL_SENTINELS = [
  ".modules.yaml", // pnpm
  ".package-lock.json", // npm
  ".yarn-integrity", // yarn
];
/**
 * Check whether any known install sentinel exists in the directory.
 * Each PM writes a specific marker file after a successful install.
 * For Bun, having any `node_modules/.cache` is sufficient since Bun
 * does not write a top-level sentinel but always creates a cache dir.
 *
 * @param {string} warmDir - Path to the node_modules directory to inspect.
 * @returns {boolean} True when at least one sentinel is present.
 */
export function hasInstallSentinel(warmDir) {
  // Bun is special-cased via ".cache": it leaves no dedicated sentinel but
  // reliably creates a cache directory, so that is treated as equivalent.
  const markers = [...INSTALL_SENTINELS, ".cache"];
  return markers.some((marker) => fs.existsSync(path.join(warmDir, marker)));
}
|
|
161
|
+
/**
 * Check whether a warm node_modules directory is populated AND intact.
 * Used by the wave scheduler to decide whether to serialize the first job.
 *
 * A cache is considered warm only if:
 * 1. The directory exists and is non-empty
 * 2. At least one install sentinel exists (PM-specific marker file)
 *
 * A non-empty directory WITHOUT any sentinel indicates an interrupted install
 * and is treated as cold/broken.
 *
 * @param {string} warmDir - Path to the warm node_modules directory.
 * @returns {boolean} True only for a populated directory with a sentinel.
 */
export function isWarmNodeModules(warmDir) {
  try {
    if (!fs.existsSync(warmDir)) {
      return false;
    }
    // Empty directory → cold; populated directory must also carry a
    // package-manager install sentinel to count as warm.
    const hasEntries = fs.readdirSync(warmDir).length > 0;
    return hasEntries && hasInstallSentinel(warmDir);
  } catch {
    // Any fs error (permissions, concurrent deletion) is treated as cold.
    return false;
  }
}
|
|
187
|
+
/**
 * Detect and repair a corrupted warm cache directory.
 * A cache is corrupt if it has files but no install sentinel from any PM.
 *
 * When corruption is detected, the directory is deleted and recreated empty
 * so the next install starts from scratch.
 *
 * @param {string} warmDir - Path to the warm node_modules directory.
 * @returns {"repaired"|"warm"|"cold"} `"repaired"` if a broken cache was
 * nuked, `"warm"` if the cache is healthy, or `"cold"` if it was already
 * empty/missing.
 */
export function repairWarmCache(warmDir) {
  try {
    // Missing or empty directory → nothing to repair.
    if (!fs.existsSync(warmDir) || fs.readdirSync(warmDir).length === 0) {
      return "cold";
    }
    // If any install sentinel exists, the cache is healthy.
    if (hasInstallSentinel(warmDir)) {
      return "warm";
    }
    // Files present but no sentinel → an interrupted install left the cache
    // in an unknown state. Delete it and recreate an empty directory.
    fs.rmSync(warmDir, { recursive: true, force: true });
    fs.mkdirSync(warmDir, { recursive: true, mode: 0o777 });
    return "repaired";
  } catch {
    // On any fs error, report cold so the caller performs a fresh install.
    return "cold";
  }
}
|
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import os from "node:os";
|
|
5
|
+
import { execSync } from "node:child_process";
|
|
6
|
+
// ── Workspace copy tests ──────────────────────────────────────────────────────
describe("copyWorkspace", () => {
  let repoDir; // temp git repo acting as the copy source
  let destDir; // temp directory acting as the copy destination
  beforeEach(() => {
    // Create a real git repo with tracked, untracked, and gitignored files
    repoDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-copy-test-repo-"));
    destDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-copy-test-dest-"));
    // Init git repo
    execSync("git init", { cwd: repoDir, stdio: "pipe" });
    execSync('git config user.name "test"', { cwd: repoDir, stdio: "pipe" });
    execSync('git config user.email "test@test.com"', { cwd: repoDir, stdio: "pipe" });
    // Create tracked files
    fs.writeFileSync(path.join(repoDir, "README.md"), "# Hello");
    fs.mkdirSync(path.join(repoDir, "src"), { recursive: true });
    fs.writeFileSync(path.join(repoDir, "src", "index.ts"), "console.log('hello')");
    // Create .gitignore
    fs.writeFileSync(path.join(repoDir, ".gitignore"), "node_modules/\ndist/\n*.log\n");
    // Create gitignored files (should NOT be copied)
    fs.mkdirSync(path.join(repoDir, "node_modules", "foo"), { recursive: true });
    fs.writeFileSync(path.join(repoDir, "node_modules", "foo", "index.js"), "module.exports = {}");
    fs.mkdirSync(path.join(repoDir, "dist"), { recursive: true });
    fs.writeFileSync(path.join(repoDir, "dist", "bundle.js"), "bundled");
    fs.writeFileSync(path.join(repoDir, "debug.log"), "log data");
    // Commit everything that's tracked
    execSync("git add .", { cwd: repoDir, stdio: "pipe" });
    execSync('git commit -m "initial"', { cwd: repoDir, stdio: "pipe" });
    // Create untracked-but-not-ignored file (should be copied)
    fs.writeFileSync(path.join(repoDir, "newfile.txt"), "untracked but not ignored");
  });
  afterEach(() => {
    // Remove both temp trees so repeated runs don't leak tmpdir entries.
    fs.rmSync(repoDir, { recursive: true, force: true });
    fs.rmSync(destDir, { recursive: true, force: true });
  });
  it("copies tracked files", async () => {
    // Dynamic import defers module evaluation until the test actually runs.
    const { copyWorkspace } = await import("./cleanup.js");
    copyWorkspace(repoDir, destDir);
    expect(fs.existsSync(path.join(destDir, "README.md"))).toBe(true);
    expect(fs.readFileSync(path.join(destDir, "README.md"), "utf-8")).toBe("# Hello");
    expect(fs.existsSync(path.join(destDir, "src", "index.ts"))).toBe(true);
    expect(fs.existsSync(path.join(destDir, ".gitignore"))).toBe(true);
  });
  it("copies untracked-but-not-ignored files", async () => {
    const { copyWorkspace } = await import("./cleanup.js");
    copyWorkspace(repoDir, destDir);
    expect(fs.existsSync(path.join(destDir, "newfile.txt"))).toBe(true);
    expect(fs.readFileSync(path.join(destDir, "newfile.txt"), "utf-8")).toBe("untracked but not ignored");
  });
  it("excludes gitignored files", async () => {
    const { copyWorkspace } = await import("./cleanup.js");
    copyWorkspace(repoDir, destDir);
    // Entire ignored trees and individual ignored files must both be absent.
    expect(fs.existsSync(path.join(destDir, "node_modules"))).toBe(false);
    expect(fs.existsSync(path.join(destDir, "dist"))).toBe(false);
    expect(fs.existsSync(path.join(destDir, "debug.log"))).toBe(false);
  });
  it("preserves nested directory structure", async () => {
    const { copyWorkspace } = await import("./cleanup.js");
    copyWorkspace(repoDir, destDir);
    expect(fs.readFileSync(path.join(destDir, "src", "index.ts"), "utf-8")).toBe("console.log('hello')");
  });
});
|
|
67
|
+
// ── computeLockfileHash tests ─────────────────────────────────────────────────
describe("computeLockfileHash", () => {
  let repoDir; // fresh temp git repo per test
  beforeEach(() => {
    repoDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-hash-test-"));
    execSync("git init", { cwd: repoDir, stdio: "pipe" });
    execSync('git config user.name "test"', { cwd: repoDir, stdio: "pipe" });
    execSync('git config user.email "test@test.com"', { cwd: repoDir, stdio: "pipe" });
  });
  afterEach(() => {
    fs.rmSync(repoDir, { recursive: true, force: true });
  });
  it("returns a hex string for a repo with a tracked lockfile", async () => {
    const { computeLockfileHash } = await import("./cleanup.js");
    fs.writeFileSync(path.join(repoDir, "pnpm-lock.yaml"), "lockfileVersion: '9.0'\n");
    execSync("git add .", { cwd: repoDir, stdio: "pipe" });
    execSync('git commit -m "init"', { cwd: repoDir, stdio: "pipe" });
    const hash = computeLockfileHash(repoDir);
    // The digest is truncated to 16 lowercase hex characters.
    expect(hash).toMatch(/^[a-f0-9]{16}$/);
  });
  it("returns the same hash for the same lockfile content", async () => {
    const { computeLockfileHash } = await import("./cleanup.js");
    fs.writeFileSync(path.join(repoDir, "pnpm-lock.yaml"), "lockfileVersion: '9.0'\n");
    execSync("git add .", { cwd: repoDir, stdio: "pipe" });
    execSync('git commit -m "init"', { cwd: repoDir, stdio: "pipe" });
    // Determinism: identical input must always produce an identical key.
    expect(computeLockfileHash(repoDir)).toBe(computeLockfileHash(repoDir));
  });
  it("returns a different hash when lockfile content changes", async () => {
    const { computeLockfileHash } = await import("./cleanup.js");
    fs.writeFileSync(path.join(repoDir, "pnpm-lock.yaml"), "lockfileVersion: '9.0'\n");
    execSync("git add .", { cwd: repoDir, stdio: "pipe" });
    execSync('git commit -m "init"', { cwd: repoDir, stdio: "pipe" });
    const hash1 = computeLockfileHash(repoDir);
    fs.writeFileSync(path.join(repoDir, "pnpm-lock.yaml"), "lockfileVersion: '9.0'\n# changed\n");
    execSync("git add .", { cwd: repoDir, stdio: "pipe" });
    execSync('git commit -m "update"', { cwd: repoDir, stdio: "pipe" });
    const hash2 = computeLockfileHash(repoDir);
    // Cache-invalidation property: changed dependencies → changed key.
    expect(hash1).not.toBe(hash2);
  });
  it("returns 'no-lockfile' when no lockfile exists", async () => {
    const { computeLockfileHash } = await import("./cleanup.js");
    // Empty repo, no lockfile
    fs.writeFileSync(path.join(repoDir, "README.md"), "hi");
    execSync("git add .", { cwd: repoDir, stdio: "pipe" });
    execSync('git commit -m "init"', { cwd: repoDir, stdio: "pipe" });
    expect(computeLockfileHash(repoDir)).toBe("no-lockfile");
  });
  it("detects package-lock.json (npm)", async () => {
    const { computeLockfileHash } = await import("./cleanup.js");
    fs.writeFileSync(path.join(repoDir, "package-lock.json"), '{"lockfileVersion": 3}');
    execSync("git add .", { cwd: repoDir, stdio: "pipe" });
    execSync('git commit -m "init"', { cwd: repoDir, stdio: "pipe" });
    const hash = computeLockfileHash(repoDir);
    expect(hash).toMatch(/^[a-f0-9]{16}$/);
  });
  it("detects yarn.lock", async () => {
    const { computeLockfileHash } = await import("./cleanup.js");
    fs.writeFileSync(path.join(repoDir, "yarn.lock"), "# yarn lockfile v1");
    execSync("git add .", { cwd: repoDir, stdio: "pipe" });
    execSync('git commit -m "init"', { cwd: repoDir, stdio: "pipe" });
    const hash = computeLockfileHash(repoDir);
    expect(hash).toMatch(/^[a-f0-9]{16}$/);
  });
  it("detects bun.lock", async () => {
    const { computeLockfileHash } = await import("./cleanup.js");
    fs.writeFileSync(path.join(repoDir, "bun.lock"), '{"lockfileVersion": 0}');
    execSync("git add .", { cwd: repoDir, stdio: "pipe" });
    execSync('git commit -m "init"', { cwd: repoDir, stdio: "pipe" });
    const hash = computeLockfileHash(repoDir);
    expect(hash).toMatch(/^[a-f0-9]{16}$/);
  });
});
|
|
139
|
+
// ── isWarmNodeModules tests ───────────────────────────────────────────────────
describe("isWarmNodeModules", () => {
  let tmpDir; // scratch directory for synthetic warm caches
  beforeEach(() => {
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-warm-test-"));
  });
  afterEach(() => {
    fs.rmSync(tmpDir, { recursive: true, force: true });
  });
  it("returns false for a non-existent directory", async () => {
    const { isWarmNodeModules } = await import("./cleanup.js");
    expect(isWarmNodeModules(path.join(tmpDir, "does-not-exist"))).toBe(false);
  });
  it("returns false for an empty directory", async () => {
    const { isWarmNodeModules } = await import("./cleanup.js");
    const emptyDir = path.join(tmpDir, "empty");
    fs.mkdirSync(emptyDir);
    expect(isWarmNodeModules(emptyDir)).toBe(false);
  });
  it("returns false when directory has files but no .modules.yaml (corrupted)", async () => {
    const { isWarmNodeModules } = await import("./cleanup.js");
    const warmDir = path.join(tmpDir, "warm");
    fs.mkdirSync(warmDir);
    // Populated but sentinel-less → treated as an interrupted install.
    fs.writeFileSync(path.join(warmDir, "some-package"), "content");
    expect(isWarmNodeModules(warmDir)).toBe(false);
  });
  it("returns true when directory has .modules.yaml (pnpm)", async () => {
    const { isWarmNodeModules } = await import("./cleanup.js");
    const warmDir = path.join(tmpDir, "warm");
    fs.mkdirSync(warmDir);
    fs.writeFileSync(path.join(warmDir, ".modules.yaml"), "hoistedDependencies: {}");
    fs.mkdirSync(path.join(warmDir, ".pnpm"), { recursive: true });
    expect(isWarmNodeModules(warmDir)).toBe(true);
  });
  it("returns true when directory has .package-lock.json (npm)", async () => {
    const { isWarmNodeModules } = await import("./cleanup.js");
    const warmDir = path.join(tmpDir, "warm-npm");
    fs.mkdirSync(warmDir);
    fs.writeFileSync(path.join(warmDir, ".package-lock.json"), "{}");
    fs.writeFileSync(path.join(warmDir, "express"), "pkg");
    expect(isWarmNodeModules(warmDir)).toBe(true);
  });
  it("returns true when directory has .yarn-integrity (yarn)", async () => {
    const { isWarmNodeModules } = await import("./cleanup.js");
    const warmDir = path.join(tmpDir, "warm-yarn");
    fs.mkdirSync(warmDir);
    fs.writeFileSync(path.join(warmDir, ".yarn-integrity"), "{}");
    fs.writeFileSync(path.join(warmDir, "express"), "pkg");
    expect(isWarmNodeModules(warmDir)).toBe(true);
  });
  it("returns true when directory has .cache (bun)", async () => {
    const { isWarmNodeModules } = await import("./cleanup.js");
    const warmDir = path.join(tmpDir, "warm-bun");
    fs.mkdirSync(warmDir);
    // Bun writes no dedicated sentinel; .cache is the accepted marker.
    fs.mkdirSync(path.join(warmDir, ".cache"));
    fs.writeFileSync(path.join(warmDir, "express"), "pkg");
    expect(isWarmNodeModules(warmDir)).toBe(true);
  });
});
|
|
198
|
+
// ── repairWarmCache tests ─────────────────────────────────────────────────────
describe("repairWarmCache", () => {
  let tmpDir; // scratch directory for synthetic caches
  beforeEach(() => {
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-repair-test-"));
  });
  afterEach(() => {
    fs.rmSync(tmpDir, { recursive: true, force: true });
  });
  it("returns 'cold' for a non-existent directory", async () => {
    const { repairWarmCache } = await import("./cleanup.js");
    expect(repairWarmCache(path.join(tmpDir, "nope"))).toBe("cold");
  });
  it("returns 'cold' for an empty directory", async () => {
    const { repairWarmCache } = await import("./cleanup.js");
    const emptyDir = path.join(tmpDir, "empty");
    fs.mkdirSync(emptyDir);
    expect(repairWarmCache(emptyDir)).toBe("cold");
  });
  it("returns 'warm' when .modules.yaml exists (pnpm)", async () => {
    const { repairWarmCache } = await import("./cleanup.js");
    const warmDir = path.join(tmpDir, "warm");
    fs.mkdirSync(warmDir);
    fs.writeFileSync(path.join(warmDir, ".modules.yaml"), "ok");
    expect(repairWarmCache(warmDir)).toBe("warm");
  });
  it("returns 'warm' when .package-lock.json exists (npm)", async () => {
    const { repairWarmCache } = await import("./cleanup.js");
    const warmDir = path.join(tmpDir, "warm-npm");
    fs.mkdirSync(warmDir);
    fs.writeFileSync(path.join(warmDir, ".package-lock.json"), "{}");
    expect(repairWarmCache(warmDir)).toBe("warm");
  });
  it("returns 'repaired' and nukes a corrupted cache (files but no sentinel)", async () => {
    const { repairWarmCache } = await import("./cleanup.js");
    const brokenDir = path.join(tmpDir, "broken");
    // Simulate an interrupted pnpm install: package files without a sentinel.
    fs.mkdirSync(path.join(brokenDir, ".pnpm", "yaml@2.8.2"), { recursive: true });
    fs.writeFileSync(path.join(brokenDir, ".pnpm", "yaml@2.8.2", "Pair.js"), "broken");
    expect(repairWarmCache(brokenDir)).toBe("repaired");
    // Directory should be recreated empty
    expect(fs.existsSync(brokenDir)).toBe(true);
    expect(fs.readdirSync(brokenDir)).toHaveLength(0);
  });
});
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import os from "node:os";
|
|
2
|
+
/**
 * A simple Promise-based semaphore that limits how many async tasks
 * execute concurrently. Used by the orchestrator to throttle parallel
 * job launches within a dependency wave.
 *
 * @param {number} max - Maximum number of tasks allowed to run at once.
 */
export function createConcurrencyLimiter(max) {
  // Number of tasks currently holding a slot.
  let activeCount = 0;
  // Resolver callbacks for tasks waiting on a slot, in FIFO order.
  const waiters = [];

  // Resolves once a slot is available, claiming it on resolution.
  function acquire() {
    if (activeCount >= max) {
      return new Promise((resolve) => {
        waiters.push(() => {
          activeCount++;
          resolve();
        });
      });
    }
    activeCount++;
    return Promise.resolve();
  }

  // Returns the slot and wakes the oldest waiter, if any.
  function release() {
    activeCount--;
    const wake = waiters.shift();
    if (wake) {
      wake();
    }
  }

  return {
    /** Wrap an async function so it respects the concurrency limit. */
    async run(fn) {
      await acquire();
      try {
        return await fn();
      } finally {
        // Always release — even when fn throws — so waiters never starve.
        release();
      }
    },
    /** Current number of active tasks (for testing / logging). */
    get active() {
      return activeCount;
    },
    /** Current queue depth (for testing / logging). */
    get queued() {
      return waiters.length;
    },
  };
}
|
|
50
|
+
/**
 * Determine the default max concurrent jobs based on the host CPU count.
 * Returns floor(cpuCount / 2), with a minimum of 1.
 *
 * @returns {number} Default concurrency cap for this host (>= 1).
 */
export function getDefaultMaxConcurrentJobs() {
  // Half the logical CPUs, but never drop below one job.
  const half = Math.floor(os.cpus().length / 2);
  return half >= 1 ? half : 1;
}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import { describe, it, expect } from "vitest";
|
|
2
|
+
import { createConcurrencyLimiter, getDefaultMaxConcurrentJobs } from "./concurrency.js";
|
|
3
|
+
describe("createConcurrencyLimiter", () => {
  it("limits concurrent execution to max", async () => {
    const limiter = createConcurrencyLimiter(2);
    let running = 0;
    let maxRunning = 0; // high-water mark of simultaneous tasks
    const task = () => limiter.run(async () => {
      running++;
      maxRunning = Math.max(maxRunning, running);
      // Simulate async work
      await new Promise((r) => setTimeout(r, 50));
      running--;
    });
    await Promise.all([task(), task(), task(), task(), task()]);
    expect(maxRunning).toBe(2);
  });
  it("runs tasks serially when limit is 1", async () => {
    const limiter = createConcurrencyLimiter(1);
    const order = [];
    const task = (id) => limiter.run(async () => {
      order.push(id);
      await new Promise((r) => setTimeout(r, 10));
    });
    // FIFO queue: submission order must equal execution order.
    await Promise.all([task(1), task(2), task(3)]);
    expect(order).toEqual([1, 2, 3]);
  });
  it("runs all in parallel when limit exceeds task count", async () => {
    const limiter = createConcurrencyLimiter(10);
    let running = 0;
    let maxRunning = 0;
    const task = () => limiter.run(async () => {
      running++;
      maxRunning = Math.max(maxRunning, running);
      await new Promise((r) => setTimeout(r, 50));
      running--;
    });
    await Promise.all([task(), task(), task()]);
    expect(maxRunning).toBe(3);
  });
  it("propagates errors without deadlocking", async () => {
    const limiter = createConcurrencyLimiter(2);
    // A throwing task must release its slot so later tasks still run.
    const results = await Promise.allSettled([
      limiter.run(async () => {
        throw new Error("boom");
      }),
      limiter.run(async () => "ok"),
      limiter.run(async () => "also ok"),
    ]);
    expect(results[0]).toMatchObject({ status: "rejected" });
    expect(results[1]).toMatchObject({ status: "fulfilled", value: "ok" });
    expect(results[2]).toMatchObject({ status: "fulfilled", value: "also ok" });
  });
  it("exposes active and queued counts", async () => {
    // NOTE(review): this test relies on a 5ms sleep to let task1 acquire its
    // slot before inspecting state — could flake under heavy load; consider
    // a deterministic handshake instead.
    const limiter = createConcurrencyLimiter(1);
    let sawQueued = false;
    const task1 = limiter.run(async () => {
      // At this point task2 should be queued
      await new Promise((r) => setTimeout(r, 50));
    });
    // Give task1 a moment to acquire
    await new Promise((r) => setTimeout(r, 5));
    const task2 = limiter.run(async () => {
      // nothing
    });
    // After task1 acquired, check state
    expect(limiter.active).toBe(1);
    if (limiter.queued > 0) {
      sawQueued = true;
    }
    await Promise.all([task1, task2]);
    expect(sawQueued).toBe(true);
    expect(limiter.active).toBe(0);
    expect(limiter.queued).toBe(0);
  });
});
|
|
77
|
+
describe("getDefaultMaxConcurrentJobs", () => {
  it("returns at least 1", () => {
    const result = getDefaultMaxConcurrentJobs();
    expect(result).toBeGreaterThanOrEqual(1);
  });
  it("returns a reasonable number for the current host", async () => {
    const result = getDefaultMaxConcurrentJobs();
    // This file is an ES module, so CommonJS `require("os")` is undefined at
    // runtime and the original assertion could never execute. Load node:os
    // with a dynamic import instead.
    const os = await import("node:os");
    const expected = Math.max(1, Math.floor(os.cpus().length / 2));
    expect(result).toBe(expected);
  });
});
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { minimatch } from "minimatch";
|
|
2
|
+
/**
 * Lightweight debug logger with namespace support.
 *
 * Enable via DEBUG env var with glob patterns:
 *   DEBUG=agent-ci:*                — all namespaces
 *   DEBUG=agent-ci:cli              — CLI only
 *   DEBUG=agent-ci:dtu              — DTU only
 *   DEBUG=agent-ci:runner           — Runner only
 *   DEBUG=agent-ci:cli,agent-ci:dtu — multiple
 *
 * Output goes to stderr so stdout stays clean for piping.
 */
// Parsed once at module load: DEBUG is a comma-separated list of glob
// patterns; blank entries (e.g. trailing commas) are dropped.
const debugPatterns = (process.env.DEBUG || "")
  .split(",")
  .map((s) => s.trim())
  .filter(Boolean);
|
|
18
|
+
// True when the namespace matches at least one DEBUG glob pattern.
function isEnabled(namespace) {
  for (const pattern of debugPatterns) {
    if (minimatch(namespace, pattern)) {
      return true;
    }
  }
  return false;
}
|
|
21
|
+
/**
 * Create a namespaced debug logger.
 *
 * Returns a no-op function when the namespace is not enabled via DEBUG;
 * otherwise returns a logger that serializes its arguments (strings pass
 * through, everything else is JSON-stringified) and writes one line to
 * stderr, prefixed with the namespace.
 *
 * @param {string} namespace - Logger namespace, e.g. "agent-ci:cli".
 */
export function createDebug(namespace) {
  if (!isEnabled(namespace)) {
    // Disabled namespaces cost nothing at call sites.
    return () => { };
  }
  return (...args) => {
    const parts = args.map((arg) => (typeof arg === "string" ? arg : JSON.stringify(arg)));
    process.stderr.write(` ${namespace} ${parts.join(" ")}\n`);
  };
}
|
|
32
|
+
// Pre-configured loggers for each domain (enable with DEBUG=agent-ci:<name>
// or DEBUG=agent-ci:* — see the module header comment).
export const debugCli = createDebug("agent-ci:cli");
export const debugRunner = createDebug("agent-ci:runner");
export const debugDtu = createDebug("agent-ci:dtu");
export const debugBoot = createDebug("agent-ci:boot");
|