@redwoodjs/agent-ci 0.8.2 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +40 -11
- package/dist/config.js +36 -24
- package/dist/config.test.js +77 -1
- package/dist/docker/image-pull.js +42 -0
- package/dist/docker/image-pull.test.js +38 -0
- package/dist/output/reporter.js +31 -8
- package/dist/output/reporter.test.js +44 -0
- package/dist/output/state-renderer.js +7 -7
- package/dist/output/state-renderer.test.js +47 -4
- package/dist/runner/dirty-sha.js +64 -0
- package/dist/runner/dirty-sha.test.js +101 -0
- package/dist/runner/local-job.js +5 -0
- package/dist/workflow/workflow-parser.js +1 -1
- package/package.json +2 -2
package/dist/cli.js
CHANGED

@@ -7,7 +7,7 @@ import { getNextLogNum } from "./output/logger.js";
 import { setWorkingDirectory, DEFAULT_WORKING_DIR, PROJECT_ROOT, } from "./output/working-directory.js";
 import { debugCli } from "./output/debug.js";
 import { executeLocalJob } from "./runner/local-job.js";
-import { parseWorkflowSteps, parseWorkflowServices, parseWorkflowContainer, validateSecrets, parseMatrixDef, expandMatrixCombinations, collapseMatrixToSingle, isWorkflowRelevant, getChangedFiles, parseJobOutputDefs, parseJobIf, evaluateJobIf, parseFailFast, expandExpressions, } from "./workflow/workflow-parser.js";
+import { parseWorkflowSteps, parseWorkflowServices, parseWorkflowContainer, validateSecrets, extractSecretRefs, parseMatrixDef, expandMatrixCombinations, collapseMatrixToSingle, isWorkflowRelevant, getChangedFiles, parseJobOutputDefs, parseJobIf, evaluateJobIf, parseFailFast, expandExpressions, } from "./workflow/workflow-parser.js";
 import { resolveJobOutputs } from "./runner/result-builder.js";
 import { createConcurrencyLimiter, getDefaultMaxConcurrentJobs } from "./output/concurrency.js";
 import { isWarmNodeModules, computeLockfileHash } from "./output/cleanup.js";

@@ -18,6 +18,7 @@ import { expandReusableJobs } from "./workflow/reusable-workflow.js";
 import { prefetchRemoteWorkflows } from "./workflow/remote-workflow-fetch.js";
 import { printSummary } from "./output/reporter.js";
 import { syncWorkspaceForRetry } from "./runner/sync.js";
+import { computeDirtySha } from "./runner/dirty-sha.js";
 import { RunStateStore } from "./output/run-state.js";
 import { renderRunState } from "./output/state-renderer.js";
 import { isAgentMode, setQuietMode } from "./output/agent-mode.js";

@@ -489,9 +490,12 @@ async function handleWorkflow(options) {
     const { headSha, shaRef } = sha
         ? resolveHeadSha(repoRoot, sha)
         : { headSha: undefined, shaRef: undefined };
-    // Always resolve
-    //
-
+    // Always resolve a SHA that represents the code being executed.
+    // When the working tree is dirty and no explicit --sha was given, compute an
+    // ephemeral commit SHA that captures the dirty state (including untracked files).
+    // This is purely informational — actions/checkout is always stubbed, so no
+    // workflow will ever try to fetch this SHA from a remote.
+    const realHeadSha = headSha ?? computeDirtySha(repoRoot) ?? resolveHeadSha(repoRoot, "HEAD").headSha;
     const baseSha = resolveBaseSha(repoRoot, realHeadSha);
     const githubRepo = config.GITHUB_REPO ?? resolveRepoSlug(repoRoot);
     config.GITHUB_REPO = githubRepo;

@@ -546,7 +550,11 @@ async function handleWorkflow(options) {
     if (expandedJobs.length === 1) {
         const ej = expandedJobs[0];
         const actualTaskName = ej.sourceTaskName ?? ej.taskName;
-        const
+        const requiredRefs = extractSecretRefs(ej.workflowPath, actualTaskName);
+        const secrets = loadMachineSecrets(repoRoot, requiredRefs);
+        if (githubToken && !secrets["GITHUB_TOKEN"]) {
+            secrets["GITHUB_TOKEN"] = githubToken;
+        }
         const secretsFilePath = path.join(repoRoot, ".env.agent-ci");
         validateSecrets(ej.workflowPath, actualTaskName, secrets, secretsFilePath);
         // Resolve inputs for called workflow jobs

@@ -608,7 +616,11 @@ async function handleWorkflow(options) {
     let globalIdx = 0;
     const buildJob = (ej) => {
         const actualTaskName = ej.sourceTaskName ?? ej.taskName;
-        const
+        const requiredRefs = extractSecretRefs(ej.workflowPath, actualTaskName);
+        const secrets = loadMachineSecrets(repoRoot, requiredRefs);
+        if (githubToken && !secrets["GITHUB_TOKEN"]) {
+            secrets["GITHUB_TOKEN"] = githubToken;
+        }
         const secretsFilePath = path.join(repoRoot, ".env.agent-ci");
         validateSecrets(ej.workflowPath, actualTaskName, secrets, secretsFilePath);
         const idx = globalIdx++;

@@ -668,7 +680,11 @@ async function handleWorkflow(options) {
         const { taskName, matrixContext } = ej;
         const actualTaskName = ej.sourceTaskName ?? taskName;
         debugCli(`Running: ${path.basename(ej.workflowPath)} | Task: ${taskName}${matrixContext ? ` | Matrix: ${JSON.stringify(Object.fromEntries(Object.entries(matrixContext).filter(([k]) => !k.startsWith("__"))))}` : ""}`);
-        const
+        const requiredRefs = extractSecretRefs(ej.workflowPath, actualTaskName);
+        const secrets = loadMachineSecrets(repoRoot, requiredRefs);
+        if (githubToken && !secrets["GITHUB_TOKEN"]) {
+            secrets["GITHUB_TOKEN"] = githubToken;
+        }
         const secretsFilePath = path.join(repoRoot, ".env.agent-ci");
         validateSecrets(ej.workflowPath, actualTaskName, secrets, secretsFilePath);
         const inputsContext = resolveInputsForJob(ej, secrets, needsContext);

@@ -835,6 +851,7 @@ async function handleWorkflow(options) {
         collectOutputs(result, ej.taskName);
         return result;
     };
+    const seenErrorMessages = new Set();
     for (let wi = 0; wi < filteredWaves.length; wi++) {
         const waveJobIds = new Set(filteredWaves[wi]);
         const waveJobs = expandedJobs.filter((j) => waveJobIds.has(j.taskName));

@@ -856,8 +873,11 @@ async function handleWorkflow(options) {
             else {
                 const taskName = isJobError(r.reason) ? r.reason.taskName : "unknown";
                 const errorMessage = isJobError(r.reason) ? r.reason.message : String(r.reason);
-
-
+                if (!seenErrorMessages.has(errorMessage)) {
+                    seenErrorMessages.add(errorMessage);
+                    console.error(`\n[Agent CI] Job failed with error: ${taskName}`);
+                    console.error(` Error: ${errorMessage}`);
+                }
                 allResults.push(createFailedJobResult(taskName, workflowPath, r.reason));
             }
         }

@@ -874,8 +894,11 @@ async function handleWorkflow(options) {
             else {
                 const taskName = isJobError(r.reason) ? r.reason.taskName : "unknown";
                 const errorMessage = isJobError(r.reason) ? r.reason.message : String(r.reason);
-
-
+                if (!seenErrorMessages.has(errorMessage)) {
+                    seenErrorMessages.add(errorMessage);
+                    console.error(`\n[Agent CI] Job failed with error: ${taskName}`);
+                    console.error(` Error: ${errorMessage}`);
+                }
                 allResults.push(createFailedJobResult(taskName, workflowPath, r.reason));
             }
         }

@@ -922,6 +945,12 @@ function printUsage() {
     console.log(" (auto-resolves via `gh auth token` if no value given)");
     console.log(" Or set: AGENT_CI_GITHUB_TOKEN env var");
     console.log(" --commit-status Post a GitHub commit status after the run (requires --github-token)");
+    console.log("");
+    console.log("Secrets:");
+    console.log(" Workflow secrets (${{ secrets.FOO }}) are resolved from:");
+    console.log(" 1. .env.agent-ci file in the repo root");
+    console.log(" 2. Environment variables (shell env acts as fallback)");
+    console.log(" 3. --github-token automatically provides secrets.GITHUB_TOKEN");
 }
 function resolveRepoRoot() {
     let repoRoot = process.cwd();
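Read together, the cli.js changes above switch secret handling to a per-job flow: the refs a job actually uses are extracted from its workflow, loaded from .env.agent-ci with the shell environment as a fallback, and --github-token is injected as secrets.GITHUB_TOKEN when not already set. A minimal sketch of that flow, reassembled from the added lines (the repo root, workflow path, and task name below are illustrative placeholders, not values from the diff):

// Sketch only: reassembled from the diff above; paths and names are hypothetical.
import path from "path";
import { extractSecretRefs, validateSecrets } from "./workflow/workflow-parser.js";
import { loadMachineSecrets } from "./config.js";

const repoRoot = "/path/to/repo";                              // hypothetical
const workflowPath = ".github/workflows/ci.yml";               // hypothetical
const githubToken = process.env.AGENT_CI_GITHUB_TOKEN;         // or resolved via `gh auth token`

const requiredRefs = extractSecretRefs(workflowPath, "test");  // e.g. ["NPM_TOKEN"]
const secrets = loadMachineSecrets(repoRoot, requiredRefs);    // .env.agent-ci first, then process.env
if (githubToken && !secrets["GITHUB_TOKEN"]) {
    secrets["GITHUB_TOKEN"] = githubToken;                     // --github-token fills secrets.GITHUB_TOKEN
}
validateSecrets(workflowPath, "test", secrets, path.join(repoRoot, ".env.agent-ci"));
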
package/dist/config.js
CHANGED

@@ -55,35 +55,47 @@ export const config = {
     GITHUB_API_URL: process.env.GITHUB_API_URL || "http://localhost:8910",
 };
 /**
- * Load machine-local secrets from `.env.
+ * Load machine-local secrets from `.env.agent-ci` at the given base directory.
  * The file uses KEY=VALUE syntax (lines starting with # are ignored).
- *
+ *
+ * When `envFallbackKeys` is provided, any key in that list that is NOT already
+ * present in the file will be filled from `process.env` (shell environment
+ * variables act as a fallback for the .env file).
+ *
+ * Returns an empty object if the file doesn't exist and no env fallbacks match.
  */
-export function loadMachineSecrets(baseDir) {
+export function loadMachineSecrets(baseDir, envFallbackKeys) {
     const envMachinePath = path.join(baseDir ?? PROJECT_ROOT, ".env.agent-ci");
-    if (!fs.existsSync(envMachinePath)) {
-        return {};
-    }
     const secrets = {};
-
-
-    const
-
-
-
-
-
-
-
-
-
-
-    (value.startsWith("'
-
+    if (fs.existsSync(envMachinePath)) {
+        const lines = fs.readFileSync(envMachinePath, "utf-8").split("\n");
+        for (const line of lines) {
+            const trimmed = line.trim();
+            if (!trimmed || trimmed.startsWith("#")) {
+                continue;
+            }
+            const eqIdx = trimmed.indexOf("=");
+            if (eqIdx < 1) {
+                continue;
+            }
+            const key = trimmed.slice(0, eqIdx).trim();
+            let value = trimmed.slice(eqIdx + 1).trim();
+            // Strip optional surrounding quotes
+            if ((value.startsWith('"') && value.endsWith('"')) ||
+                (value.startsWith("'") && value.endsWith("'"))) {
+                value = value.slice(1, -1);
+            }
+            if (key) {
+                secrets[key] = value;
+            }
         }
-
-
+    }
+    // Fill missing secrets from process.env (shell env vars act as fallback)
+    if (envFallbackKeys) {
+        for (const key of envFallbackKeys) {
+            if (!secrets[key] && process.env[key]) {
+                secrets[key] = process.env[key];
+            }
         }
     }
     return secrets;
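As a quick illustration of the updated loader's precedence (the key names here are made up for the example): values found in .env.agent-ci always win, and only keys listed in envFallbackKeys may be filled from the shell environment.

// Illustration only: key names and the repo path are hypothetical.
import { loadMachineSecrets } from "./config.js";

// Suppose /repo/.env.agent-ci contains:  NPM_TOKEN=from-file
process.env.TURBO_TOKEN = "from-shell";
loadMachineSecrets("/repo", ["NPM_TOKEN", "TURBO_TOKEN"]);
// → { NPM_TOKEN: "from-file", TURBO_TOKEN: "from-shell" }
loadMachineSecrets("/repo");
// → { NPM_TOKEN: "from-file" }  (no fallback keys given, so the shell env is ignored)
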
package/dist/config.test.js
CHANGED

@@ -3,7 +3,7 @@ import fs from "fs";
 import os from "os";
 import path from "path";
 import { afterEach, beforeEach, describe, expect, it } from "vitest";
-import { config, getFirstRemoteUrl, parseRepoSlug, resolveRepoSlug } from "./config.js";
+import { config, getFirstRemoteUrl, loadMachineSecrets, parseRepoSlug, resolveRepoSlug, } from "./config.js";
 describe("parseRepoSlug", () => {
     it.each([
         ["https://github.com/redwoodjs/agent-ci.git", "redwoodjs/agent-ci"],

@@ -155,3 +155,79 @@ describe("GITHUB_REPO env var override priority", () => {
         }).toThrow(/Could not detect GitHub repository/);
     });
 });
+// ─── loadMachineSecrets ──────────────────────────────────────────────────────
+describe("loadMachineSecrets", () => {
+    let tmpDir;
+    const savedEnv = {};
+    function saveEnv(...keys) {
+        for (const k of keys) {
+            savedEnv[k] = process.env[k];
+        }
+    }
+    afterEach(() => {
+        if (tmpDir) {
+            fs.rmSync(tmpDir, { recursive: true, force: true });
+        }
+        for (const [k, v] of Object.entries(savedEnv)) {
+            if (v === undefined) {
+                delete process.env[k];
+            }
+            else {
+                process.env[k] = v;
+            }
+        }
+    });
+    function writeEnvFile(content) {
+        tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "secrets-test-"));
+        fs.writeFileSync(path.join(tmpDir, ".env.agent-ci"), content);
+        return tmpDir;
+    }
+    function makeTmpDir() {
+        tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "secrets-test-"));
+        return tmpDir;
+    }
+    it("returns empty object when .env.agent-ci does not exist", () => {
+        const dir = makeTmpDir();
+        expect(loadMachineSecrets(dir)).toEqual({});
+    });
+    it("parses KEY=VALUE pairs from file", () => {
+        const dir = writeEnvFile("FOO=bar\nBAZ=qux\n");
+        expect(loadMachineSecrets(dir)).toEqual({ FOO: "bar", BAZ: "qux" });
+    });
+    it("fills missing secrets from process.env when envFallbackKeys provided", () => {
+        const dir = makeTmpDir();
+        saveEnv("TEST_SECRET_ABC");
+        process.env.TEST_SECRET_ABC = "from-env";
+        const secrets = loadMachineSecrets(dir, ["TEST_SECRET_ABC"]);
+        expect(secrets.TEST_SECRET_ABC).toBe("from-env");
+    });
+    it("file values take precedence over process.env", () => {
+        const dir = writeEnvFile("MY_TOKEN=from-file\n");
+        saveEnv("MY_TOKEN");
+        process.env.MY_TOKEN = "from-env";
+        const secrets = loadMachineSecrets(dir, ["MY_TOKEN"]);
+        expect(secrets.MY_TOKEN).toBe("from-file");
+    });
+    it("does not pull from process.env for keys not in envFallbackKeys", () => {
+        const dir = makeTmpDir();
+        saveEnv("UNRELATED_VAR");
+        process.env.UNRELATED_VAR = "should-not-appear";
+        const secrets = loadMachineSecrets(dir, ["OTHER_KEY"]);
+        expect(secrets.UNRELATED_VAR).toBeUndefined();
+        expect(secrets.OTHER_KEY).toBeUndefined();
+    });
+    it("does not pull from process.env when envFallbackKeys is omitted", () => {
+        const dir = makeTmpDir();
+        saveEnv("SOME_SECRET");
+        process.env.SOME_SECRET = "env-value";
+        const secrets = loadMachineSecrets(dir);
+        expect(secrets.SOME_SECRET).toBeUndefined();
+    });
+    it("merges file secrets and env fallbacks", () => {
+        const dir = writeEnvFile("FROM_FILE=file-val\n");
+        saveEnv("FROM_ENV");
+        process.env.FROM_ENV = "env-val";
+        const secrets = loadMachineSecrets(dir, ["FROM_FILE", "FROM_ENV"]);
+        expect(secrets).toEqual({ FROM_FILE: "file-val", FROM_ENV: "env-val" });
+    });
+});

package/dist/docker/image-pull.js
ADDED

@@ -0,0 +1,42 @@
+/**
+ * Ensures a Docker image is present locally, pulling it if not.
+ *
+ * Docker's createContainer() returns a 404 "No such image" error when the
+ * image is absent — it does not pull automatically. This helper mirrors the
+ * pattern already used by service-containers.ts and must be called before
+ * any createContainer() call.
+ *
+ * Reproduces: https://github.com/redwoodjs/agent-ci/issues/203
+ */
+export async function ensureImagePulled(docker, image) {
+    try {
+        await docker.getImage(image).inspect();
+        return; // already present
+    }
+    catch {
+        // Not found locally — fall through to pull
+    }
+    await new Promise((resolve, reject) => {
+        docker.pull(image, (err, stream) => {
+            if (err) {
+                return reject(wrapPullError(image, err));
+            }
+            docker.modem.followProgress(stream, (err) => {
+                if (err) {
+                    reject(wrapPullError(image, err));
+                }
+                else {
+                    resolve();
+                }
+            });
+        });
+    });
+}
+function wrapPullError(image, cause) {
+    return new Error(`Failed to pull Docker image '${image}': ${cause.message}\n` +
+        "\n" +
+        " Possible causes:\n" +
+        " • The image name is misspelled or does not exist in the registry\n" +
+        " • The image is private — authenticate first: docker login <registry>\n" +
+        " • No network connection");
+}
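For context, a small usage sketch of the new helper with dockerode, mirroring how local-job.js calls it further down (the image name and container options are illustrative):

// Sketch only: image name and container options are hypothetical.
import Docker from "dockerode";
import { ensureImagePulled } from "./image-pull.js";

const docker = new Docker();                      // default socket resolution
const image = "node:20-bookworm";
await ensureImagePulled(docker, image);           // no-op if cached, pulls otherwise
const container = await docker.createContainer({ Image: image, Cmd: ["node", "--version"] });
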

package/dist/docker/image-pull.test.js
ADDED

@@ -0,0 +1,38 @@
+import { describe, it, expect, beforeAll } from "vitest";
+import Docker from "dockerode";
+import { ensureImagePulled } from "./image-pull.js";
+import { resolveDockerSocket } from "./docker-socket.js";
+// Integration test: requires a running Docker daemon and network access.
+// Uses hello-world (~13 KB) to keep pull time minimal.
+const TEST_IMAGE = "hello-world:latest";
+describe("ensureImagePulled", () => {
+    let docker;
+    beforeAll(async () => {
+        const socket = resolveDockerSocket();
+        docker = new Docker({ socketPath: socket.socketPath });
+        await docker.ping();
+    });
+    it("pulls the image when it is not present locally", { timeout: 60000 }, async () => {
+        // Arrange: remove the image so it is definitely absent
+        try {
+            await docker.getImage(TEST_IMAGE).remove({ force: true });
+        }
+        catch {
+            // Already absent — fine
+        }
+        // Act
+        await ensureImagePulled(docker, TEST_IMAGE);
+        // Assert: image must now be inspectable
+        const info = await docker.getImage(TEST_IMAGE).inspect();
+        expect(info.RepoTags).toContain(TEST_IMAGE);
+    });
+    it("rejects with an error when the image does not exist in the registry", { timeout: 30000 }, async () => {
+        await expect(ensureImagePulled(docker, "ghcr.io/redwoodjs/agent-ci-does-not-exist:latest")).rejects.toThrow("Failed to pull Docker image 'ghcr.io/redwoodjs/agent-ci-does-not-exist:latest'");
+    });
+    it("does nothing when the image is already present", async () => {
+        // Arrange: ensure the image is present (previous test or pre-cached)
+        await ensureImagePulled(docker, TEST_IMAGE);
+        // Act: calling again must not throw
+        await expect(ensureImagePulled(docker, TEST_IMAGE)).resolves.toBeUndefined();
+    });
+});
package/dist/output/reporter.js
CHANGED

@@ -10,25 +10,48 @@ function formatDuration(ms) {
     return rem > 0 ? `${m}m ${rem}s` : `${m}m`;
 }
 // ─── Failures-first summary (emitted after all jobs complete) ─────────────────
+function getErrorContent(f) {
+    if (f.failedStepLogPath && fs.existsSync(f.failedStepLogPath)) {
+        return fs.readFileSync(f.failedStepLogPath, "utf-8");
+    }
+    if (f.lastOutputLines && f.lastOutputLines.length > 0) {
+        return f.lastOutputLines.join("\n") + "\n";
+    }
+    return "";
+}
+function formatFailureHeader(f) {
+    if (f.failedStep) {
+        return ` ✗ ${f.workflow} > ${f.taskId} > "${f.failedStep}"`;
+    }
+    return ` ✗ ${f.workflow} > ${f.taskId}`;
+}
 export function printSummary(results, runDir) {
     const failures = results.filter((r) => !r.succeeded);
     const passes = results.filter((r) => r.succeeded);
     const totalMs = results.reduce((sum, r) => sum + r.durationMs, 0);
     if (failures.length > 0) {
         process.stdout.write("\n━━━ FAILURES ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n\n");
+        // Group failures by error content to avoid repeating identical errors
+        const groups = [];
+        const seen = new Map();
         for (const f of failures) {
-
-
+            const content = getErrorContent(f);
+            const existing = seen.get(content);
+            if (existing) {
+                existing.failures.push(f);
             }
             else {
-
+                const group = { failures: [f], errorContent: content };
+                groups.push(group);
+                seen.set(content, group);
             }
-
-
-
+        }
+        for (const group of groups) {
+            for (const f of group.failures) {
+                process.stdout.write(formatFailureHeader(f) + "\n");
             }
-
-            process.stdout.write("\n" +
+            if (group.errorContent) {
+                process.stdout.write("\n" + group.errorContent);
             }
             process.stdout.write("\n");
         }
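The practical effect of the grouping: when several matrix jobs fail with identical output, each job still gets its own header line but the error body prints once. A sketch, with result objects trimmed down to the fields the reporter reads (the real result shape carries more fields):

// Sketch only: result objects are reduced to the fields printSummary reads.
import { printSummary } from "./output/reporter.js";

printSummary([
    { succeeded: false, workflow: "ci.yml", taskId: "test (1)", failedStep: "Run tests", lastOutputLines: ["FAIL app.test.ts"], durationMs: 1200 },
    { succeeded: false, workflow: "ci.yml", taskId: "test (2)", failedStep: "Run tests", lastOutputLines: ["FAIL app.test.ts"], durationMs: 1300 },
]);
// Both "✗ ci.yml > test (…)" headers are printed; "FAIL app.test.ts" appears once.
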
package/dist/output/reporter.test.js
CHANGED

@@ -65,6 +65,50 @@ describe("printSummary", () => {
         ]);
         expect(output).toContain('✗ retry-proof.yml > test > "Run assertion test"');
     });
+    it("deduplicates failures with identical error content", () => {
+        printSummary([
+            makeResult({
+                taskId: "test (1)",
+                failedStep: "[Job startup failed]",
+                lastOutputLines: ["Missing secrets"],
+            }),
+            makeResult({
+                taskId: "test (2)",
+                failedStep: "[Job startup failed]",
+                lastOutputLines: ["Missing secrets"],
+            }),
+            makeResult({
+                taskId: "test (3)",
+                failedStep: "[Job startup failed]",
+                lastOutputLines: ["Missing secrets"],
+            }),
+        ]);
+        // Error content should appear only once
+        const matches = output.match(/Missing secrets/g);
+        expect(matches).toHaveLength(1);
+        // All job headers should still appear
+        expect(output).toContain('test (1) > "[Job startup failed]"');
+        expect(output).toContain('test (2) > "[Job startup failed]"');
+        expect(output).toContain('test (3) > "[Job startup failed]"');
+        // Summary should show correct count
+        expect(output).toContain("3 failed");
+    });
+    it("keeps distinct errors separate", () => {
+        printSummary([
+            makeResult({
+                taskId: "build",
+                failedStep: "Compile",
+                lastOutputLines: ["syntax error"],
+            }),
+            makeResult({
+                taskId: "lint",
+                failedStep: "ESLint",
+                lastOutputLines: ["unused variable"],
+            }),
+        ]);
+        expect(output).toContain("syntax error");
+        expect(output).toContain("unused variable");
+    });
     it("shows pass count in summary for a successful run", () => {
         printSummary([makeResult({ succeeded: true })]);
         expect(output).toContain("✓ 1 passed");
package/dist/output/state-renderer.js
CHANGED

@@ -142,22 +142,22 @@ export function renderRunState(state) {
     const totalJobs = state.workflows.reduce((sum, wf) => sum + wf.jobs.length, 0);
     const singleJobMode = state.workflows.length === 1 && totalJobs === 1;
     const roots = [];
-    let
+    let pausedJob;
     for (const wf of state.workflows) {
         const children = [];
         for (const job of wf.jobs) {
             children.push(...buildJobNodes(job, singleJobMode));
-            // Capture the first paused job for
-            if (
-
+            // Capture the first paused job for trailing output
+            if (job.status === "paused" && !pausedJob) {
+                pausedJob = job;
             }
         }
         roots.push({ label: path.basename(wf.path), children });
     }
     let output = renderTree(roots);
-    // ──
-    if (
-    const { lastOutputLines, runnerId } =
+    // ── Paused job: append last output + retry/abort hints below tree ──────────
+    if (pausedJob) {
+        const { lastOutputLines, runnerId } = pausedJob;
         if (lastOutputLines && lastOutputLines.length > 0) {
             output += `\n\n ${DIM}Last output:${RESET}`;
             for (const line of lastOutputLines) {
package/dist/output/state-renderer.test.js
CHANGED

@@ -376,11 +376,54 @@ describe("renderRunState", () => {
             ],
         });
         const output = renderRunState(state);
-        // Retry hint is a child node
+        // Retry hint is a child node in the tree
         expect(output).toContain("↻ retry: agent-ci retry --runner agent-ci-5-j2");
-        //
-        expect(output).
-        expect(output).
+        // Trailing "To retry:" / "To abort:" lines also shown in multi-job mode
+        expect(output).toContain("↻ To retry:");
+        expect(output).toContain("■ To abort:");
+    });
+    it("shows last output lines for paused job in multi-job mode", () => {
+        const state = makeState({
+            workflows: [
+                {
+                    id: "ci.yml",
+                    path: "/repo/.github/workflows/ci.yml",
+                    status: "running",
+                    jobs: [
+                        {
+                            id: "build",
+                            runnerId: "agent-ci-5-j1",
+                            status: "completed",
+                            durationMs: 5000,
+                            steps: [],
+                        },
+                        {
+                            id: "test",
+                            runnerId: "agent-ci-5-j2",
+                            status: "paused",
+                            pausedAtStep: "Run tests",
+                            pausedAtMs: "1970-01-01T00:00:05.000Z",
+                            attempt: 1,
+                            lastOutputLines: ["FAIL src/app.test.ts", " Expected: true", " Received: false"],
+                            bootDurationMs: 1000,
+                            steps: [
+                                {
+                                    name: "Run tests",
+                                    index: 1,
+                                    status: "paused",
+                                    startedAt: "1970-01-01T00:00:03.000Z",
+                                },
+                            ],
+                        },
+                    ],
+                },
+            ],
+        });
+        const output = renderRunState(state);
+        expect(output).toContain("Last output:");
+        expect(output).toContain("FAIL src/app.test.ts");
+        expect(output).toContain("Expected: true");
+        expect(output).toContain("Received: false");
     });
 });
 describe("multi-workflow (--all mode)", () => {

package/dist/runner/dirty-sha.js
ADDED

@@ -0,0 +1,64 @@
+import { execSync } from "child_process";
+import path from "path";
+import fs from "fs";
+/**
+ * Compute a SHA that represents the current dirty working-tree state, as if
+ * it were committed. Uses a temporary index + `git write-tree` /
+ * `git commit-tree` so no refs are moved and real history is untouched.
+ *
+ * Returns `undefined` when the tree is clean (no uncommitted changes).
+ */
+export function computeDirtySha(repoRoot) {
+    try {
+        // Quick check: anything dirty?
+        const status = execSync("git status --porcelain", {
+            cwd: repoRoot,
+            stdio: "pipe",
+        })
+            .toString()
+            .trim();
+        if (!status) {
+            return undefined;
+        }
+        const gitDir = execSync("git rev-parse --git-dir", {
+            cwd: repoRoot,
+            stdio: "pipe",
+        })
+            .toString()
+            .trim();
+        const absoluteGitDir = path.isAbsolute(gitDir) ? gitDir : path.join(repoRoot, gitDir);
+        const tmpIndex = path.join(absoluteGitDir, `index-agent-ci-${Date.now()}`);
+        try {
+            // Seed the temp index from the real one so we start from the current staging area.
+            fs.copyFileSync(path.join(absoluteGitDir, "index"), tmpIndex);
+            const env = { ...process.env, GIT_INDEX_FILE: tmpIndex };
+            // Stage everything (tracked + untracked, respecting .gitignore) into the temp index.
+            execSync("git add -A", { cwd: repoRoot, stdio: "pipe", env });
+            // Write a tree object from the temp index.
+            const tree = execSync("git write-tree", {
+                cwd: repoRoot,
+                stdio: "pipe",
+                env,
+            })
+                .toString()
+                .trim();
+            // Create an ephemeral commit object parented on HEAD — no ref is updated.
+            const sha = execSync(`git commit-tree ${tree} -p HEAD -m "agent-ci: dirty working tree"`, {
+                cwd: repoRoot,
+                stdio: "pipe",
+            })
+                .toString()
+                .trim();
+            return sha;
+        }
+        finally {
+            try {
+                fs.unlinkSync(tmpIndex);
+            }
+            catch { }
+        }
+    }
+    catch {
+        return undefined;
+    }
+}
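A short usage sketch (the repo path is illustrative): the helper yields nothing on a clean tree and a full 40-character commit SHA otherwise, leaving HEAD, refs, and the real index untouched.

// Sketch only: the repo path is hypothetical.
import { computeDirtySha } from "./dirty-sha.js";

const sha = computeDirtySha("/path/to/repo");
if (sha) {
    console.log(`dirty working tree captured as ephemeral commit ${sha}`);
} else {
    console.log("working tree is clean; the regular HEAD SHA is used instead");
}
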
package/dist/runner/dirty-sha.test.js
ADDED

@@ -0,0 +1,101 @@
+import { describe, it, expect, beforeEach, afterEach } from "vitest";
+import { execSync } from "child_process";
+import fs from "node:fs";
+import path from "node:path";
+import os from "node:os";
+import { computeDirtySha } from "./dirty-sha.js";
+describe("computeDirtySha", () => {
+    let repoDir;
+    beforeEach(() => {
+        repoDir = fs.mkdtempSync(path.join(os.tmpdir(), "dirty-sha-test-"));
+        execSync("git init", { cwd: repoDir, stdio: "pipe" });
+        execSync('git config user.name "test"', { cwd: repoDir, stdio: "pipe" });
+        execSync('git config user.email "test@test.com"', { cwd: repoDir, stdio: "pipe" });
+        // Create an initial commit so HEAD exists
+        fs.writeFileSync(path.join(repoDir, "initial.txt"), "initial");
+        execSync("git add -A && git commit -m 'initial'", { cwd: repoDir, stdio: "pipe" });
+    });
+    afterEach(() => {
+        fs.rmSync(repoDir, { recursive: true, force: true });
+    });
+    it("returns undefined for a clean working tree", () => {
+        expect(computeDirtySha(repoDir)).toBeUndefined();
+    });
+    it("returns a SHA when tracked files are modified", () => {
+        fs.writeFileSync(path.join(repoDir, "initial.txt"), "modified");
+        const sha = computeDirtySha(repoDir);
+        expect(sha).toBeDefined();
+        expect(sha).toMatch(/^[0-9a-f]{40}$/);
+    });
+    it("returns a SHA when untracked files are present", () => {
+        fs.writeFileSync(path.join(repoDir, "untracked.txt"), "new file");
+        const sha = computeDirtySha(repoDir);
+        expect(sha).toBeDefined();
+        expect(sha).toMatch(/^[0-9a-f]{40}$/);
+    });
+    it("returns a different SHA for different dirty states", () => {
+        fs.writeFileSync(path.join(repoDir, "a.txt"), "content a");
+        const sha1 = computeDirtySha(repoDir);
+        // Stage and commit a.txt, then modify differently
+        execSync("git add -A && git commit -m 'add a'", { cwd: repoDir, stdio: "pipe" });
+        fs.writeFileSync(path.join(repoDir, "a.txt"), "content b");
+        const sha2 = computeDirtySha(repoDir);
+        expect(sha1).toBeDefined();
+        expect(sha2).toBeDefined();
+        expect(sha1).not.toBe(sha2);
+    });
+    it("does not move HEAD or create refs", () => {
+        const headBefore = execSync("git rev-parse HEAD", { cwd: repoDir, stdio: "pipe" })
+            .toString()
+            .trim();
+        const refsBefore = execSync("git for-each-ref", { cwd: repoDir, stdio: "pipe" })
+            .toString()
+            .trim();
+        fs.writeFileSync(path.join(repoDir, "dirty.txt"), "dirty");
+        computeDirtySha(repoDir);
+        const headAfter = execSync("git rev-parse HEAD", { cwd: repoDir, stdio: "pipe" })
+            .toString()
+            .trim();
+        const refsAfter = execSync("git for-each-ref", { cwd: repoDir, stdio: "pipe" })
+            .toString()
+            .trim();
+        expect(headAfter).toBe(headBefore);
+        expect(refsAfter).toBe(refsBefore);
+    });
+    it("does not modify the real index", () => {
+        // Stage nothing, but have an untracked file
+        fs.writeFileSync(path.join(repoDir, "untracked.txt"), "new");
+        const statusBefore = execSync("git status --porcelain", { cwd: repoDir, stdio: "pipe" })
+            .toString()
+            .trim();
+        computeDirtySha(repoDir);
+        const statusAfter = execSync("git status --porcelain", { cwd: repoDir, stdio: "pipe" })
+            .toString()
+            .trim();
+        expect(statusAfter).toBe(statusBefore);
+    });
+    it("returns a valid commit object parented on HEAD", () => {
+        fs.writeFileSync(path.join(repoDir, "dirty.txt"), "content");
+        const sha = computeDirtySha(repoDir);
+        expect(sha).toBeDefined();
+        // Verify it's a valid commit object
+        const type = execSync(`git cat-file -t ${sha}`, { cwd: repoDir, stdio: "pipe" })
+            .toString()
+            .trim();
+        expect(type).toBe("commit");
+        // Read the parent SHA directly from the commit object (bypasses any git shims
+        // that intercept `git rev-parse HEAD` in CI environments).
+        const commitBody = execSync(`git cat-file -p ${sha}`, {
+            cwd: repoDir,
+            stdio: "pipe",
+        }).toString();
+        const parentMatch = commitBody.match(/^parent ([0-9a-f]{40})$/m);
+        expect(parentMatch).not.toBeNull();
+        // Read HEAD the same way to compare — resolve the ref from .git/HEAD.
+        const headContent = fs.readFileSync(path.join(repoDir, ".git", "HEAD"), "utf-8").trim();
+        const headSha = headContent.startsWith("ref: ")
+            ? fs.readFileSync(path.join(repoDir, ".git", headContent.slice(5)), "utf-8").trim()
+            : headContent;
+        expect(parentMatch[1]).toBe(headSha);
+    });
+});
package/dist/runner/local-job.js
CHANGED

@@ -17,6 +17,7 @@ import { prepareWorkspace } from "./workspace.js";
 import { createRunDirectories } from "./directory-setup.js";
 import { buildContainerEnv, buildContainerBinds, buildContainerCmd, resolveDtuHost, resolveDockerApiUrl, resolveDockerExtraHosts, } from "../docker/container-config.js";
 import { buildJobResult, isJobSuccessful } from "./result-builder.js";
+import { ensureImagePulled } from "../docker/image-pull.js";
 import { wrapJobSteps, appendOutputCaptureStep } from "./step-wrapper.js";
 import { syncWorkspaceForRetry } from "./sync.js";
 // ─── Docker setup ─────────────────────────────────────────────────────────────

@@ -305,6 +306,10 @@ export async function executeLocalJob(job, options) {
     const hostRunnerSeedDir = path.resolve(getWorkingDirectory(), "runner");
     const useDirectContainer = !!job.container;
     const containerImage = useDirectContainer ? job.container.image : IMAGE;
+    // Pull the runner image if not cached locally. Required in both modes:
+    // default mode uses it directly as the container image; direct-container
+    // mode uses it to seed the runner binary. Fixes: github.com/redwoodjs/agent-ci/issues/203
+    await ensureImagePulled(getDocker(), IMAGE);
     if (useDirectContainer) {
         await fs.promises.mkdir(hostRunnerSeedDir, { recursive: true });
         const markerFile = path.join(hostRunnerSeedDir, ".seeded");
package/dist/workflow/workflow-parser.js
CHANGED

@@ -663,7 +663,7 @@ export function validateSecrets(filePath, taskName, secrets, secretsFilePath) {
         return;
     }
     throw new Error(`[Agent CI] Missing secrets required by workflow job "${taskName}".\n` +
-        `Add the following to ${secretsFilePath}:\n\n` +
+        `Add the following to ${secretsFilePath} or set them as environment variables:\n\n` +
        missing.map((n) => `${n}=`).join("\n") +
        "\n");
 }
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@redwoodjs/agent-ci",
-  "version": "0.8.2",
+  "version": "0.9.0",
   "description": "Local GitHub Actions runner — pause on failure, ~0ms cache, official runner binary. Built for AI coding agents.",
   "keywords": [
     "act-alternative",

@@ -40,7 +40,7 @@
     "log-update": "^7.2.0",
     "minimatch": "^10.2.1",
     "yaml": "^2.8.2",
-    "dtu-github-actions": "0.
+    "dtu-github-actions": "0.9.0"
   },
   "devDependencies": {
     "@types/dockerode": "^3.3.34",