@redwoodjs/agent-ci 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. package/LICENSE +110 -0
  2. package/README.md +79 -0
  3. package/dist/cli.js +628 -0
  4. package/dist/config.js +63 -0
  5. package/dist/docker/container-config.js +178 -0
  6. package/dist/docker/container-config.test.js +156 -0
  7. package/dist/docker/service-containers.js +205 -0
  8. package/dist/docker/service-containers.test.js +236 -0
  9. package/dist/docker/shutdown.js +120 -0
  10. package/dist/docker/shutdown.test.js +148 -0
  11. package/dist/output/agent-mode.js +7 -0
  12. package/dist/output/agent-mode.test.js +36 -0
  13. package/dist/output/cleanup.js +218 -0
  14. package/dist/output/cleanup.test.js +241 -0
  15. package/dist/output/concurrency.js +57 -0
  16. package/dist/output/concurrency.test.js +88 -0
  17. package/dist/output/debug.js +36 -0
  18. package/dist/output/logger.js +57 -0
  19. package/dist/output/logger.test.js +82 -0
  20. package/dist/output/reporter.js +67 -0
  21. package/dist/output/run-state.js +126 -0
  22. package/dist/output/run-state.test.js +169 -0
  23. package/dist/output/state-renderer.js +149 -0
  24. package/dist/output/state-renderer.test.js +488 -0
  25. package/dist/output/tree-renderer.js +52 -0
  26. package/dist/output/tree-renderer.test.js +105 -0
  27. package/dist/output/working-directory.js +20 -0
  28. package/dist/runner/directory-setup.js +98 -0
  29. package/dist/runner/directory-setup.test.js +31 -0
  30. package/dist/runner/git-shim.js +92 -0
  31. package/dist/runner/git-shim.test.js +57 -0
  32. package/dist/runner/local-job.js +691 -0
  33. package/dist/runner/metadata.js +90 -0
  34. package/dist/runner/metadata.test.js +127 -0
  35. package/dist/runner/result-builder.js +119 -0
  36. package/dist/runner/result-builder.test.js +177 -0
  37. package/dist/runner/step-wrapper.js +82 -0
  38. package/dist/runner/step-wrapper.test.js +77 -0
  39. package/dist/runner/sync.js +80 -0
  40. package/dist/runner/workspace.js +66 -0
  41. package/dist/types.js +1 -0
  42. package/dist/workflow/job-scheduler.js +62 -0
  43. package/dist/workflow/job-scheduler.test.js +130 -0
  44. package/dist/workflow/workflow-parser.js +556 -0
  45. package/dist/workflow/workflow-parser.test.js +642 -0
  46. package/package.json +39 -0
  47. package/shim.sh +11 -0
@@ -0,0 +1,642 @@
1
+ import { describe, it, expect, afterEach } from "vitest";
2
+ import fs from "node:fs";
3
+ import os from "node:os";
4
+ import path from "node:path";
5
+ import { parseWorkflowServices } from "./workflow-parser.js";
6
// ─── Fixtures ─────────────────────────────────────────────────────────────────

// NOTE(review): the YAML indentation below was flattened in the published dump;
// it has been reconstructed to standard GitHub Actions nesting
// (jobs → <job_id> → services → <service_id>) — confirm against the original source.
const WORKFLOW_WITH_SERVICES = `
name: Unit Tests
on: [push]
jobs:
  unit-tests:
    runs-on: ubuntu-latest
    services:
      mysql:
        image: mysql:8.0
        env:
          MYSQL_ROOT_PASSWORD: root
          MYSQL_DATABASE: test_db
          MYSQL_USER: app
          MYSQL_PASSWORD: app
        options: >-
          --health-cmd="mysqladmin ping -h localhost -proot"
          --health-interval=5s
          --health-timeout=3s
          --health-retries=10
        ports:
          - 3306:3306
      redis:
        image: redis:7
        ports:
          - 6379:6379
    steps:
      - run: echo hi
`.trimStart();

const WORKFLOW_NO_SERVICES = `
name: Simple
on: [push]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo hi
`.trimStart();

const WORKFLOW_SERVICE_NO_PORTS = `
name: Minimal Service
on: [push]
jobs:
  test:
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:16
        env:
          POSTGRES_PASSWORD: secret
    steps:
      - run: echo hi
`.trimStart();
58
// ─── Tests ────────────────────────────────────────────────────────────────────

describe("parseWorkflowServices", () => {
  let tmpDir;

  afterEach(() => {
    if (tmpDir) {
      fs.rmSync(tmpDir, { recursive: true, force: true });
    }
  });

  // Writes the given workflow YAML to a fresh temp dir; removed in afterEach.
  function writeWorkflow(content) {
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-svc-test-"));
    const filePath = path.join(tmpDir, "workflow.yml");
    fs.writeFileSync(filePath, content);
    return filePath;
  }

  it("parses MySQL service with env, ports, and health check options", async () => {
    const filePath = writeWorkflow(WORKFLOW_WITH_SERVICES);
    const services = await parseWorkflowServices(filePath, "unit-tests");
    expect(services).toHaveLength(2);
    const mysql = services.find((s) => s.name === "mysql");
    expect(mysql).toBeDefined();
    expect(mysql.image).toBe("mysql:8.0");
    expect(mysql.env).toEqual({
      MYSQL_ROOT_PASSWORD: "root",
      MYSQL_DATABASE: "test_db",
      MYSQL_USER: "app",
      MYSQL_PASSWORD: "app",
    });
    expect(mysql.ports).toEqual(["3306:3306"]);
    expect(mysql.options).toContain("--health-cmd");
    expect(mysql.options).toContain("--health-interval=5s");
  });

  it("parses Redis service with ports but no env or options", async () => {
    const filePath = writeWorkflow(WORKFLOW_WITH_SERVICES);
    const services = await parseWorkflowServices(filePath, "unit-tests");
    const redis = services.find((s) => s.name === "redis");
    expect(redis).toBeDefined();
    expect(redis.image).toBe("redis:7");
    expect(redis.ports).toEqual(["6379:6379"]);
    expect(redis.env).toBeUndefined();
    expect(redis.options).toBeUndefined();
  });

  it("returns empty array when job has no services", async () => {
    const filePath = writeWorkflow(WORKFLOW_NO_SERVICES);
    const services = await parseWorkflowServices(filePath, "build");
    expect(services).toEqual([]);
  });

  it("returns empty array when job doesn't exist", async () => {
    const filePath = writeWorkflow(WORKFLOW_NO_SERVICES);
    const services = await parseWorkflowServices(filePath, "nonexistent");
    expect(services).toEqual([]);
  });

  it("parses service with env but no ports", async () => {
    const filePath = writeWorkflow(WORKFLOW_SERVICE_NO_PORTS);
    const services = await parseWorkflowServices(filePath, "test");
    expect(services).toHaveLength(1);
    const pg = services[0];
    expect(pg.name).toBe("postgres");
    expect(pg.image).toBe("postgres:16");
    expect(pg.env).toEqual({ POSTGRES_PASSWORD: "secret" });
    expect(pg.ports).toBeUndefined();
    expect(pg.options).toBeUndefined();
  });

  it("converts env values to strings", async () => {
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-svc-test-"));
    const filePath = path.join(tmpDir, "workflow.yml");
    // NOTE(review): YAML nesting reconstructed — the `services` map must sit
    // under the job, not at top level, for the parser to find it.
    fs.writeFileSync(filePath, `name: Env Test
on: [push]
jobs:
  test:
    runs-on: ubuntu-latest
    services:
      db:
        image: mysql:8.0
        env:
          PORT: 3306
          SKIP_TZINFO: 1
          DEBUG: true
    steps:
      - run: echo hi
`);
    const services = await parseWorkflowServices(filePath, "test");
    const db = services[0];
    // Numeric and boolean YAML values should be coerced to strings
    expect(db.env.PORT).toBe("3306");
    expect(db.env.SKIP_TZINFO).toBe("1");
    expect(db.env.DEBUG).toBe("true");
  });
});
146
+ // ─── expandExpressions ────────────────────────────────────────────────────────
147
+ import { expandExpressions } from "./workflow-parser.js";
148
describe("expandExpressions", () => {
  let scratchDir;

  afterEach(() => {
    if (scratchDir) {
      fs.rmSync(scratchDir, { recursive: true, force: true });
    }
  });

  // Creates a throwaway repo directory populated with { name, content } files.
  // Cleaned up by the afterEach above.
  function makeRepo(...files) {
    scratchDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-expr-test-"));
    files.forEach(({ name, content }) => {
      const target = path.join(scratchDir, name);
      fs.mkdirSync(path.dirname(target), { recursive: true });
      fs.writeFileSync(target, content);
    });
    return scratchDir;
  }

  // ── passthrough ──────────────────────────────────────────────────────────────

  it("returns plain strings unchanged", () => {
    expect(expandExpressions("hello-world")).toBe("hello-world");
    expect(expandExpressions("")).toBe("");
    expect(expandExpressions("Linux-vite-build-abc123")).toBe("Linux-vite-build-abc123");
  });

  // ── context variables ────────────────────────────────────────────────────────

  it("expands runner.os to Linux", () => {
    expect(expandExpressions("${{ runner.os }}-build")).toBe("Linux-build");
  });

  it("expands runner.arch to X64", () => {
    expect(expandExpressions("prefix-${{ runner.arch }}")).toBe("prefix-X64");
  });

  it("expands github.run_id to '1'", () => {
    expect(expandExpressions("cache-${{ github.run_id }}")).toBe("cache-1");
  });

  it("expands github.run_number to '1'", () => {
    expect(expandExpressions("run-${{ github.run_number }}")).toBe("run-1");
  });

  it("expands github.sha to zeros", () => {
    expect(expandExpressions("sha-${{ github.sha }}")).toBe("sha-0000000000000000000000000000000000000000");
  });

  it("expands github.ref_name to main", () => {
    expect(expandExpressions("branch-${{ github.ref_name }}")).toBe("branch-main");
  });

  it("expands github.repository", () => {
    expect(expandExpressions("${{ github.repository }}")).toBe("local/repo");
  });

  it("expands secrets.* to empty string when no secrets map provided", () => {
    expect(expandExpressions("token=${{ secrets.MY_TOKEN }}")).toBe("token=");
  });

  it("expands secrets.* to empty string when key is absent from secrets map", () => {
    expect(expandExpressions("token=${{ secrets.MISSING }}", undefined, { OTHER: "value" })).toBe("token=");
  });

  it("expands secrets.* from provided secrets map", () => {
    expect(expandExpressions("token=${{ secrets.MY_TOKEN }}", undefined, { MY_TOKEN: "abc123" })).toBe("token=abc123");
  });

  it("expands multiple secrets from provided secrets map", () => {
    const secrets = { API_TOKEN: "tok-xyz", ACCOUNT_ID: "acc-123" };
    expect(expandExpressions("${{ secrets.API_TOKEN }}:${{ secrets.ACCOUNT_ID }}", undefined, secrets)).toBe("tok-xyz:acc-123");
  });

  it("expands matrix.* to empty string when no matrixContext provided", () => {
    expect(expandExpressions("shard-${{ matrix.shard }}")).toBe("shard-");
  });

  it("expands steps.* to empty string", () => {
    expect(expandExpressions("hit-${{ steps.cache.outputs.cache-hit }}")).toBe("hit-");
    expect(expandExpressions("${{ steps.some-step.outputs.result }}")).toBe("");
  });

  it("expands needs.* to empty string", () => {
    expect(expandExpressions("${{ needs.build.result }}")).toBe("");
  });

  it("expands unknown expressions to empty string (no commas injected)", () => {
    expect(expandExpressions("${{ some.unknown.expr }}")).toBe("");
    // Especially important: unknown expressions must NOT contain commas
    const result = expandExpressions("key-${{ something.weird('a','b') }}");
    expect(result).not.toContain(",");
  });

  // ── compound strings ─────────────────────────────────────────────────────────

  it("expands multiple expressions in one string", () => {
    const result = expandExpressions("${{ runner.os }}-build-${{ github.run_id }}");
    expect(result).toBe("Linux-build-1");
  });

  it("produces a cache key with no commas even for multi-arg hashFiles", () => {
    const repoDir = makeRepo({ name: "package.json", content: "{}" });
    const result = expandExpressions("${{ runner.os }}-vite-build-${{ hashFiles('package.json', 'pnpm-lock.yaml') }}", repoDir);
    expect(result).not.toContain(",");
    expect(result).toMatch(/^Linux-vite-build-[0-9a-f]+$/);
  });

  // ── hashFiles ────────────────────────────────────────────────────────────────

  it("hashFiles with a matching file returns a hex sha256", () => {
    const repoDir = makeRepo({ name: "pnpm-lock.yaml", content: "lockfile: v6" });
    const result = expandExpressions("${{ hashFiles('pnpm-lock.yaml') }}", repoDir);
    expect(result).toMatch(/^[0-9a-f]{64}$/);
  });

  it("hashFiles is deterministic for the same file content", () => {
    const repoDir = makeRepo({ name: "pnpm-lock.yaml", content: "lockfile: v6" });
    const first = expandExpressions("${{ hashFiles('pnpm-lock.yaml') }}", repoDir);
    const second = expandExpressions("${{ hashFiles('pnpm-lock.yaml') }}", repoDir);
    expect(first).toBe(second);
  });

  it("hashFiles changes when file content changes", () => {
    const repoDir = makeRepo({ name: "lock.yaml", content: "version: 1" });
    const before = expandExpressions("${{ hashFiles('lock.yaml') }}", repoDir);
    fs.writeFileSync(path.join(repoDir, "lock.yaml"), "version: 2");
    const after = expandExpressions("${{ hashFiles('lock.yaml') }}", repoDir);
    expect(before).not.toBe(after);
  });

  it("hashFiles with multiple matching patterns combines all files", () => {
    const repoDir = makeRepo({ name: "package.json", content: "{}" }, { name: "pnpm-lock.yaml", content: "lockfile: v6" });
    const both = expandExpressions("${{ hashFiles('package.json', 'pnpm-lock.yaml') }}", repoDir);
    const justPackage = expandExpressions("${{ hashFiles('package.json') }}", repoDir);
    // Hash of both files is different from hash of just one
    expect(both).toMatch(/^[0-9a-f]{64}$/);
    expect(both).not.toBe(justPackage);
  });

  it("hashFiles with no matching files returns zero hash", () => {
    const repoDir = makeRepo({ name: "package.json", content: "{}" });
    const result = expandExpressions("${{ hashFiles('nonexistent.txt') }}", repoDir);
    expect(result).toBe("0000000000000000000000000000000000000000");
  });

  it("hashFiles without repoPath returns zero hash", () => {
    const result = expandExpressions("${{ hashFiles('package.json') }}");
    expect(result).toBe("0000000000000000000000000000000000000000");
  });

  it("hashFiles matches glob patterns", () => {
    const repoDir = makeRepo({ name: "src/foo.ts", content: "const x = 1" }, { name: "src/bar.ts", content: "const y = 2" });
    const result = expandExpressions("${{ hashFiles('src/**/*.ts') }}", repoDir);
    expect(result).toMatch(/^[0-9a-f]{64}$/);
  });
});
274
// ─── expandExpressions — matrixContext ────────────────────────────────────────

describe("expandExpressions with matrixContext", () => {
  it("expands matrix.shard to the provided shard value", () => {
    const ctx = { shard: "3" };
    expect(expandExpressions("shard-${{ matrix.shard }}", undefined, undefined, ctx)).toBe("shard-3");
  });

  it("expands strategy.job-total to __job_total from context", () => {
    const ctx = { __job_total: "8" };
    expect(expandExpressions("total-${{ strategy.job-total }}", undefined, undefined, ctx)).toBe("total-8");
  });

  it("expands both matrix.shard and strategy.job-total in a real command", () => {
    const ctx = { shard: "3", __job_total: "8" };
    const result = expandExpressions("pnpm test:e2e:ci --shard=${{ matrix.shard }}/${{ strategy.job-total }}", undefined, undefined, ctx);
    expect(result).toBe("pnpm test:e2e:ci --shard=3/8");
  });

  it("returns empty string for matrix.* when matrixContext not provided", () => {
    expect(expandExpressions("shard-${{ matrix.shard }}")).toBe("shard-");
  });

  it("returns fallback '1' for strategy.job-total when context not provided", () => {
    expect(expandExpressions("${{ strategy.job-total }}")).toBe("1");
  });
});
295
+ // ─── expandMatrixCombinations ─────────────────────────────────────────────────
296
+ import { expandMatrixCombinations } from "./workflow-parser.js";
297
describe("expandMatrixCombinations", () => {
  it("returns [{}] for an empty matrix", () => {
    expect(expandMatrixCombinations({})).toEqual([{}]);
  });

  it("returns one combination per value for a single dimension", () => {
    const combos = expandMatrixCombinations({ shard: [1, 2, 3] });
    expect(combos).toHaveLength(3);
    ["1", "2", "3"].forEach((expected, idx) => {
      expect(combos[idx]).toEqual({ shard: expected });
    });
  });

  it("coerces numeric values to strings", () => {
    const combos = expandMatrixCombinations({ shard: [1, 2] });
    expect(typeof combos[0].shard).toBe("string");
  });

  it("returns 8 combinations for shard [1..8]", () => {
    const combos = expandMatrixCombinations({ shard: [1, 2, 3, 4, 5, 6, 7, 8] });
    expect(combos).toHaveLength(8);
    expect(combos[7]).toEqual({ shard: "8" });
  });

  it("returns Cartesian product for multi-dimensional matrix", () => {
    const combos = expandMatrixCombinations({ os: ["ubuntu", "windows"], node: [18, 20] });
    expect(combos).toHaveLength(4);
    // All four combinations must be present
    for (const os of ["ubuntu", "windows"]) {
      for (const node of ["18", "20"]) {
        expect(combos).toContainEqual({ os, node });
      }
    }
  });
});
327
// ─── loadMachineSecrets ───────────────────────────────────────────────────────

// Inline the parser logic rather than importing from config.ts to avoid
// the module-level `configSchema.parse(process.env)` ZodError in test env.

/**
 * Parses `<baseDir>/.env.machine` into a flat key/value record.
 * Blank lines and `#` comments are skipped; a line must have a non-empty
 * key before its first `=`; surrounding single or double quotes are stripped
 * from values. Returns {} when the file does not exist.
 */
function loadMachineSecrets(baseDir) {
  const envFile = path.join(baseDir, ".env.machine");
  if (!fs.existsSync(envFile)) {
    return {};
  }
  const parsed = {};
  const lines = fs.readFileSync(envFile, "utf-8").split("\n");
  for (const rawLine of lines) {
    const entry = rawLine.trim();
    // Skip blanks and comment lines.
    if (!entry || entry.startsWith("#")) {
      continue;
    }
    const sep = entry.indexOf("=");
    // Require at least one key character before "=".
    if (sep < 1) {
      continue;
    }
    const key = entry.slice(0, sep).trim();
    if (!key) {
      continue;
    }
    let value = entry.slice(sep + 1).trim();
    const doubleQuoted = value.startsWith('"') && value.endsWith('"');
    const singleQuoted = value.startsWith("'") && value.endsWith("'");
    if (doubleQuoted || singleQuoted) {
      value = value.slice(1, -1);
    }
    parsed[key] = value;
  }
  return parsed;
}
357
describe("loadMachineSecrets", () => {
  let secretsDir;

  afterEach(() => {
    if (secretsDir) {
      fs.rmSync(secretsDir, { recursive: true, force: true });
    }
  });

  // Writes the given text as .env.machine inside a fresh temp dir.
  function writeMachineEnv(content) {
    secretsDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-secrets-test-"));
    fs.writeFileSync(path.join(secretsDir, ".env.machine"), content);
    return secretsDir;
  }

  it("returns empty object when .env.machine does not exist", () => {
    secretsDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-secrets-test-"));
    // No .env.machine written — file simply absent
    expect(loadMachineSecrets(secretsDir)).toEqual({});
  });

  it("parses KEY=VALUE pairs into a record", () => {
    const dir = writeMachineEnv("CLOUDFLARE_API_TOKEN=my-fake-cf-token\nCLOUDFLARE_ACCOUNT_ID=acct-abc123\n");
    expect(loadMachineSecrets(dir)).toEqual({
      CLOUDFLARE_API_TOKEN: "my-fake-cf-token",
      CLOUDFLARE_ACCOUNT_ID: "acct-abc123",
    });
  });

  it("ignores comment lines and blank lines", () => {
    const dir = writeMachineEnv(`# This is a comment
API_KEY=super-secret-key-xyz

# Another comment
OTHER_TOKEN=tok-456
`);
    expect(loadMachineSecrets(dir)).toEqual({
      API_KEY: "super-secret-key-xyz",
      OTHER_TOKEN: "tok-456",
    });
  });

  it("strips surrounding double quotes from values", () => {
    const dir = writeMachineEnv('QUOTED_TOKEN="my-quoted-token"\n');
    expect(loadMachineSecrets(dir)["QUOTED_TOKEN"]).toBe("my-quoted-token");
  });

  it("strips surrounding single quotes from values", () => {
    const dir = writeMachineEnv("SINGLE_QUOTED='my-single-quoted'\n");
    expect(loadMachineSecrets(dir)["SINGLE_QUOTED"]).toBe("my-single-quoted");
  });

  it("handles values containing equals signs", () => {
    const dir = writeMachineEnv("URL=https://example.com?foo=bar&baz=qux\n");
    expect(loadMachineSecrets(dir)["URL"]).toBe("https://example.com?foo=bar&baz=qux");
  });
});
412
+ // ─── extractSecretRefs & validateSecrets ──────────────────────────────────────
413
+ import { extractSecretRefs, validateSecrets } from "./workflow-parser.js";
414
describe("extractSecretRefs", () => {
  let tmpDir;

  afterEach(() => {
    if (tmpDir) {
      fs.rmSync(tmpDir, { recursive: true, force: true });
    }
  });

  // Writes workflow YAML to a fresh temp dir; removed in afterEach.
  // NOTE(review): YAML indentation reconstructed to valid GitHub Actions
  // nesting — it was flattened in the published dump.
  function writeWorkflow(content) {
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-secrets-refs-"));
    const filePath = path.join(tmpDir, "workflow.yml");
    fs.writeFileSync(filePath, content);
    return filePath;
  }

  it("returns empty array when workflow has no secrets references", () => {
    const filePath = writeWorkflow(`
name: No Secrets
on: [push]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo hello
`);
    expect(extractSecretRefs(filePath)).toEqual([]);
  });

  it("extracts unique sorted secret names from the whole file", () => {
    const filePath = writeWorkflow(`
name: Secrets Test
on: [push]
jobs:
  test:
    runs-on: ubuntu-latest
    env:
      TOKEN: \${{ secrets.API_TOKEN }}
      ACCT: \${{ secrets.ACCOUNT_ID }}
      DUP: \${{ secrets.API_TOKEN }}
    steps:
      - run: echo ok
`);
    expect(extractSecretRefs(filePath)).toEqual(["ACCOUNT_ID", "API_TOKEN"]);
  });

  it("scopes to the specified job when taskName is provided", () => {
    const filePath = writeWorkflow(`
name: Multi Job
on: [push]
jobs:
  build:
    runs-on: ubuntu-latest
    env:
      TOKEN: \${{ secrets.BUILD_TOKEN }}
    steps:
      - run: echo build
  test:
    runs-on: ubuntu-latest
    env:
      TOKEN: \${{ secrets.TEST_TOKEN }}
    steps:
      - run: echo test
`);
    expect(extractSecretRefs(filePath, "test")).toEqual(["TEST_TOKEN"]);
    expect(extractSecretRefs(filePath, "build")).toEqual(["BUILD_TOKEN"]);
  });
});
477
describe("validateSecrets", () => {
  let tmpDir;

  afterEach(() => {
    if (tmpDir) {
      fs.rmSync(tmpDir, { recursive: true, force: true });
    }
  });

  // Writes workflow YAML to a fresh temp dir; removed in afterEach.
  // NOTE(review): YAML indentation reconstructed to valid GitHub Actions
  // nesting — it was flattened in the published dump.
  function writeWorkflow(content) {
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "oa-validate-"));
    const filePath = path.join(tmpDir, "workflow.yml");
    fs.writeFileSync(filePath, content);
    return filePath;
  }

  it("does not throw when all required secrets are present", () => {
    const filePath = writeWorkflow(`
name: Test
on: [push]
jobs:
  run:
    runs-on: ubuntu-latest
    env:
      TOKEN: \${{ secrets.MY_TOKEN }}
    steps:
      - run: echo ok
`);
    expect(() => validateSecrets(filePath, "run", { MY_TOKEN: "abc123" }, "/repo/.env.agent-ci")).not.toThrow();
  });

  it("does not throw when workflow has no secrets", () => {
    const filePath = writeWorkflow(`
name: Test
on: [push]
jobs:
  run:
    runs-on: ubuntu-latest
    steps:
      - run: echo ok
`);
    expect(() => validateSecrets(filePath, "run", {}, "/repo/.env.agent-ci")).not.toThrow();
  });

  it("throws listing missing secrets and the secrets file path", () => {
    const filePath = writeWorkflow(`
name: Test
on: [push]
jobs:
  deploy:
    runs-on: ubuntu-latest
    env:
      CF_TOKEN: \${{ secrets.CLOUDFLARE_API_TOKEN }}
      CF_ACCT: \${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
    steps:
      - run: echo deploy
`);
    expect(() => validateSecrets(filePath, "deploy", {}, "/home/user/repo/.env.agent-ci")).toThrow(/CLOUDFLARE_ACCOUNT_ID=/);
    expect(() => validateSecrets(filePath, "deploy", {}, "/home/user/repo/.env.agent-ci")).toThrow(/CLOUDFLARE_API_TOKEN=/);
    expect(() => validateSecrets(filePath, "deploy", {}, "/home/user/repo/.env.agent-ci")).toThrow(/\/home\/user\/repo\/.env.agent-ci/);
  });

  it("only fails for missing secrets, not for ones that are present", () => {
    const filePath = writeWorkflow(`
name: Test
on: [push]
jobs:
  run:
    runs-on: ubuntu-latest
    env:
      A: \${{ secrets.PRESENT_SECRET }}
      B: \${{ secrets.MISSING_SECRET }}
    steps:
      - run: echo ok
`);
    expect(() => validateSecrets(filePath, "run", { PRESENT_SECRET: "value" }, "/repo/.env.agent-ci")).toThrow(/MISSING_SECRET/);
    expect(() => validateSecrets(filePath, "run", { PRESENT_SECRET: "value" }, "/repo/.env.agent-ci")).not.toThrow(/PRESENT_SECRET/);
  });
});
550
+ // ─── isWorkflowRelevant ───────────────────────────────────────────────────────
551
+ import { isWorkflowRelevant } from "./workflow-parser.js";
552
describe("isWorkflowRelevant", () => {
  // Builders for minimal workflow templates keyed by trigger event.
  const pushTemplate = (config = {}) => ({ events: { push: config } });
  const prTemplate = (config = {}) => ({ events: { pull_request: config } });

  // ── Basic branch matching (existing behavior) ─────────────────────────────

  it("matches push with no branch/path filters", () => {
    expect(isWorkflowRelevant(pushTemplate(), "main")).toBe(true);
  });

  it("matches push when branch matches", () => {
    expect(isWorkflowRelevant(pushTemplate({ branches: ["main"] }), "main")).toBe(true);
  });

  it("does not match push when branch doesn't match", () => {
    expect(isWorkflowRelevant(pushTemplate({ branches: ["main"] }), "feature")).toBe(false);
  });

  // ── paths-ignore ──────────────────────────────────────────────────────────

  it("skips workflow when all changed files match paths-ignore", () => {
    const template = pushTemplate({ "paths-ignore": ["**/*.md", "docs/**", "LICENSE"] });
    const changedFiles = ["README.md", "docs/guide.md", "LICENSE"];
    expect(isWorkflowRelevant(template, "main", changedFiles)).toBe(false);
  });

  it("runs workflow when at least one changed file is not ignored", () => {
    const template = pushTemplate({ "paths-ignore": ["**/*.md", "docs/**"] });
    const changedFiles = ["README.md", "cli/src/cli.ts"];
    expect(isWorkflowRelevant(template, "main", changedFiles)).toBe(true);
  });

  it("runs workflow when paths-ignore is set but no changed files provided", () => {
    const template = pushTemplate({ "paths-ignore": ["**/*.md"] });
    expect(isWorkflowRelevant(template, "main")).toBe(true);
    expect(isWorkflowRelevant(template, "main", [])).toBe(true);
  });

  // ── paths ─────────────────────────────────────────────────────────────────

  it("runs workflow when a changed file matches a paths filter", () => {
    const template = pushTemplate({ paths: ["cli/**", "dtu-github-actions/**"] });
    const changedFiles = ["cli/src/cli.ts"];
    expect(isWorkflowRelevant(template, "main", changedFiles)).toBe(true);
  });

  it("skips workflow when no changed files match paths filter", () => {
    const template = pushTemplate({ paths: ["cli/**"] });
    const changedFiles = ["README.md", "docs/guide.md"];
    expect(isWorkflowRelevant(template, "main", changedFiles)).toBe(false);
  });

  it("runs workflow when paths is set but no changed files provided", () => {
    const template = pushTemplate({ paths: ["cli/**"] });
    expect(isWorkflowRelevant(template, "main")).toBe(true);
    expect(isWorkflowRelevant(template, "main", [])).toBe(true);
  });

  // ── paths + branch interaction ────────────────────────────────────────────

  it("skips when branch matches but all files are ignored", () => {
    const template = pushTemplate({
      branches: ["main"],
      "paths-ignore": ["**/*.md"],
    });
    expect(isWorkflowRelevant(template, "main", ["README.md"])).toBe(false);
  });

  it("skips when branch does not match, even if paths would match", () => {
    const template = pushTemplate({
      branches: ["main"],
      paths: ["cli/**"],
    });
    expect(isWorkflowRelevant(template, "feature", ["cli/src/cli.ts"])).toBe(false);
  });

  // ── pull_request with paths ───────────────────────────────────────────────

  it("skips PR workflow when all changed files match paths-ignore", () => {
    const template = prTemplate({ "paths-ignore": ["**/*.md"] });
    expect(isWorkflowRelevant(template, "feature", ["README.md"])).toBe(false);
  });

  it("runs PR workflow when a changed file is not ignored", () => {
    const template = prTemplate({ "paths-ignore": ["**/*.md"] });
    expect(isWorkflowRelevant(template, "feature", ["cli/src/cli.ts"])).toBe(true);
  });
});
package/package.json ADDED
@@ -0,0 +1,39 @@
1
+ {
2
+ "name": "@redwoodjs/agent-ci",
3
+ "version": "0.1.0",
4
+ "description": "Local GitHub Actions runner",
5
+ "keywords": [],
6
+ "license": "FSL-1.1-MIT",
7
+ "author": "",
8
+ "bin": {
9
+ "agent-ci": "./dist/cli.js"
10
+ },
11
+ "files": [
12
+ "dist",
13
+ "shim.sh"
14
+ ],
15
+ "type": "module",
16
+ "publishConfig": {
17
+ "access": "public"
18
+ },
19
+ "dependencies": {
20
+ "@actions/workflow-parser": "0.3.43",
21
+ "dockerode": "^4.0.2",
22
+ "log-update": "^7.2.0",
23
+ "minimatch": "^10.2.1",
24
+ "yaml": "^2.8.2",
25
+ "dtu-github-actions": "0.1.0"
26
+ },
27
+ "devDependencies": {
28
+ "@types/dockerode": "^3.3.34",
29
+ "@types/node": "^22.10.2",
30
+ "tsx": "^4.21.0",
31
+ "vitest": "^4.0.18"
32
+ },
33
+ "scripts": {
34
+ "build": "tsgo",
35
+ "typecheck": "tsgo",
36
+ "agent-ci": "tsx src/cli.ts",
37
+ "test": "vitest run"
38
+ }
39
+ }