@redwoodjs/agent-ci 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. package/LICENSE +110 -0
  2. package/README.md +79 -0
  3. package/dist/cli.js +628 -0
  4. package/dist/config.js +63 -0
  5. package/dist/docker/container-config.js +178 -0
  6. package/dist/docker/container-config.test.js +156 -0
  7. package/dist/docker/service-containers.js +205 -0
  8. package/dist/docker/service-containers.test.js +236 -0
  9. package/dist/docker/shutdown.js +120 -0
  10. package/dist/docker/shutdown.test.js +148 -0
  11. package/dist/output/agent-mode.js +7 -0
  12. package/dist/output/agent-mode.test.js +36 -0
  13. package/dist/output/cleanup.js +218 -0
  14. package/dist/output/cleanup.test.js +241 -0
  15. package/dist/output/concurrency.js +57 -0
  16. package/dist/output/concurrency.test.js +88 -0
  17. package/dist/output/debug.js +36 -0
  18. package/dist/output/logger.js +57 -0
  19. package/dist/output/logger.test.js +82 -0
  20. package/dist/output/reporter.js +67 -0
  21. package/dist/output/run-state.js +126 -0
  22. package/dist/output/run-state.test.js +169 -0
  23. package/dist/output/state-renderer.js +149 -0
  24. package/dist/output/state-renderer.test.js +488 -0
  25. package/dist/output/tree-renderer.js +52 -0
  26. package/dist/output/tree-renderer.test.js +105 -0
  27. package/dist/output/working-directory.js +20 -0
  28. package/dist/runner/directory-setup.js +98 -0
  29. package/dist/runner/directory-setup.test.js +31 -0
  30. package/dist/runner/git-shim.js +92 -0
  31. package/dist/runner/git-shim.test.js +57 -0
  32. package/dist/runner/local-job.js +691 -0
  33. package/dist/runner/metadata.js +90 -0
  34. package/dist/runner/metadata.test.js +127 -0
  35. package/dist/runner/result-builder.js +119 -0
  36. package/dist/runner/result-builder.test.js +177 -0
  37. package/dist/runner/step-wrapper.js +82 -0
  38. package/dist/runner/step-wrapper.test.js +77 -0
  39. package/dist/runner/sync.js +80 -0
  40. package/dist/runner/workspace.js +66 -0
  41. package/dist/types.js +1 -0
  42. package/dist/workflow/job-scheduler.js +62 -0
  43. package/dist/workflow/job-scheduler.test.js +130 -0
  44. package/dist/workflow/workflow-parser.js +556 -0
  45. package/dist/workflow/workflow-parser.test.js +642 -0
  46. package/package.json +39 -0
  47. package/shim.sh +11 -0
@@ -0,0 +1,556 @@
1
+ import fs from "fs";
2
+ import path from "path";
3
+ import crypto from "crypto";
4
+ import { execSync } from "child_process";
5
+ import { minimatch } from "minimatch";
6
+ import { parse as parseYaml } from "yaml";
7
// @actions/workflow-parser imports JSON without a `type: json` assertion,
// which fails on Node.js v22+. Lazily import it only from the functions
// that actually need it (getWorkflowTemplate, parseWorkflowSteps).
async function loadWorkflowParser() {
    const parserModule = await import("@actions/workflow-parser");
    return parserModule;
}
13
/**
 * Expand `${{ expr }}` placeholders in a string.
 *
 * Supported constructs:
 * - hashFiles('glob1', 'glob2', ...) → sha256 hex digest of matching files
 *   under repoPath, or 40 zeros when repoPath is missing, nothing matches,
 *   or any error occurs
 * - format('tpl {0}', arg0, ...) → template with positional args substituted;
 *   each arg is recursively expanded so context references resolve
 * - runner.* / github.* → fixed stand-in values for a local run
 * - strategy.job-total / strategy.job-index → bookkeeping keys in matrixContext
 * - matrix.KEY / secrets.NAME → looked up in matrixContext / secrets
 * - steps.* / needs.* / anything else → "" (safe: no commas injected)
 *
 * @param {string} value - Text possibly containing `${{ ... }}` placeholders.
 * @param {string} [repoPath] - Repo root used to resolve hashFiles() globs.
 * @param {Record<string,string>} [secrets] - Secret name → value map.
 * @param {Record<string,string>} [matrixContext] - Matrix key → value map.
 * @returns {string} The input with every placeholder replaced.
 */
export function expandExpressions(value, repoPath, secrets, matrixContext) {
    return value.replace(/\$\{\{([\s\S]*?)\}\}/g, (_match, expr) => {
        const trimmed = expr.trim();
        // hashFiles('glob1', 'glob2', ...)
        const hashFilesMatch = trimmed.match(/^hashFiles\(([\s\S]+)\)$/);
        if (hashFilesMatch) {
            if (!repoPath) {
                return "0000000000000000000000000000000000000000";
            }
            try {
                // Parse the argument list: quoted strings separated by commas
                const args = hashFilesMatch[1].match(/['"][^'"]*['"]/g) ?? [];
                const patterns = args.map((a) => a.replace(/^['"]|['"]$/g, ""));
                const hash = crypto.createHash("sha256");
                let hasAny = false;
                for (const pattern of patterns) {
                    let files;
                    try {
                        files = findFiles(repoPath, pattern);
                    }
                    catch {
                        files = [];
                    }
                    for (const f of files.sort()) {
                        try {
                            hash.update(fs.readFileSync(f));
                            hasAny = true;
                        }
                        catch {
                            // File not readable, skip
                        }
                    }
                }
                if (!hasAny) {
                    return "0000000000000000000000000000000000000000";
                }
                return hash.digest("hex");
            }
            catch {
                return "0000000000000000000000000000000000000000";
            }
        }
        // format('template {0} {1}', arg0, arg1)
        const formatMatch = trimmed.match(/^format\(([\s\S]+)\)$/);
        if (formatMatch) {
            const formatArgs = formatMatch[1].match(/(?:['"][^'"]*['"]|[^,]+)/g) ?? [];
            const cleaned = formatArgs.map((a) => a.trim().replace(/^['"]|['"]$/g, ""));
            const template = cleaned[0] || "";
            const args = cleaned.slice(1);
            return template.replace(/\{(\d+)\}/g, (_m, idx) => {
                const i = parseInt(idx, 10);
                if (i < args.length) {
                    // Recursively expand the arg value in case it's a context
                    // reference. BUG FIX: previously dropped `secrets` and
                    // `matrixContext`, so format('…', matrix.x) or
                    // format('…', secrets.Y) always expanded to "".
                    return expandExpressions(`\${{ ${args[i]} }}`, repoPath, secrets, matrixContext);
                }
                return "";
            });
        }
        // Context variable substitutions: fixed stand-ins for a local run.
        if (trimmed === "runner.os") {
            return "Linux";
        }
        if (trimmed === "runner.arch") {
            return "X64";
        }
        if (trimmed === "github.run_id") {
            return "1";
        }
        if (trimmed === "github.run_number") {
            return "1";
        }
        if (trimmed === "github.sha" || trimmed === "github.head_sha") {
            return "0000000000000000000000000000000000000000";
        }
        if (trimmed === "github.ref_name" || trimmed === "github.head_ref") {
            return "main";
        }
        if (trimmed === "github.repository") {
            return "local/repo";
        }
        if (trimmed === "github.actor") {
            return "local";
        }
        if (trimmed === "github.event.pull_request.number") {
            return "";
        }
        if (trimmed === "github.event.pull_request.title") {
            return "";
        }
        if (trimmed === "github.event.pull_request.user.login") {
            return "";
        }
        if (trimmed === "strategy.job-total") {
            return matrixContext?.["__job_total"] ?? "1";
        }
        if (trimmed === "strategy.job-index") {
            return matrixContext?.["__job_index"] ?? "0";
        }
        if (trimmed.startsWith("matrix.")) {
            const key = trimmed.slice("matrix.".length);
            return matrixContext?.[key] ?? "";
        }
        if (trimmed.startsWith("secrets.")) {
            const name = trimmed.slice("secrets.".length);
            return secrets?.[name] ?? "";
        }
        if (trimmed.startsWith("steps.")) {
            // Includes steps.*.outputs.cache-hit — no step state locally.
            return "";
        }
        if (trimmed.startsWith("needs.")) {
            return "";
        }
        // Unknown expressions — return empty string (safe: no commas injected)
        return "";
    });
}
142
/**
 * Recursively collect files under rootDir whose root-relative path matches
 * the given minimatch pattern. Dot-entries and node_modules are skipped,
 * and unreadable directories are silently ignored.
 */
function findFiles(rootDir, pattern) {
    const matches = [];
    const normPattern = pattern.replace(/^\.\//, "");
    const visit = (dir, relPrefix) => {
        let entries;
        try {
            entries = fs.readdirSync(dir, { withFileTypes: true });
        }
        catch {
            return; // Unreadable directory — contribute nothing
        }
        for (const entry of entries) {
            // Skip hidden entries and dependency trees entirely.
            if (entry.name.startsWith(".") || entry.name === "node_modules") {
                continue;
            }
            const relPath = relPrefix ? `${relPrefix}/${entry.name}` : entry.name;
            const absPath = path.join(dir, entry.name);
            if (entry.isDirectory()) {
                visit(absPath, relPath);
            }
            else if (minimatch(relPath, normPattern, { dot: true })) {
                matches.push(absPath);
            }
        }
    };
    visit(rootDir, "");
    return matches;
}
173
/**
 * Parse a workflow YAML file and convert it into a workflow template via
 * @actions/workflow-parser. Throws with the parser's collected error
 * messages when the file cannot be parsed.
 */
export async function getWorkflowTemplate(filePath) {
    const { parseWorkflow, NoOperationTraceWriter, convertWorkflowTemplate } = await loadWorkflowParser();
    const content = fs.readFileSync(filePath, "utf8");
    const result = parseWorkflow({ name: filePath, content }, new NoOperationTraceWriter());
    if (result.value === undefined) {
        const messages = result.context.errors
            .getErrors()
            .map((e) => e.message)
            .join(", ");
        throw new Error(`Failed to parse workflow: ${messages}`);
    }
    return await convertWorkflowTemplate(result.context, result.value);
}
185
/**
 * Compute the Cartesian product of a matrix definition.
 * Every value is coerced to a string; an empty matrix yields [{}] so
 * callers always receive at least one combination.
 */
export function expandMatrixCombinations(matrixDef) {
    const keys = Object.keys(matrixDef);
    if (keys.length === 0) {
        return [{}];
    }
    // Fold each key into the running set of combinations.
    return keys.reduce((combos, key) => combos.flatMap((combo) => matrixDef[key].map((val) => ({ ...combo, [key]: String(val) }))), [{}]);
}
208
/**
 * Read the `strategy.matrix` object for a given job from the raw YAML.
 * Only entries whose values are arrays are kept. Returns null when the
 * job has no usable matrix.
 */
export async function parseMatrixDef(filePath, jobId) {
    const doc = parseYaml(fs.readFileSync(filePath, "utf8"));
    const matrix = doc?.jobs?.[jobId]?.strategy?.matrix;
    if (!matrix || typeof matrix !== "object") {
        return null;
    }
    const arrayEntries = Object.entries(matrix).filter(([, v]) => Array.isArray(v));
    if (arrayEntries.length === 0) {
        return null;
    }
    return Object.fromEntries(arrayEntries);
}
227
/**
 * Build the list of executable step descriptors for one job of a workflow.
 *
 * Steps come from @actions/workflow-parser's template, but raw YAML values
 * are preferred for `run` scripts, `with:` maps, `env:` and
 * `working-directory`, because the parser's expression stringification can
 * truncate multiline scripts around `${{ }}` boundaries.
 *
 * @param {string} filePath - Workflow path
 *   (.../repoPath/.github/workflows/foo.yml — repoPath is derived from it).
 * @param {string} taskName - Job id or display name to look up.
 * @param {Record<string,string>} [secrets] - Values for `${{ secrets.* }}`.
 * @param {Record<string,string>} [matrixContext] - Values for `${{ matrix.* }}`.
 * @returns {Promise<object[]>} Step descriptors ("Script" or "Repository").
 * @throws {Error} When the job cannot be found in the workflow.
 */
export async function parseWorkflowSteps(filePath, taskName, secrets, matrixContext) {
    const template = await getWorkflowTemplate(filePath);
    const rawYaml = parseYaml(fs.readFileSync(filePath, "utf8"));
    // Derive repoPath from filePath (.../repoPath/.github/workflows/foo.yml → repoPath)
    const repoPath = path.dirname(path.dirname(path.dirname(filePath)));
    // Find the job by ID or display name
    const job = template.jobs.find((j) => {
        if (j.type !== "job") {
            return false;
        }
        return j.id.toString() === taskName || (j.name && j.name.toString() === taskName);
    });
    if (!job || job.type !== "job") {
        throw new Error(`Task "${taskName}" not found in workflow "${filePath}"`);
    }
    // BUG FIX: look up the raw job by its YAML id, not by taskName — when the
    // job was matched by display name, rawYaml.jobs[taskName] is undefined and
    // every raw-YAML fallback below silently degrades.
    const rawJob = rawYaml.jobs?.[job.id.toString()] || {};
    const rawSteps = rawJob.steps || [];
    // Shared expansion helpers so every site gets the full context.
    const expand = (text) => expandExpressions(text, repoPath, secrets, matrixContext);
    const expandMap = (obj) => Object.fromEntries(Object.entries(obj).map(([k, v]) => [k, expand(String(v))]));
    // BUG FIX: env expansion previously omitted matrixContext for run steps,
    // so `${{ matrix.* }}` in a run step's env always expanded to "".
    const expandEnv = (env) => (env ? expandMap(env) : undefined);
    return job.steps
        .map((step, index) => {
        const stepId = step.id || `step-${index + 1}`;
        let stepName = step.name ? expand(step.name.toString()) : stepId;
        const rawStep = rawSteps[index] || {};
        // If a step lacks an explicit name, mirror GitHub Actions defaults:
        // "Run <first script line>" or "Run <action reference>".
        if (!step.name) {
            if ("run" in step) {
                const runText = rawStep.run != null ? String(rawStep.run) : step.run.toString();
                // Extract the first non-empty line of the script
                const firstLine = runText
                    .split("\n")
                    .map((l) => l.trim())
                    .find(Boolean) || "command";
                stepName = `Run ${firstLine}`;
            }
            else if (step.uses) {
                stepName = `Run ${step.uses.toString()}`;
            }
        }
        if ("run" in step) {
            // Prefer the raw YAML value over step.run.toString(): the
            // workflow-parser stringifies expression trees in ways that can
            // truncate multiline scripts (e.g. dropping text after an embedded
            // ${{ }} boundary). The raw YAML string is the complete scalar.
            const rawScript = rawStep.run != null ? String(rawStep.run) : step.run.toString();
            const inputs = {
                script: expand(rawScript),
            };
            if (rawStep["working-directory"]) {
                inputs.workingDirectory = rawStep["working-directory"];
            }
            return {
                Type: "Action",
                Name: stepName,
                DisplayName: stepName,
                Id: crypto.randomUUID(),
                Reference: {
                    Type: "Script",
                },
                Inputs: inputs,
                Env: expandEnv(rawStep.env),
            };
        }
        else if ("uses" in step) {
            // Basic support for 'uses' steps: parse "owner/repo@ref".
            const uses = step.uses.toString();
            let name = uses;
            let ref = "";
            if (uses.includes("@")) {
                const parts = uses.split("@");
                name = parts[0];
                ref = parts[1];
            }
            const isCheckout = name.trim().toLowerCase() === "actions/checkout";
            const stepWith = rawStep.with || {};
            return {
                Type: "Action",
                Name: stepName,
                DisplayName: stepName,
                Id: crypto.randomUUID(),
                Reference: {
                    Type: "Repository",
                    Name: name,
                    Ref: ref,
                    RepositoryType: "GitHub",
                    Path: "",
                },
                Inputs: {
                    // with: values from @actions/workflow-parser are expression
                    // objects; String() each before expanding.
                    ...(step.with ? expandMap(step.with) : {}),
                    // Raw YAML values override parsed ones.
                    ...expandMap(stepWith),
                    // Prevent actions/checkout from wiping the rsynced
                    // workspace: force safe defaults, then re-apply any
                    // explicit user values. BUG FIX: the re-applied values
                    // previously dropped secrets/matrixContext on expansion.
                    ...(isCheckout
                        ? {
                            clean: "false",
                            "fetch-depth": "0",
                            lfs: "false",
                            submodules: "false",
                            ...expandMap(stepWith),
                        }
                        : {}),
                },
                Env: expandEnv(rawStep.env),
            };
        }
        return null;
    })
        .filter(Boolean);
}
358
/**
 * Extract the `services:` definitions for a job from the raw workflow YAML.
 * Returns one { name, image, env?, ports?, options? } record per service,
 * or an empty array when the job declares no services.
 */
export async function parseWorkflowServices(filePath, taskName) {
    const doc = parseYaml(fs.readFileSync(filePath, "utf8"));
    const rawJob = doc.jobs?.[taskName] || {};
    const services = rawJob.services;
    if (!services || typeof services !== "object") {
        return [];
    }
    return Object.entries(services).map(([name, def]) => {
        const service = {
            name,
            image: def.image || "",
        };
        if (def.env && typeof def.env === "object") {
            service.env = Object.fromEntries(Object.entries(def.env).map(([key, val]) => [key, String(val)]));
        }
        if (Array.isArray(def.ports)) {
            service.ports = def.ports.map(String);
        }
        if (def.options) {
            service.options = String(def.options);
        }
        return service;
    });
}
382
/**
 * Parse the `container:` directive from a workflow job.
 * Returns null if the job doesn't specify a container (or specifies one
 * without an image).
 *
 * Supports both short form (`container: node:18`) and
 * long form (`container: { image: ..., env: ..., ... }`).
 */
export async function parseWorkflowContainer(filePath, taskName) {
    const doc = parseYaml(fs.readFileSync(filePath, "utf8"));
    const rawJob = doc.jobs?.[taskName] || {};
    const container = rawJob.container;
    if (!container) {
        return null;
    }
    // Short form: `container: node:18`
    if (typeof container === "string") {
        return { image: container };
    }
    if (typeof container !== "object") {
        return null;
    }
    const image = container.image || "";
    if (!image) {
        return null;
    }
    const result = { image };
    if (container.env && typeof container.env === "object") {
        result.env = Object.fromEntries(Object.entries(container.env).map(([key, val]) => [key, String(val)]));
    }
    // ports and volumes share the same array-of-strings normalization.
    for (const listKey of ["ports", "volumes"]) {
        if (Array.isArray(container[listKey])) {
            result[listKey] = container[listKey].map(String);
        }
    }
    if (container.options) {
        result.options = String(container.options);
    }
    return result;
}
423
/**
 * List the files changed in the current commit relative to its parent
 * (`git diff --name-only HEAD~1`). Returns an empty array on any git
 * failure — the safe fallback that makes all workflows run.
 */
export function getChangedFiles(repoRoot) {
    try {
        const diffOutput = execSync("git diff --name-only HEAD~1", {
            cwd: repoRoot,
            encoding: "utf-8",
            stdio: ["pipe", "pipe", "pipe"],
        });
        const lines = diffOutput.trim().split("\n");
        return lines.filter((file) => file.length > 0);
    }
    catch {
        // No git, no parent commit, or not a repo — treat as "unknown".
        return [];
    }
}
443
/**
 * Check whether the changed files pass the paths / paths-ignore filter for
 * an event definition. The workflow is considered relevant when:
 * - No changedFiles provided or the array is empty (safe fallback).
 * - No paths / paths-ignore filters are defined.
 * - At least one changed file matches a `paths` pattern.
 * - At least one changed file escapes every `paths-ignore` pattern.
 */
function matchesPaths(eventDef, changedFiles) {
    // No file information → always relevant.
    if (!changedFiles || changedFiles.length === 0) {
        return true;
    }
    const include = eventDef.paths;
    const ignore = eventDef["paths-ignore"];
    if (!include && !ignore) {
        return true; // No path filters defined
    }
    if (include) {
        // Relevant when any changed file matches an include pattern.
        return changedFiles.some((file) => include.some((pattern) => minimatch(file, pattern)));
    }
    if (ignore) {
        // Relevant when any changed file is matched by no ignore pattern.
        return changedFiles.some((file) => !ignore.some((pattern) => minimatch(file, pattern)));
    }
    return true;
}
470
/**
 * Decide whether a workflow should run for a local simulation. A simulated
 * pull_request is assumed to target "main"; a simulated push targets the
 * actual `branch`. Each event passes when both its branch filters and its
 * path filters (matchesPaths) accept the simulated ref / changed files.
 */
export function isWorkflowRelevant(template, branch, changedFiles) {
    const events = template.events;
    if (!events) {
        return false;
    }
    // Shared branch-filter logic: `branches` wins over `branches-ignore`,
    // and no filters at all means "matches everything".
    const branchFilterMatches = (eventDef, refName) => {
        if (!eventDef.branches && !eventDef["branches-ignore"]) {
            return true;
        }
        if (eventDef.branches) {
            return eventDef.branches.some((pattern) => minimatch(refName, pattern));
        }
        return !eventDef["branches-ignore"].some((pattern) => minimatch(refName, pattern));
    };
    // 1. pull_request — simulate a PR raised against main.
    if (events.pull_request) {
        const pr = events.pull_request;
        if (branchFilterMatches(pr, "main") && matchesPaths(pr, changedFiles)) {
            return true;
        }
    }
    // 2. push — check the actual branch.
    if (events.push) {
        const push = events.push;
        if (branchFilterMatches(push, branch) && matchesPaths(push, changedFiles)) {
            return true;
        }
    }
    return false;
}
513
/**
 * Scan a workflow file for all `${{ secrets.FOO }}` references.
 * If `taskName` is provided, only the YAML subtree for that job is scanned
 * so secrets used by other jobs are ignored.
 * Returns a sorted, de-duplicated list of secret names.
 */
export function extractSecretRefs(filePath, taskName) {
    const raw = fs.readFileSync(filePath, "utf8");
    let source = raw;
    if (taskName) {
        try {
            const jobDef = parseYaml(raw)?.jobs?.[taskName];
            if (jobDef) {
                // Stringify the subtree so the same regex scan applies to it.
                source = JSON.stringify(jobDef);
            }
        }
        catch {
            // Unparseable YAML — fall back to scanning the whole file.
        }
    }
    const secretRef = /\$\{\{\s*secrets\.([A-Za-z_][A-Za-z0-9_]*)\s*\}\}/g;
    const names = new Set();
    let match;
    while ((match = secretRef.exec(source)) !== null) {
        names.add(match[1]);
    }
    return [...names].sort();
}
541
/**
 * Validate that every secret referenced by a workflow job is present (and
 * non-empty) in the provided secrets map.
 *
 * @param {string} filePath - Workflow file to scan for secret references.
 * @param {string} taskName - Job whose YAML subtree is scanned.
 * @param {Record<string,string>|undefined} secrets - Available secrets.
 * @param {string} secretsFilePath - Path shown in the error message.
 * @throws {Error} Listing each missing secret name and where to add it.
 */
export function validateSecrets(filePath, taskName, secrets, secretsFilePath) {
    const required = extractSecretRefs(filePath, taskName);
    // Robustness fix: tolerate a null/undefined secrets map (previously a
    // TypeError); every required secret is then reported as missing.
    // Empty-string values also count as missing, matching the `NAME=`
    // placeholder lines emitted below.
    const missing = required.filter((name) => !secrets?.[name]);
    if (missing.length === 0) {
        return;
    }
    throw new Error(`[Agent CI] Missing secrets required by workflow job "${taskName}".\n` +
        `Add the following to ${secretsFilePath}:\n\n` +
        missing.map((n) => `${n}=`).join("\n") +
        "\n");
}