@cifn/runner 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,1117 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __esm = (fn, res) => function __init() {
7
+ return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
8
+ };
9
+ var __export = (target, all) => {
10
+ for (var name in all)
11
+ __defProp(target, name, { get: all[name], enumerable: true });
12
+ };
13
+ var __copyProps = (to, from, except, desc) => {
14
+ if (from && typeof from === "object" || typeof from === "function") {
15
+ for (let key of __getOwnPropNames(from))
16
+ if (!__hasOwnProp.call(to, key) && key !== except)
17
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
18
+ }
19
+ return to;
20
+ };
21
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
22
+
23
+ // src/steps/testfn-run.ts
24
// Lazily-initialized module for the testfn/run step: the export getters are
// registered here, but the module body only runs when init_testfn_run()
// (defined below) is first invoked.
var testfn_run_exports = {};
__export(testfn_run_exports, {
  executeTestFnRun: () => executeTestFnRun,
  executeTestFnRunAsync: () => executeTestFnRunAsync
});
29
// Synchronous adapter for running a test suite through the testfn SDK.
// Validates the framework, runs the suite from inside `workspace`, and
// returns a structured { success, exitCode, lines, error? } result.
// Never throws: every failure is folded into the returned object.
function executeTestFnRun(options) {
  const {
    framework = "vitest",
    testPattern,
    reporter,
    outputPath = "./testfn-results.json",
    workspace,
    env,
    parallel,
    timeout,
    retries
  } = options;
  const lines = [];
  lines.push(`Running tests with testfn SDK (framework: ${framework})`);
  try {
    if (!["vitest", "playwright", "jest"].includes(framework)) {
      throw new Error(`Unsupported framework: ${framework}. Supported: vitest, playwright, jest`);
    }
    const config = {
      framework,
      testPattern: testPattern || "./tests/**/*.{test,spec}.{ts,js}",
      parallel,
      timeout,
      retries,
      env,
      // JSON reports are written inside the workspace at outputPath.
      reporters: reporter === "json" ? [new import_core.JsonReporter((0, import_node_path5.join)(workspace, outputPath))] : void 0
    };
    const originalCwd = process.cwd();
    try {
      process.chdir(workspace);
      const runner = new import_core.TestRunner(config);
      const results = runner.run();
      process.chdir(originalCwd);
      // Guard against an async runner: a thenable here means the caller
      // should have used executeTestFnRunAsync instead.
      if (results && typeof results === "object" && "then" in results) {
        throw new Error("executeTestFnRun must be called from async context or use sync adapter");
      }
      lines.push(`Tests completed: ${results.summary?.total || 0} total`);
      lines.push(`Passed: ${results.summary?.passed || 0}, Failed: ${results.summary?.failed || 0}`);
      if (reporter === "json" && outputPath) {
        const fullPath = (0, import_node_path5.join)(workspace, outputPath);
        if ((0, import_node_fs5.existsSync)(fullPath)) {
          lines.push(`JSON report written to ${outputPath}`);
        }
      }
      const hasFailed = results.summary?.failed > 0;
      if (hasFailed) {
        return {
          success: false,
          exitCode: 1,
          lines,
          error: "Tests failed"
        };
      }
      return { success: true, exitCode: 0, lines };
    } catch (innerErr) {
      // Always restore the original working directory before propagating.
      process.chdir(originalCwd);
      throw innerErr;
    }
  } catch (err) {
    const errorMessage = err instanceof Error ? err.message : String(err);
    lines.push(`Test execution failed: ${errorMessage}`);
    return {
      success: false,
      exitCode: 1,
      lines,
      error: errorMessage
    };
  }
}
98
// Async variant of executeTestFnRun: awaits the TestRunner and returns a
// structured { success, exitCode, lines, error? } result.
// Never throws: every failure (unsupported framework, runner error) is
// folded into the returned object.
async function executeTestFnRunAsync(options) {
  const {
    framework = "vitest",
    testPattern,
    reporter,
    outputPath = "./testfn-results.json",
    workspace,
    env,
    parallel,
    timeout,
    retries
  } = options;
  const lines = [];
  lines.push(`Running tests with testfn SDK (framework: ${framework})`);
  try {
    if (!["vitest", "playwright", "jest"].includes(framework)) {
      throw new Error(`Unsupported framework: ${framework}. Supported: vitest, playwright, jest`);
    }
    const config = {
      framework,
      testPattern: testPattern || "./tests/**/*.{test,spec}.{ts,js}",
      parallel,
      timeout,
      retries,
      env,
      // JSON reports are written inside the workspace at outputPath.
      reporters: reporter === "json" ? [new import_core.JsonReporter((0, import_node_path5.join)(workspace, outputPath))] : void 0
    };
    const originalCwd = process.cwd();
    try {
      process.chdir(workspace);
      const runner = new import_core.TestRunner(config);
      const results = await runner.run();
      process.chdir(originalCwd);
      // Use optional chaining like the sync variant: a result without a
      // summary now reports zero counts instead of crashing into the
      // generic error path with an opaque TypeError.
      lines.push(`Tests completed: ${results.summary?.total || 0} total`);
      lines.push(`Passed: ${results.summary?.passed || 0}, Failed: ${results.summary?.failed || 0}`);
      if (reporter === "json" && outputPath) {
        const fullPath = (0, import_node_path5.join)(workspace, outputPath);
        if ((0, import_node_fs5.existsSync)(fullPath)) {
          lines.push(`JSON report written to ${outputPath}`);
        }
      }
      if ((results.summary?.failed ?? 0) > 0) {
        return {
          success: false,
          exitCode: 1,
          lines,
          error: "Tests failed"
        };
      }
      return { success: true, exitCode: 0, lines };
    } catch (innerErr) {
      // Always restore the original working directory before propagating.
      process.chdir(originalCwd);
      throw innerErr;
    }
  } catch (err) {
    const errorMessage = err instanceof Error ? err.message : String(err);
    lines.push(`Test execution failed: ${errorMessage}`);
    return {
      success: false,
      exitCode: 1,
      lines,
      error: errorMessage
    };
  }
}
163
var import_core, import_node_path5, import_node_fs5;
// Deferred initializer for src/steps/testfn-run.ts: the requires are only
// resolved the first time the testfn/run step executes (see __esm above),
// so @testfn/core is not loaded unless a pipeline actually uses it.
var init_testfn_run = __esm({
  "src/steps/testfn-run.ts"() {
    "use strict";
    import_core = require("@testfn/core");
    import_node_path5 = require("path");
    import_node_fs5 = require("fs");
  }
});
172
+
173
// src/index.ts
// Public package surface: the Runner plus every step executor and helper.
var index_exports = {};
__export(index_exports, {
  DockerExecutor: () => DockerExecutor,
  MemoryLogFnClient: () => MemoryLogFnClient,
  Runner: () => Runner,
  executeArtifactDownload: () => executeArtifactDownload,
  executeArtifactUpload: () => executeArtifactUpload,
  executeCacheRestore: () => executeCacheRestore,
  executeCacheSave: () => executeCacheSave,
  executeCheckout: () => executeCheckout,
  executeHostFnDeploy: () => executeHostFnDeploy,
  executeRunStep: () => executeRunStep,
  executeTestFnRun: () => executeTestFnRun,
  redactSecrets: () => redactSecrets
});
module.exports = __toCommonJS(index_exports);
190
+
191
+ // src/runner.ts
192
+ var import_node_fs6 = require("fs");
193
+ var import_node_os = require("os");
194
+ var import_node_path6 = require("path");
195
+ var import_cifn = require("cifn");
196
+
197
+ // src/executor/run-step.ts
198
+ var import_node_child_process = require("child_process");
199
// Run a shell command inside the workspace and capture its output.
// Returns { exitCode, stdout, stderr, lines } and never throws: a failing
// command is reported through a non-zero exitCode.  `lines` holds the
// non-empty output lines (stdout first, then stderr on failure).
function executeRunStep(command, workspacePath, env) {
  const nonEmptyLines = (text) => text.split("\n").filter((line) => line !== "");
  const mergedEnv = env ? { ...process.env, ...env } : process.env;
  try {
    const captured = (0, import_node_child_process.execSync)(command, {
      cwd: workspacePath,
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
      timeout: 3e5,
      shell: "/bin/sh",
      env: mergedEnv
    });
    return { exitCode: 0, stdout: captured, stderr: "", lines: nonEmptyLines(captured) };
  } catch (err) {
    const failure = err;
    const outText = typeof failure.stdout === "string" ? failure.stdout : "";
    const errText = typeof failure.stderr === "string" ? failure.stderr : "";
    const code = typeof failure.status === "number" ? failure.status : 1;
    return {
      exitCode: code,
      stdout: outText,
      stderr: errText,
      lines: [...nonEmptyLines(outText), ...nonEmptyLines(errText)]
    };
  }
}
224
+
225
+ // src/steps/checkout.ts
226
+ var import_node_child_process2 = require("child_process");
227
// Clone a git repository into the workspace at the requested ref.
//
// options: { repo, ref, workspace, token? }.  When a token is provided for
// an https remote, it is embedded as basic-auth credentials on the clone
// URL; both the raw token and the credentialed URL are redacted ("***")
// from every emitted log line.  Returns { success, lines, error? } and
// never throws.
function executeCheckout(options) {
  const { repo, ref, workspace, token } = options;
  const lines = [];
  const secretsToRedact = [];
  let cloneUrl = repo;
  if (token && cloneUrl.startsWith("https://")) {
    const url = new URL(cloneUrl);
    url.username = "x-access-token";
    url.password = token;
    cloneUrl = url.toString();
    secretsToRedact.push(token);
    secretsToRedact.push(cloneUrl);
  }
  const redactLine = (line) => {
    let redacted = line;
    for (const secret of secretsToRedact) {
      if (secret.length > 0) {
        redacted = redacted.split(secret).join("***");
      }
    }
    return redacted;
  };
  try {
    lines.push(`Cloning ${repo} at ref ${ref}`);
    // Security fix: pass arguments as an array with no shell, so a hostile
    // ref or repo string cannot inject extra shell commands (the previous
    // implementation interpolated them into a /bin/sh command line).
    const output = (0, import_node_child_process2.execFileSync)(
      "git",
      ["clone", "--depth", "1", "--branch", ref, cloneUrl, "."],
      {
        cwd: workspace,
        encoding: "utf-8",
        stdio: ["pipe", "pipe", "pipe"],
        timeout: 12e4
      }
    );
    if (output) {
      lines.push(...output.split("\n").filter((l) => l !== "").map(redactLine));
    }
    lines.push(`Checkout complete: ${ref}`);
    return { success: true, lines };
  } catch (err) {
    const error = err;
    const errMsg = typeof error.stderr === "string" ? redactLine(error.stderr) : error.message ? redactLine(error.message) : "Unknown error";
    lines.push(`Checkout failed: ${errMsg}`);
    return { success: false, lines, error: errMsg };
  }
}
270
+
271
+ // src/steps/artifact-upload.ts
272
+ var import_node_fs = require("fs");
273
+ var import_node_path = require("path");
274
// Recursively gather the absolute paths of every regular file under
// dirPath.  A path that is itself a file yields a one-element list; a
// missing or non-file/non-directory path yields an empty list.
function collectFiles(dirPath) {
  if (!(0, import_node_fs.existsSync)(dirPath)) return [];
  const rootStat = (0, import_node_fs.statSync)(dirPath);
  if (rootStat.isFile()) return [dirPath];
  if (!rootStat.isDirectory()) return [];
  const found = [];
  for (const entry of (0, import_node_fs.readdirSync)(dirPath, { withFileTypes: true })) {
    const entryPath = (0, import_node_path.join)(dirPath, entry.name);
    if (entry.isFile()) {
      found.push(entryPath);
    } else if (entry.isDirectory()) {
      found.push(...collectFiles(entryPath));
    }
  }
  return found;
}
// Bundle every file under `path` into one buffer — a 4-byte big-endian
// manifest length, the JSON manifest ({ relativePath, offset, size } per
// file), then the concatenated file bytes — and upload it under the
// run-scoped namespace `artifact:<runId>`.
// Returns { success, fileId?, lines, error? }; never throws.
async function executeArtifactUpload(options) {
  const { name, path: artifactPath, workspace, runId, fileFnClient } = options;
  const lines = [];
  if (name.length > 256) {
    const msg = `Artifact name exceeds maximum length of 256 characters (actual: ${name.length})`;
    lines.push(msg);
    return { success: false, lines, error: msg };
  }
  const fullPath = (0, import_node_path.join)(workspace, artifactPath);
  if (!(0, import_node_fs.existsSync)(fullPath)) {
    const msg = `Artifact path not found: ${artifactPath}`;
    lines.push(msg);
    return { success: false, lines, error: msg };
  }
  try {
    lines.push(`Uploading artifact "${name}" from ${artifactPath}`);
    const files = collectFiles(fullPath);
    const manifest = [];
    const chunks = [];
    let cursor = 0;
    for (const file of files) {
      const bytes = (0, import_node_fs.readFileSync)(file);
      // For a single-file artifact relative(fullPath, file) is "", so fall
      // back to the workspace-relative path.
      const rel = (0, import_node_path.relative)(fullPath, file) || (0, import_node_path.relative)(workspace, file);
      manifest.push({ relativePath: rel, offset: cursor, size: bytes.length });
      chunks.push(bytes);
      cursor += bytes.length;
    }
    const manifestBuf = Buffer.from(JSON.stringify(manifest));
    const header = Buffer.alloc(4);
    header.writeUInt32BE(manifestBuf.length, 0);
    const payload = Buffer.concat([header, manifestBuf, ...chunks]);
    const fileId = await fileFnClient.upload(`artifact:${runId}`, name, payload);
    lines.push(`Uploaded ${files.length} file(s), artifact fileId: ${fileId}`);
    return { success: true, fileId, lines };
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    lines.push(`Upload failed: ${msg}`);
    return { success: false, lines, error: msg };
  }
}
331
+
332
+ // src/steps/artifact-download.ts
333
+ var import_node_fs2 = require("fs");
334
+ var import_node_path2 = require("path");
335
// Fetch a previously uploaded artifact bundle and unpack it into the
// workspace.  The bundle layout mirrors executeArtifactUpload: a 4-byte
// big-endian manifest length, the JSON manifest, then the raw file bytes.
// Returns { success, lines, error? }; never throws.
async function executeArtifactDownload(options) {
  const { name, workspace, runId, fileFnClient } = options;
  const lines = [];
  try {
    lines.push(`Downloading artifact "${name}"`);
    const bundle = await fileFnClient.downloadByKey(`artifact:${runId}`, name);
    if (!bundle) {
      const msg = `Artifact "${name}" not found`;
      lines.push(msg);
      return { success: false, lines, error: msg };
    }
    const manifestLen = bundle.readUInt32BE(0);
    const manifest = JSON.parse(bundle.subarray(4, 4 + manifestLen).toString("utf-8"));
    const base = 4 + manifestLen;
    for (const { relativePath, offset, size } of manifest) {
      const target = (0, import_node_path2.join)(workspace, relativePath);
      (0, import_node_fs2.mkdirSync)((0, import_node_path2.dirname)(target), { recursive: true });
      (0, import_node_fs2.writeFileSync)(target, bundle.subarray(base + offset, base + offset + size));
    }
    lines.push(`Downloaded and extracted ${manifest.length} file(s)`);
    return { success: true, lines };
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    lines.push(`Download failed: ${msg}`);
    return { success: false, lines, error: msg };
  }
}
365
+
366
+ // src/steps/cache-save.ts
367
+ var import_node_fs3 = require("fs");
368
+ var import_node_path3 = require("path");
369
// Recursively read every regular file under dirPath, returning
// { relativePath, data } pairs with paths expressed relative to basePath.
// A missing path yields an empty list; a file path yields one entry.
function collectFiles2(dirPath, basePath) {
  if (!(0, import_node_fs3.existsSync)(dirPath)) return [];
  const info = (0, import_node_fs3.statSync)(dirPath);
  if (info.isFile()) {
    return [{ relativePath: (0, import_node_path3.relative)(basePath, dirPath), data: (0, import_node_fs3.readFileSync)(dirPath) }];
  }
  if (!info.isDirectory()) return [];
  const collected = [];
  for (const entry of (0, import_node_fs3.readdirSync)(dirPath, { withFileTypes: true })) {
    const childPath = (0, import_node_path3.join)(dirPath, entry.name);
    if (entry.isFile()) {
      collected.push({ relativePath: (0, import_node_path3.relative)(basePath, childPath), data: (0, import_node_fs3.readFileSync)(childPath) });
    } else if (entry.isDirectory()) {
      collected.push(...collectFiles2(childPath, basePath));
    }
  }
  return collected;
}
// Pack the given workspace-relative paths into one bundle (4-byte
// big-endian manifest length + JSON manifest + concatenated file bytes)
// and store it under the "cache" namespace keyed by `key`.  Keys are
// limited to 1KB of UTF-8.  Finding no files is a successful no-op.
// Returns { success, lines, error? }; never throws.
async function executeCacheSave(options) {
  const { key, paths, workspace, fileFnClient } = options;
  const lines = [];
  const keyBytes = Buffer.byteLength(key, "utf8");
  if (keyBytes > 1024) {
    const msg = `Cache key exceeds maximum length of 1KB (actual: ${keyBytes} bytes)`;
    lines.push(msg);
    return { success: false, lines, error: msg };
  }
  try {
    lines.push(`Saving cache with key "${key}"`);
    const entries = paths.flatMap((p) => collectFiles2((0, import_node_path3.join)(workspace, p), workspace));
    if (entries.length === 0) {
      lines.push("No files found to cache");
      return { success: true, lines };
    }
    const manifest = [];
    const chunks = [];
    let cursor = 0;
    for (const entry of entries) {
      manifest.push({ relativePath: entry.relativePath, offset: cursor, size: entry.data.length });
      chunks.push(entry.data);
      cursor += entry.data.length;
    }
    const manifestBuf = Buffer.from(JSON.stringify(manifest));
    const header = Buffer.alloc(4);
    header.writeUInt32BE(manifestBuf.length, 0);
    await fileFnClient.upload("cache", key, Buffer.concat([header, manifestBuf, ...chunks]));
    lines.push(`Cached ${entries.length} file(s) under key "${key}"`);
    return { success: true, lines };
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    lines.push(`Cache save failed: ${msg}`);
    return { success: false, lines, error: msg };
  }
}
430
+
431
+ // src/steps/cache-restore.ts
432
+ var import_node_fs4 = require("fs");
433
+ var import_node_path4 = require("path");
434
// Restore a cache bundle saved by executeCacheSave into the workspace.
// A missing key is a successful miss ({ success: true, hit: false }),
// not an error.  Keys are limited to 1KB of UTF-8.
// Returns { success, hit, lines, error? }; never throws.
async function executeCacheRestore(options) {
  const { key, workspace, fileFnClient } = options;
  const lines = [];
  const keyBytes = Buffer.byteLength(key, "utf8");
  if (keyBytes > 1024) {
    const msg = `Cache key exceeds maximum length of 1KB (actual: ${keyBytes} bytes)`;
    lines.push(msg);
    return { success: false, hit: false, lines, error: msg };
  }
  try {
    lines.push(`Restoring cache with key "${key}"`);
    const bundle = await fileFnClient.downloadByKey("cache", key);
    if (!bundle) {
      lines.push(`Cache miss for key "${key}"`);
      return { success: true, hit: false, lines };
    }
    const manifestLen = bundle.readUInt32BE(0);
    const manifest = JSON.parse(bundle.subarray(4, 4 + manifestLen).toString("utf-8"));
    const base = 4 + manifestLen;
    for (const { relativePath, offset, size } of manifest) {
      const target = (0, import_node_path4.join)(workspace, relativePath);
      (0, import_node_fs4.mkdirSync)((0, import_node_path4.dirname)(target), { recursive: true });
      (0, import_node_fs4.writeFileSync)(target, bundle.subarray(base + offset, base + offset + size));
    }
    lines.push(`Cache hit: restored ${manifest.length} file(s)`);
    return { success: true, hit: true, lines };
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    lines.push(`Cache restore failed: ${msg}`);
    return { success: false, hit: false, lines, error: msg };
  }
}
468
+
469
+ // src/steps/hostfn-deploy.ts
470
+ var import_node_child_process3 = require("child_process");
471
// Invoke the hostfn CLI to deploy `environment` from the workspace.
// Flag order matches the CLI expectation: --local (when options.local),
// then --ci (default true).  Output lines are captured into `lines`.
// Returns { success, exitCode, lines, error? }; never throws.
function executeHostFnDeploy(options) {
  const { environment, ci = true, local = false, workspace, env } = options;
  const lines = [];
  const flags = [local ? " --local" : "", ci ? " --ci" : ""].join("");
  const command = `hostfn deploy ${environment}${flags}`;
  lines.push(`Deploying: ${command}`);
  const splitOutput = (text) => text.split("\n").filter((l) => l !== "");
  try {
    const output = (0, import_node_child_process3.execSync)(command, {
      cwd: workspace,
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
      timeout: 6e5,
      shell: "/bin/sh",
      env: env ? { ...process.env, ...env } : process.env
    });
    if (output) {
      lines.push(...splitOutput(output));
    }
    lines.push("Deploy succeeded");
    return { success: true, exitCode: 0, lines };
  } catch (err) {
    const failure = err;
    const outText = typeof failure.stdout === "string" ? failure.stdout : "";
    const errText = typeof failure.stderr === "string" ? failure.stderr : "";
    const code = typeof failure.status === "number" ? failure.status : 1;
    if (outText) lines.push(...splitOutput(outText));
    if (errText) lines.push(...splitOutput(errText));
    lines.push(`Deploy failed with exit code ${code}`);
    return { success: false, exitCode: code, lines, error: `Deploy failed with exit code ${code}` };
  }
}
503
+
504
+ // src/reporting/redact.ts
505
// Replace every occurrence of each secret value with "***" in each line.
// Empty secret strings are ignored; when no secrets are supplied the input
// array is returned unchanged (same reference, not a copy).
function redactSecrets(lines, secretValues) {
  if (secretValues.length === 0) return lines;
  const meaningful = secretValues.filter((secret) => secret.length > 0);
  return lines.map(
    (line) => meaningful.reduce((acc, secret) => acc.split(secret).join("***"), line)
  );
}
517
+
518
+ // src/runner.ts
519
+ var Runner = class {
520
+ constructor(options) {
521
+ this.store = options.store;
522
+ this.queue = options.queue;
523
+ this.logClient = options.logClient;
524
+ this.pipelineSpecs = options.pipelineSpecs ?? /* @__PURE__ */ new Map();
525
+ this.queueName = options.queueName ?? import_cifn.DEFAULT_QUEUE_NAME;
526
+ this.cleanWorkspace = options.cleanWorkspace ?? true;
527
+ this.fileFnClient = options.fileFnClient;
528
+ this.artifactStore = options.artifactStore;
529
+ this.secretValues = options.secretValues ?? /* @__PURE__ */ new Map();
530
+ this.getSecret = options.getSecret;
531
+ this.runnerType = options.runnerType ?? "default";
532
+ this.labels = options.labels && options.labels.length > 0 ? [...new Set(options.labels)] : [this.runnerType];
533
+ this.dockerExecutor = options.dockerExecutor;
534
+ this.defaultDockerImage = options.defaultDockerImage ?? "node:20";
535
+ this.dockerRunOnLabels = new Set(options.dockerRunOnLabels ?? ["docker-ubuntu-22"]);
536
+ this.dockerForDefault = options.dockerForDefault ?? false;
537
+ }
538
+ registerPipelineSpec(runId, spec) {
539
+ this.pipelineSpecs.set(runId, spec);
540
+ }
541
+ registerSecretValues(runId, values) {
542
+ this.secretValues.set(runId, values);
543
+ }
544
+ async processNextJob() {
545
+ const payload = await this.queue.dequeueMatching(
546
+ this.queueName,
547
+ (job) => this.labels.includes(job.jobSpec["runs-on"])
548
+ );
549
+ if (!payload) return false;
550
+ await this.executeJob(payload);
551
+ return true;
552
+ }
553
+ async processAllJobs() {
554
+ let count = 0;
555
+ while (await this.processNextJob()) {
556
+ count++;
557
+ }
558
+ return count;
559
+ }
560
+ async executeJob(payload) {
561
+ const { runId, jobKey, jobSpec } = payload;
562
+ let jobEnv = { ...payload.env };
563
+ const secretVals = this.secretValues.get(runId) ?? [];
564
+ if (payload.secretKeys && jobEnv) {
565
+ for (const key of payload.secretKeys) {
566
+ const val = jobEnv[key];
567
+ if (val && !secretVals.includes(val)) {
568
+ secretVals.push(val);
569
+ }
570
+ }
571
+ }
572
+ const logLines = (stepKey, lines) => {
573
+ this.logClient.appendLines(runId, jobKey, stepKey, redactSecrets(lines, secretVals));
574
+ };
575
+ const run = this.store.getRun(runId);
576
+ if (!run) return;
577
+ const job = run.jobs.find((j) => j.jobKey === jobKey);
578
+ if (!job) return;
579
+ const github = run.trigger?.payload?.github ?? {};
580
+ if (!github.ref && payload.pipelineRef?.ref) github.ref = payload.pipelineRef.ref;
581
+ if (!github.repository && payload.pipelineRef?.repo) github.repository = payload.pipelineRef.repo;
582
+ if (!github.event_name && run.trigger?.type) github.event_name = run.trigger.type;
583
+ if (jobSpec.if !== void 0 && jobSpec.if !== null) {
584
+ try {
585
+ const runJob = (0, import_cifn.evaluateConditionSync)(String(jobSpec.if).trim(), { github, stepOutcomes: [] });
586
+ if (!runJob) {
587
+ job.status = "skipped";
588
+ job.completedAt = (/* @__PURE__ */ new Date()).toISOString();
589
+ logLines("__job__", [`Job "${jobKey}" skipped (if: false)`]);
590
+ this.enqueueDependentJobs(runId, jobKey);
591
+ this.checkRunCompletion(runId);
592
+ return;
593
+ }
594
+ } catch {
595
+ job.status = "failure";
596
+ job.completedAt = (/* @__PURE__ */ new Date()).toISOString();
597
+ logLines("__job__", [`Job "${jobKey}" failed (invalid if expression)`]);
598
+ this.store.updateRunStatus(runId, "failure");
599
+ return;
600
+ }
601
+ }
602
+ if (run.status === "queued") {
603
+ this.store.updateRunStatus(runId, "running");
604
+ }
605
+ job.status = "running";
606
+ job.startedAt = (/* @__PURE__ */ new Date()).toISOString();
607
+ logLines("__job__", [`Job "${jobKey}" started`]);
608
+ const workspace = (0, import_node_fs6.mkdtempSync)((0, import_node_path6.join)((0, import_node_os.tmpdir)(), `cifn-${runId}-${jobKey}-`));
609
+ let jobFailed = false;
610
+ const interpolateContext = {
611
+ github,
612
+ getSecret: this.getSecret ? (name) => this.getSecret(runId, name) : void 0,
613
+ workspaceRoot: workspace,
614
+ hashFiles: (glob) => Promise.resolve((0, import_cifn.hashFiles)(workspace, glob))
615
+ };
616
+ if (jobSpec.env) {
617
+ for (const [k, v] of Object.entries(jobSpec.env)) {
618
+ try {
619
+ const { result, secretValues: sv } = await (0, import_cifn.interpolate)(String(v), interpolateContext);
620
+ jobEnv[k] = result;
621
+ secretVals.push(...sv);
622
+ } catch {
623
+ jobEnv[k] = String(v);
624
+ }
625
+ }
626
+ }
627
+ try {
628
+ for (let i = 0; i < jobSpec.steps.length; i++) {
629
+ const stepSpec = jobSpec.steps[i];
630
+ const step = job.steps[i];
631
+ if (!step) continue;
632
+ const effectiveIf = "if" in stepSpec && stepSpec.if != null ? String(stepSpec.if).trim() : "success()";
633
+ const stepOutcomes = job.steps.slice(0, i).map((s) => s.status);
634
+ try {
635
+ const runStep = (0, import_cifn.evaluateConditionSync)(effectiveIf, { github, stepOutcomes });
636
+ if (!runStep) {
637
+ step.status = "skipped";
638
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
639
+ logLines(step.stepKey, [`Step "${step.stepKey}" skipped (if: false)`]);
640
+ continue;
641
+ }
642
+ } catch {
643
+ step.status = "failure";
644
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
645
+ logLines(step.stepKey, [`Step "${step.stepKey}" failed (invalid if expression)`]);
646
+ jobFailed = true;
647
+ continue;
648
+ }
649
+ if ("uses" in stepSpec) {
650
+ if (stepSpec.uses === "checkout") {
651
+ step.status = "running";
652
+ step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
653
+ logLines(step.stepKey, [
654
+ `Step "${step.stepKey}" started: checkout`
655
+ ]);
656
+ const repo = stepSpec.with?.repository ?? payload.pipelineRef?.repo ?? "";
657
+ const ref = stepSpec.with?.ref ?? payload.pipelineRef?.ref ?? "main";
658
+ const token = stepSpec.with?.token;
659
+ const checkoutResult = executeCheckout({ repo, ref, workspace, token });
660
+ logLines(step.stepKey, checkoutResult.lines);
661
+ if (checkoutResult.success) {
662
+ step.status = "success";
663
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
664
+ logLines(step.stepKey, [
665
+ `Step "${step.stepKey}" completed successfully`
666
+ ]);
667
+ } else {
668
+ step.status = "failure";
669
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
670
+ logLines(step.stepKey, [
671
+ `Step "${step.stepKey}" failed: ${checkoutResult.error}`
672
+ ]);
673
+ jobFailed = true;
674
+ }
675
+ continue;
676
+ }
677
+ if (stepSpec.uses === "artifact/upload" && this.fileFnClient) {
678
+ step.status = "running";
679
+ step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
680
+ let artName = stepSpec.with?.name ?? "default";
681
+ let artPath = stepSpec.with?.path ?? ".";
682
+ try {
683
+ const nameRes = await (0, import_cifn.interpolate)(artName, interpolateContext);
684
+ artName = nameRes.result;
685
+ secretVals.push(...nameRes.secretValues);
686
+ const pathRes = await (0, import_cifn.interpolate)(artPath, interpolateContext);
687
+ artPath = pathRes.result;
688
+ secretVals.push(...pathRes.secretValues);
689
+ } catch {
690
+ step.status = "failure";
691
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
692
+ logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
693
+ jobFailed = true;
694
+ continue;
695
+ }
696
+ logLines(step.stepKey, [
697
+ `Step "${step.stepKey}" started: artifact/upload "${artName}"`
698
+ ]);
699
+ const uploadResult = await executeArtifactUpload({
700
+ name: artName,
701
+ path: artPath,
702
+ workspace,
703
+ runId,
704
+ fileFnClient: this.fileFnClient
705
+ });
706
+ logLines(step.stepKey, uploadResult.lines);
707
+ if (uploadResult.success) {
708
+ if (uploadResult.fileId && this.artifactStore) {
709
+ this.artifactStore.addArtifact(runId, { name: artName, fileId: uploadResult.fileId });
710
+ }
711
+ step.status = "success";
712
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
713
+ logLines(step.stepKey, [
714
+ `Step "${step.stepKey}" completed successfully`
715
+ ]);
716
+ } else {
717
+ step.status = "failure";
718
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
719
+ logLines(step.stepKey, [
720
+ `Step "${step.stepKey}" failed: ${uploadResult.error}`
721
+ ]);
722
+ jobFailed = true;
723
+ }
724
+ continue;
725
+ }
726
+ if (stepSpec.uses === "artifact/download" && this.fileFnClient) {
727
+ step.status = "running";
728
+ step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
729
+ let artName = stepSpec.with?.name ?? "default";
730
+ try {
731
+ const nameRes = await (0, import_cifn.interpolate)(artName, interpolateContext);
732
+ artName = nameRes.result;
733
+ secretVals.push(...nameRes.secretValues);
734
+ } catch {
735
+ step.status = "failure";
736
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
737
+ logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
738
+ jobFailed = true;
739
+ continue;
740
+ }
741
+ logLines(step.stepKey, [
742
+ `Step "${step.stepKey}" started: artifact/download "${artName}"`
743
+ ]);
744
+ const downloadResult = await executeArtifactDownload({
745
+ name: artName,
746
+ workspace,
747
+ runId,
748
+ fileFnClient: this.fileFnClient
749
+ });
750
+ logLines(step.stepKey, downloadResult.lines);
751
+ if (downloadResult.success) {
752
+ step.status = "success";
753
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
754
+ logLines(step.stepKey, [
755
+ `Step "${step.stepKey}" completed successfully`
756
+ ]);
757
+ } else {
758
+ step.status = "failure";
759
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
760
+ logLines(step.stepKey, [
761
+ `Step "${step.stepKey}" failed: ${downloadResult.error}`
762
+ ]);
763
+ jobFailed = true;
764
+ }
765
+ continue;
766
+ }
767
+ if (stepSpec.uses === "cache/save" && this.fileFnClient) {
768
+ step.status = "running";
769
+ step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
770
+ let cacheKey = stepSpec.with?.key ?? "";
771
+ try {
772
+ const keyRes = await (0, import_cifn.interpolate)(cacheKey, interpolateContext);
773
+ cacheKey = keyRes.result;
774
+ secretVals.push(...keyRes.secretValues);
775
+ } catch {
776
+ step.status = "failure";
777
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
778
+ logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
779
+ jobFailed = true;
780
+ continue;
781
+ }
782
+ const cachePaths = stepSpec.with?.paths ?? [];
783
+ logLines(step.stepKey, [
784
+ `Step "${step.stepKey}" started: cache/save "${cacheKey}"`
785
+ ]);
786
+ const saveResult = await executeCacheSave({
787
+ key: cacheKey,
788
+ paths: cachePaths,
789
+ workspace,
790
+ fileFnClient: this.fileFnClient
791
+ });
792
+ logLines(step.stepKey, saveResult.lines);
793
+ step.status = saveResult.success ? "success" : "failure";
794
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
795
+ if (!saveResult.success) {
796
+ logLines(step.stepKey, [
797
+ `Step "${step.stepKey}" failed: ${saveResult.error}`
798
+ ]);
799
+ jobFailed = true;
800
+ } else {
801
+ logLines(step.stepKey, [
802
+ `Step "${step.stepKey}" completed successfully`
803
+ ]);
804
+ }
805
+ continue;
806
+ }
807
+ if (stepSpec.uses === "cache/restore" && this.fileFnClient) {
808
+ step.status = "running";
809
+ step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
810
+ let cacheKey = stepSpec.with?.key ?? "";
811
+ try {
812
+ const keyRes = await (0, import_cifn.interpolate)(cacheKey, interpolateContext);
813
+ cacheKey = keyRes.result;
814
+ secretVals.push(...keyRes.secretValues);
815
+ } catch {
816
+ step.status = "failure";
817
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
818
+ logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
819
+ jobFailed = true;
820
+ continue;
821
+ }
822
+ logLines(step.stepKey, [
823
+ `Step "${step.stepKey}" started: cache/restore "${cacheKey}"`
824
+ ]);
825
+ const restoreResult = await executeCacheRestore({
826
+ key: cacheKey,
827
+ workspace,
828
+ fileFnClient: this.fileFnClient
829
+ });
830
+ logLines(step.stepKey, restoreResult.lines);
831
+ step.status = "success";
832
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
833
+ logLines(step.stepKey, [
834
+ `Step "${step.stepKey}" completed successfully${restoreResult.hit ? " (cache hit)" : " (cache miss)"}`
835
+ ]);
836
+ continue;
837
+ }
838
+ if (stepSpec.uses === "testfn/run") {
839
+ step.status = "running";
840
+ step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
841
+ const framework = stepSpec.with?.framework;
842
+ const testPattern = stepSpec.with?.testPattern;
843
+ const reporter = stepSpec.with?.reporter;
844
+ const outputPath = stepSpec.with?.outputPath;
845
+ const parallel = stepSpec.with?.parallel;
846
+ const timeout = stepSpec.with?.timeout;
847
+ const retries = stepSpec.with?.retries;
848
+ logLines(step.stepKey, [
849
+ `Step "${step.stepKey}" started: testfn/run`
850
+ ]);
851
+ const { executeTestFnRunAsync: executeTestFnRunAsync2 } = await Promise.resolve().then(() => (init_testfn_run(), testfn_run_exports));
852
+ const testResult = await executeTestFnRunAsync2({
853
+ framework,
854
+ testPattern,
855
+ reporter,
856
+ outputPath,
857
+ workspace,
858
+ env: jobEnv,
859
+ parallel,
860
+ timeout,
861
+ retries
862
+ });
863
+ logLines(step.stepKey, testResult.lines);
864
+ if (testResult.success) {
865
+ step.status = "success";
866
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
867
+ logLines(step.stepKey, [
868
+ `Step "${step.stepKey}" completed successfully`
869
+ ]);
870
+ } else {
871
+ step.status = "failure";
872
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
873
+ logLines(step.stepKey, [
874
+ `Step "${step.stepKey}" failed: ${testResult.error}`
875
+ ]);
876
+ jobFailed = true;
877
+ }
878
+ continue;
879
+ }
880
+ if (stepSpec.uses === "hostfn/deploy") {
881
+ step.status = "running";
882
+ step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
883
+ const environment = stepSpec.with?.environment ?? "";
884
+ const ci = stepSpec.with?.ci ?? true;
885
+ const local = typeof stepSpec.with?.local === "boolean" ? stepSpec.with.local : jobSpec["runs-on"] === "hostfn-runner";
886
+ logLines(step.stepKey, [
887
+ `Step "${step.stepKey}" started: hostfn/deploy ${environment}`
888
+ ]);
889
+ const deployResult = executeHostFnDeploy({
890
+ environment,
891
+ ci,
892
+ local,
893
+ workspace,
894
+ env: jobEnv
895
+ });
896
+ logLines(step.stepKey, deployResult.lines);
897
+ if (deployResult.success) {
898
+ step.status = "success";
899
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
900
+ logLines(step.stepKey, [
901
+ `Step "${step.stepKey}" completed successfully`
902
+ ]);
903
+ } else {
904
+ step.status = "failure";
905
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
906
+ logLines(step.stepKey, [
907
+ `Step "${step.stepKey}" failed: ${deployResult.error}`
908
+ ]);
909
+ jobFailed = true;
910
+ }
911
+ continue;
912
+ }
913
+ step.status = "skipped";
914
+ logLines(step.stepKey, [
915
+ `Step "${step.stepKey}" skipped (unsupported uses: ${stepSpec.uses})`
916
+ ]);
917
+ continue;
918
+ }
919
+ step.status = "running";
920
+ step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
921
+ let runCommand = stepSpec.run;
922
+ try {
923
+ const runRes = await (0, import_cifn.interpolate)(stepSpec.run, interpolateContext);
924
+ runCommand = runRes.result;
925
+ secretVals.push(...runRes.secretValues);
926
+ } catch {
927
+ step.status = "failure";
928
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
929
+ logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
930
+ jobFailed = true;
931
+ continue;
932
+ }
933
+ logLines(step.stepKey, [
934
+ `Step "${step.stepKey}" started: ${runCommand}`
935
+ ]);
936
+ const result = this.executeRunCommand(runCommand, workspace, jobSpec, jobEnv);
937
+ logLines(step.stepKey, result.lines);
938
+ if (result.exitCode === 0) {
939
+ step.status = "success";
940
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
941
+ logLines(step.stepKey, [
942
+ `Step "${step.stepKey}" completed successfully`
943
+ ]);
944
+ } else {
945
+ step.status = "failure";
946
+ step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
947
+ logLines(step.stepKey, [
948
+ `Step "${step.stepKey}" failed with exit code ${result.exitCode}`
949
+ ]);
950
+ jobFailed = true;
951
+ }
952
+ }
953
+ } finally {
954
+ if (this.cleanWorkspace) {
955
+ try {
956
+ (0, import_node_fs6.rmSync)(workspace, { recursive: true, force: true });
957
+ } catch {
958
+ }
959
+ }
960
+ }
961
+ job.completedAt = (/* @__PURE__ */ new Date()).toISOString();
962
+ if (jobFailed) {
963
+ job.status = "failure";
964
+ logLines("__job__", [`Job "${jobKey}" failed`]);
965
+ this.store.updateRunStatus(runId, "failure");
966
+ } else {
967
+ job.status = "success";
968
+ logLines("__job__", [`Job "${jobKey}" completed successfully`]);
969
+ await this.enqueueDependentJobs(runId, jobKey);
970
+ this.checkRunCompletion(runId);
971
+ }
972
+ }
973
+ executeRunCommand(command, workspace, jobSpec, env) {
974
+ if (this.shouldUseDocker(jobSpec) && this.dockerExecutor) {
975
+ return this.dockerExecutor.execute({
976
+ image: jobSpec.image ?? this.defaultDockerImage,
977
+ workspace,
978
+ command,
979
+ env
980
+ });
981
+ }
982
+ return executeRunStep(command, workspace, env);
983
+ }
984
+ shouldUseDocker(jobSpec) {
985
+ if (!this.dockerExecutor) return false;
986
+ if (typeof jobSpec.image === "string" && jobSpec.image.length > 0) return true;
987
+ if (jobSpec["runs-on"] === "default" && this.dockerForDefault) return true;
988
+ return this.dockerRunOnLabels.has(jobSpec["runs-on"]);
989
+ }
990
+ async enqueueDependentJobs(runId, completedJobKey) {
991
+ const spec = this.pipelineSpecs.get(runId);
992
+ if (!spec) return;
993
+ const run = this.store.getRun(runId);
994
+ if (!run) return;
995
+ const completedJobs = new Set(
996
+ run.jobs.filter((j) => j.status === "success" || j.status === "skipped").map((j) => j.jobKey)
997
+ );
998
+ const enqueuedJobs = new Set(
999
+ run.jobs.filter((j) => j.status !== "pending").map((j) => j.jobKey)
1000
+ );
1001
+ const readyJobs = (0, import_cifn.getReadyJobs)(spec, completedJobs, enqueuedJobs);
1002
+ for (const readyJobKey of readyJobs) {
1003
+ const jobSpec = spec.jobs[readyJobKey];
1004
+ if (jobSpec) {
1005
+ await this.queue.enqueue(this.queueName, {
1006
+ runId,
1007
+ jobKey: readyJobKey,
1008
+ jobSpec
1009
+ });
1010
+ }
1011
+ }
1012
+ }
1013
+ checkRunCompletion(runId) {
1014
+ const run = this.store.getRun(runId);
1015
+ if (!run) return;
1016
+ const allDone = run.jobs.every(
1017
+ (j) => j.status === "success" || j.status === "failure" || j.status === "skipped"
1018
+ );
1019
+ if (allDone) {
1020
+ const anyFailure = run.jobs.some((j) => j.status === "failure");
1021
+ this.store.updateRunStatus(runId, anyFailure ? "failure" : "success");
1022
+ }
1023
+ }
1024
+ };
1025
+
1026
+ // src/reporting/logfn-client.ts
1027
var MemoryLogFnClient = class {
  /**
   * In-memory log sink keyed by run / job / step. Entries are kept in
   * insertion order; queries are simple filters over the flat list.
   */
  constructor() {
    // Flat append-only list of { runId, jobKey, stepKey, line, timestamp }.
    this.entries = [];
  }
  /** Store one pre-built log entry as-is. */
  append(entry) {
    this.entries.push(entry);
  }
  /**
   * Record a batch of lines for one step; every line in the batch shares
   * the same ISO-8601 timestamp taken once up front.
   */
  appendLines(runId, jobKey, stepKey, lines) {
    const timestamp = (/* @__PURE__ */ new Date()).toISOString();
    const stamped = lines.map((line) => ({ runId, jobKey, stepKey, line, timestamp }));
    this.entries.push(...stamped);
  }
  /** All entries for one job within one run, in insertion order. */
  getLines(runId, jobKey) {
    return this.entries.filter((entry) => entry.runId === runId && entry.jobKey === jobKey);
  }
  /** Every entry recorded for a run, across all of its jobs. */
  getAllLines(runId) {
    return this.entries.filter((entry) => entry.runId === runId);
  }
};
1047
+
1048
+ // src/index.ts
1049
+ init_testfn_run();
1050
+
1051
+ // src/docker-executor.ts
1052
+ var import_node_child_process4 = require("child_process");
1053
var DefaultDockerCommandRunner = class {
  /**
   * Default runner that shells out to the real `docker` binary
   * synchronously via child_process.spawnSync.
   *
   * @param {string[]} args - arguments passed verbatim to `docker`.
   * @param {{cwd: string, env?: object}} options - working directory and
   *   optional extra environment merged over process.env.
   * @returns {{status: number|null, stdout: string, stderr: string, error?: string}}
   *   status is null when the process did not exit normally (e.g. spawn
   *   failure or the 10-minute timeout); error carries the spawn error
   *   message when present.
   */
  run(args, options) {
    const spawnEnv = options.env ? { ...process.env, ...options.env } : process.env;
    const result = (0, import_node_child_process4.spawnSync)("docker", args, {
      cwd: options.cwd,
      encoding: "utf-8",
      env: spawnEnv,
      timeout: 6e5
    });
    let errorMessage = void 0;
    if (result.error) {
      errorMessage = String(result.error.message ?? result.error);
    }
    return {
      status: result.status,
      stdout: result.stdout ?? "",
      stderr: result.stderr ?? "",
      error: errorMessage
    };
  }
};
1069
var DockerExecutor = class {
  /**
   * Executes step commands inside a transient Docker container with the
   * job workspace bind-mounted at /workspace.
   *
   * @param {{run: Function}} [runner] - docker CLI runner; defaults to one
   *   that spawns the real `docker` binary. Injectable for testing.
   */
  constructor(runner) {
    this.runner = runner ?? new DefaultDockerCommandRunner();
  }
  /**
   * Run `options.command` through `sh -lc` in a fresh `--rm` container of
   * `options.image`. Each entry of `options.env` is forwarded with `-e`.
   *
   * @param {{image: string, workspace: string, command: string, env?: object}} options
   * @returns {{exitCode: number, stdout: string, stderr: string, lines: string[]}}
   *   exitCode falls back to 1 when docker reported no status; lines is the
   *   non-empty stdout lines followed by non-empty stderr lines, plus a
   *   trailing "docker error: …" line when spawning failed.
   */
  execute(options) {
    const dockerArgs = [
      "run",
      "--rm",
      "-v",
      `${options.workspace}:/workspace`,
      "-w",
      "/workspace"
    ];
    for (const [name, value] of Object.entries(options.env ?? {})) {
      dockerArgs.push("-e", `${name}=${value}`);
    }
    dockerArgs.push(options.image, "sh", "-lc", options.command);
    const output = this.runner.run(dockerArgs, { cwd: options.workspace, env: options.env });
    const stdout = output.stdout ?? "";
    const stderr = output.stderr ?? "";
    const lines = [];
    for (const chunk of [stdout, stderr]) {
      for (const line of chunk.split("\n")) {
        if (line !== "") {
          lines.push(line);
        }
      }
    }
    if (output.error) {
      lines.push(`docker error: ${output.error}`);
    }
    return {
      exitCode: output.status ?? 1,
      stdout,
      stderr,
      lines
    };
  }
};
1102
// Annotate the CommonJS export names for ESM import in node:
// The `0 &&` prefix makes this assignment dead code at runtime; esbuild emits
// it only so Node's CJS/ESM interop can statically detect the named exports
// (cjs-module-lexer scans for this pattern). Do not "fix" or remove it.
0 && (module.exports = {
  DockerExecutor,
  MemoryLogFnClient,
  Runner,
  executeArtifactDownload,
  executeArtifactUpload,
  executeCacheRestore,
  executeCacheSave,
  executeCheckout,
  executeHostFnDeploy,
  executeRunStep,
  executeTestFnRun,
  redactSecrets
});
1117
+ //# sourceMappingURL=index.js.map