@cifn/runner 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +223 -0
- package/dist/index.js +1117 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +1091 -0
- package/dist/index.mjs.map +1 -0
- package/dist/server.d.mts +2 -0
- package/dist/server.js.map +1 -0
- package/dist/server.mjs +48 -0
- package/dist/server.mjs.map +1 -0
- package/package.json +41 -0
- package/src/artifacts-cache.test.ts +557 -0
- package/src/docker-executor.ts +76 -0
- package/src/executor/run-step.ts +34 -0
- package/src/index.ts +23 -0
- package/src/reporting/logfn-client.ts +37 -0
- package/src/reporting/redact.ts +12 -0
- package/src/runner.test.ts +957 -0
- package/src/runner.ts +626 -0
- package/src/secrets-steps.test.ts +603 -0
- package/src/server.ts +54 -0
- package/src/steps/artifact-download.ts +55 -0
- package/src/steps/artifact-upload.ts +89 -0
- package/src/steps/cache-restore.ts +61 -0
- package/src/steps/cache-save.ts +88 -0
- package/src/steps/checkout.ts +63 -0
- package/src/steps/hostfn-deploy.ts +52 -0
- package/src/steps/testfn-run.ts +179 -0
- package/tsconfig.json +18 -0
- package/tsup.config.ts +10 -0
- package/vitest.config.ts +9 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,1091 @@
|
|
|
1
|
+
// esbuild-generated module runtime helpers (bundler glue, not hand-written).

// Aliases for Object intrinsics used by the helpers below.
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;

// Lazily run a module initializer exactly once. `fn` is an object whose
// single own property is the init function; on the first call the function
// is invoked and `fn` is cleared (fn = 0) so subsequent calls simply return
// the cached result `res`. The `(0, ...)` indirection strips `this`.
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};

// Define every property in `all` on `target` as an enumerable getter —
// this is how the bundler materializes a module's live named exports.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
|
|
10
|
+
|
|
11
|
+
// src/steps/testfn-run.ts
|
|
12
|
+
// Namespace object for the bundled src/steps/testfn-run.ts module.
var testfn_run_exports = {};
// Register the module's named exports as live, enumerable getters.
__export(testfn_run_exports, {
  executeTestFnRun: () => executeTestFnRun,
  executeTestFnRunAsync: () => executeTestFnRunAsync
});
|
|
17
|
+
import { TestRunner, JsonReporter } from "@testfn/core";
|
|
18
|
+
import { join as join5 } from "path";
|
|
19
|
+
import { existsSync as existsSync3 } from "fs";
|
|
20
|
+
/**
 * Synchronously run a test suite through the @testfn/core TestRunner.
 *
 * @param {object} options - { framework?, testPattern?, reporter?,
 *   outputPath?, workspace, env?, parallel?, timeout?, retries? }
 * @returns {{success: boolean, exitCode: number, lines: string[], error?: string}}
 *   Never throws: unsupported frameworks, runner errors, and an
 *   unexpectedly-async runner are all reported via the result object.
 */
function executeTestFnRun(options) {
  const {
    framework = "vitest",
    testPattern,
    reporter,
    outputPath = "./testfn-results.json",
    workspace,
    env,
    parallel,
    timeout,
    retries
  } = options;
  const lines = [];
  lines.push(`Running tests with testfn SDK (framework: ${framework})`);
  try {
    if (!["vitest", "playwright", "jest"].includes(framework)) {
      throw new Error(`Unsupported framework: ${framework}. Supported: vitest, playwright, jest`);
    }
    const config = {
      framework,
      testPattern: testPattern || "./tests/**/*.{test,spec}.{ts,js}",
      parallel,
      timeout,
      retries,
      env,
      // Only the JSON reporter is wired up; its output path is resolved
      // inside the workspace.
      reporters: reporter === "json" ? [new JsonReporter(join5(workspace, outputPath))] : void 0
    };
    const originalCwd = process.cwd();
    let results;
    try {
      process.chdir(workspace);
      const runner = new TestRunner(config);
      results = runner.run();
    } finally {
      // try/finally replaces the original's duplicated chdir calls on the
      // success and error paths — the cwd is now restored on every path.
      process.chdir(originalCwd);
    }
    // A thenable means the runner is actually async; this sync wrapper
    // cannot await it, so surface that as an error.
    if (results && typeof results === "object" && "then" in results) {
      throw new Error("executeTestFnRun must be called from async context or use sync adapter");
    }
    lines.push(`Tests completed: ${results.summary?.total || 0} total`);
    lines.push(`Passed: ${results.summary?.passed || 0}, Failed: ${results.summary?.failed || 0}`);
    if (reporter === "json" && outputPath) {
      const fullPath = join5(workspace, outputPath);
      if (existsSync3(fullPath)) {
        lines.push(`JSON report written to ${outputPath}`);
      }
    }
    if (results.summary?.failed > 0) {
      return {
        success: false,
        exitCode: 1,
        lines,
        error: "Tests failed"
      };
    }
    return { success: true, exitCode: 0, lines };
  } catch (err) {
    const errorMessage = err instanceof Error ? err.message : String(err);
    lines.push(`Test execution failed: ${errorMessage}`);
    return {
      success: false,
      exitCode: 1,
      lines,
      error: errorMessage
    };
  }
}
|
|
89
|
+
/**
 * Asynchronously run a test suite through the @testfn/core TestRunner.
 *
 * Mirrors executeTestFnRun but awaits TestRunner#run.
 *
 * @param {object} options - { framework?, testPattern?, reporter?,
 *   outputPath?, workspace, env?, parallel?, timeout?, retries? }
 * @returns {Promise<{success: boolean, exitCode: number, lines: string[], error?: string}>}
 *   Never rejects; all failures are reported via the result object.
 */
async function executeTestFnRunAsync(options) {
  const {
    framework = "vitest",
    testPattern,
    reporter,
    outputPath = "./testfn-results.json",
    workspace,
    env,
    parallel,
    timeout,
    retries
  } = options;
  const lines = [];
  lines.push(`Running tests with testfn SDK (framework: ${framework})`);
  try {
    if (!["vitest", "playwright", "jest"].includes(framework)) {
      throw new Error(`Unsupported framework: ${framework}. Supported: vitest, playwright, jest`);
    }
    const config = {
      framework,
      testPattern: testPattern || "./tests/**/*.{test,spec}.{ts,js}",
      parallel,
      timeout,
      retries,
      env,
      // Only the JSON reporter is wired up; its output path is resolved
      // inside the workspace.
      reporters: reporter === "json" ? [new JsonReporter(join5(workspace, outputPath))] : void 0
    };
    const originalCwd = process.cwd();
    let results;
    try {
      process.chdir(workspace);
      const runner = new TestRunner(config);
      results = await runner.run();
    } finally {
      // try/finally replaces the original's duplicated chdir calls on the
      // success and error paths — the cwd is now restored on every path.
      process.chdir(originalCwd);
    }
    // Guard summary access with ?. like the sync variant does: a runner
    // result without a summary previously crashed here with a TypeError.
    lines.push(`Tests completed: ${results.summary?.total || 0} total`);
    lines.push(`Passed: ${results.summary?.passed || 0}, Failed: ${results.summary?.failed || 0}`);
    if (reporter === "json" && outputPath) {
      const fullPath = join5(workspace, outputPath);
      if (existsSync3(fullPath)) {
        lines.push(`JSON report written to ${outputPath}`);
      }
    }
    if ((results.summary?.failed || 0) > 0) {
      return {
        success: false,
        exitCode: 1,
        lines,
        error: "Tests failed"
      };
    }
    return { success: true, exitCode: 0, lines };
  } catch (err) {
    const errorMessage = err instanceof Error ? err.message : String(err);
    lines.push(`Test execution failed: ${errorMessage}`);
    return {
      success: false,
      exitCode: 1,
      lines,
      error: errorMessage
    };
  }
}
|
|
154
|
+
// Bundler-generated lazy initializer for src/steps/testfn-run.ts.
// Calling init_testfn_run() runs the module body at most once; that body
// contains nothing beyond the "use strict" directive here.
var init_testfn_run = __esm({
  "src/steps/testfn-run.ts"() {
    "use strict";
  }
});
|
|
159
|
+
|
|
160
|
+
// src/runner.ts
|
|
161
|
+
import { mkdtempSync, rmSync } from "fs";
|
|
162
|
+
import { tmpdir } from "os";
|
|
163
|
+
import { join as join6 } from "path";
|
|
164
|
+
import {
|
|
165
|
+
DEFAULT_QUEUE_NAME,
|
|
166
|
+
getReadyJobs,
|
|
167
|
+
evaluateConditionSync,
|
|
168
|
+
interpolate,
|
|
169
|
+
hashFiles
|
|
170
|
+
} from "cifn";
|
|
171
|
+
|
|
172
|
+
// src/executor/run-step.ts
|
|
173
|
+
import { execSync } from "child_process";
|
|
174
|
+
/**
 * Execute a shell command synchronously in `workspacePath` via /bin/sh.
 *
 * @param {string} command - Shell command line to run.
 * @param {string} workspacePath - Working directory for the command.
 * @param {object} [env] - Extra environment variables merged over process.env.
 * @returns {{exitCode: number, stdout: string, stderr: string, lines: string[]}}
 *   Never throws: a non-zero exit (or spawn failure) is reported through
 *   exitCode, with captured stdout/stderr split into non-empty lines.
 */
function executeRunStep(command, workspacePath, env) {
  const mergedEnv = env ? { ...process.env, ...env } : process.env;
  const toLines = (text) => text.split("\n").filter((line) => line !== "");
  try {
    const stdout = execSync(command, {
      cwd: workspacePath,
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
      timeout: 3e5, // 5-minute hard cap per step
      shell: "/bin/sh",
      env: mergedEnv
    });
    return { exitCode: 0, stdout, stderr: "", lines: toLines(stdout) };
  } catch (err) {
    // execSync throws on non-zero exit; the error object carries the
    // captured streams and (when the process ran) its exit status.
    const failure = err;
    const stdout = typeof failure.stdout === "string" ? failure.stdout : "";
    const stderr = typeof failure.stderr === "string" ? failure.stderr : "";
    const exitCode = typeof failure.status === "number" ? failure.status : 1;
    return {
      exitCode,
      stdout,
      stderr,
      lines: [...toLines(stdout), ...toLines(stderr)]
    };
  }
}
|
|
199
|
+
|
|
200
|
+
// src/steps/checkout.ts
|
|
201
|
+
import { execSync as execSync2 } from "child_process";
|
|
202
|
+
/**
 * Shallow-clone a git repository at `ref` into `workspace`.
 *
 * @param {object} options - { repo, ref, workspace, token? }. When `token`
 *   is given and the repo URL is https, credentials are embedded as the
 *   x-access-token user and both the token and the resulting URL are
 *   redacted ("***") from every log line.
 * @returns {{success: boolean, lines: string[], error?: string}} Never throws.
 */
function executeCheckout(options) {
  const { repo, ref, workspace, token } = options;
  const lines = [];
  const secretsToRedact = [];
  let cloneUrl = repo;
  if (token && cloneUrl.startsWith("https://")) {
    const url = new URL(cloneUrl);
    url.username = "x-access-token";
    url.password = token;
    cloneUrl = url.toString();
    secretsToRedact.push(token);
    secretsToRedact.push(cloneUrl);
  }
  const redactLine = (line) => {
    let redacted = line;
    for (const secret of secretsToRedact) {
      if (secret.length > 0) {
        redacted = redacted.split(secret).join("***");
      }
    }
    return redacted;
  };
  // Single-quote a value for /bin/sh so refs or URLs containing shell
  // metacharacters cannot inject extra commands into the clone line
  // (the original interpolated both raw into the command string).
  const shellQuote = (value) => `'${value.split("'").join("'\\''")}'`;
  try {
    lines.push(`Cloning ${repo} at ref ${ref}`);
    const cloneCmd = `git clone --depth 1 --branch ${shellQuote(ref)} ${shellQuote(cloneUrl)} .`;
    const output = execSync2(cloneCmd, {
      cwd: workspace,
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
      timeout: 12e4 // 2-minute cap on the clone
    });
    if (output) {
      lines.push(...output.split("\n").filter((l) => l !== "").map(redactLine));
    }
    lines.push(`Checkout complete: ${ref}`);
    return { success: true, lines };
  } catch (err) {
    const error = err;
    // Prefer git's stderr; fall back to the spawn error message. Either
    // way the text is redacted before it can reach any log sink.
    const errMsg = typeof error.stderr === "string" ? redactLine(error.stderr) : error.message ? redactLine(error.message) : "Unknown error";
    lines.push(`Checkout failed: ${errMsg}`);
    return { success: false, lines, error: errMsg };
  }
}
|
|
245
|
+
|
|
246
|
+
// src/steps/artifact-upload.ts
|
|
247
|
+
import { existsSync, readFileSync, readdirSync, statSync } from "fs";
|
|
248
|
+
import { join, relative } from "path";
|
|
249
|
+
// Recursively gather the absolute paths of every regular file under
// `dirPath`. A path that is itself a file yields a one-element array;
// a missing or special (non-file, non-dir) path yields [].
function collectFiles(dirPath) {
  if (!existsSync(dirPath)) return [];
  const stat = statSync(dirPath);
  if (stat.isFile()) return [dirPath];
  if (!stat.isDirectory()) return [];
  const found = [];
  for (const entry of readdirSync(dirPath, { withFileTypes: true })) {
    const entryPath = join(dirPath, entry.name);
    if (entry.isFile()) {
      found.push(entryPath);
    } else if (entry.isDirectory()) {
      found.push(...collectFiles(entryPath));
    }
    // Symlinks and other special entries are intentionally skipped.
  }
  return found;
}
|
|
265
|
+
/**
 * Pack a file or directory into a single artifact blob and upload it.
 *
 * Blob layout: [4-byte BE manifest length][manifest JSON][file payloads],
 * where the manifest lists { relativePath, offset, size } per file.
 *
 * @param {object} options - { name, path, workspace, runId, fileFnClient }.
 * @returns {Promise<{success: boolean, fileId?: any, lines: string[], error?: string}>}
 *   Never rejects; validation and upload failures are reported in the result.
 */
async function executeArtifactUpload(options) {
  const { name, path: artifactPath, workspace, runId, fileFnClient } = options;
  const lines = [];
  const fail = (msg) => {
    lines.push(msg);
    return { success: false, lines, error: msg };
  };
  if (name.length > 256) {
    return fail(`Artifact name exceeds maximum length of 256 characters (actual: ${name.length})`);
  }
  const fullPath = join(workspace, artifactPath);
  if (!existsSync(fullPath)) {
    return fail(`Artifact path not found: ${artifactPath}`);
  }
  try {
    lines.push(`Uploading artifact "${name}" from ${artifactPath}`);
    const files = collectFiles(fullPath);
    const manifest = [];
    const buffers = [];
    let offset = 0;
    for (const file of files) {
      const data = readFileSync(file);
      // Paths are stored relative to the uploaded root; when the root IS
      // the file (relative() yields ""), fall back to workspace-relative.
      const rel = relative(fullPath, file) || relative(workspace, file);
      manifest.push({ relativePath: rel, offset, size: data.length });
      buffers.push(data);
      offset += data.length;
    }
    const manifestBuf = Buffer.from(JSON.stringify(manifest));
    const headerBuf = Buffer.alloc(4);
    headerBuf.writeUInt32BE(manifestBuf.length, 0);
    const combined = Buffer.concat([headerBuf, manifestBuf, ...buffers]);
    const fileId = await fileFnClient.upload(`artifact:${runId}`, name, combined);
    lines.push(`Uploaded ${files.length} file(s), artifact fileId: ${fileId}`);
    return { success: true, fileId, lines };
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    lines.push(`Upload failed: ${msg}`);
    return { success: false, lines, error: msg };
  }
}
|
|
306
|
+
|
|
307
|
+
// src/steps/artifact-download.ts
|
|
308
|
+
import { mkdirSync, writeFileSync } from "fs";
|
|
309
|
+
import { join as join2, dirname } from "path";
|
|
310
|
+
/**
 * Download an artifact blob for `runId` and unpack it into `workspace`.
 *
 * Expects the packed layout produced on upload:
 * [4-byte BE manifest length][manifest JSON][file payloads].
 *
 * @param {object} options - { name, workspace, runId, fileFnClient }.
 * @returns {Promise<{success: boolean, lines: string[], error?: string}>}
 *   Never rejects; a missing artifact or a malformed blob is reported
 *   through the result object.
 */
async function executeArtifactDownload(options) {
  const { name, workspace, runId, fileFnClient } = options;
  const lines = [];
  try {
    lines.push(`Downloading artifact "${name}"`);
    const blob = await fileFnClient.downloadByKey(`artifact:${runId}`, name);
    if (!blob) {
      const msg = `Artifact "${name}" not found`;
      lines.push(msg);
      return { success: false, lines, error: msg };
    }
    const manifestLen = blob.readUInt32BE(0);
    const manifest = JSON.parse(blob.subarray(4, 4 + manifestLen).toString("utf-8"));
    const payloadStart = 4 + manifestLen;
    for (const { relativePath, offset, size } of manifest) {
      const target = join2(workspace, relativePath);
      mkdirSync(dirname(target), { recursive: true });
      writeFileSync(target, blob.subarray(payloadStart + offset, payloadStart + offset + size));
    }
    lines.push(`Downloaded and extracted ${manifest.length} file(s)`);
    return { success: true, lines };
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    lines.push(`Download failed: ${msg}`);
    return { success: false, lines, error: msg };
  }
}
|
|
340
|
+
|
|
341
|
+
// src/steps/cache-save.ts
|
|
342
|
+
import { existsSync as existsSync2, readFileSync as readFileSync2, readdirSync as readdirSync2, statSync as statSync2 } from "fs";
|
|
343
|
+
import { join as join3, relative as relative2 } from "path";
|
|
344
|
+
// Recursively gather every regular file under `dirPath`, reading each
// file's bytes. Entries report paths relative to `basePath`. A `dirPath`
// that is itself a file yields a single entry; a missing or special path
// yields [].
function collectFiles2(dirPath, basePath) {
  if (!existsSync2(dirPath)) return [];
  const stat = statSync2(dirPath);
  if (stat.isFile()) {
    return [{ relativePath: relative2(basePath, dirPath), data: readFileSync2(dirPath) }];
  }
  if (!stat.isDirectory()) return [];
  const collected = [];
  for (const entry of readdirSync2(dirPath, { withFileTypes: true })) {
    const entryPath = join3(dirPath, entry.name);
    if (entry.isFile()) {
      collected.push({ relativePath: relative2(basePath, entryPath), data: readFileSync2(entryPath) });
    } else if (entry.isDirectory()) {
      collected.push(...collectFiles2(entryPath, basePath));
    }
  }
  return collected;
}
|
|
363
|
+
/**
 * Pack the given workspace paths into one blob and upload it under the
 * cache namespace keyed by `key`.
 *
 * Blob layout: [4-byte BE manifest length][manifest JSON][file payloads].
 *
 * @param {object} options - { key, paths, workspace, fileFnClient }.
 * @returns {Promise<{success: boolean, lines: string[], error?: string}>}
 *   Never rejects. An over-long key (> 1 KiB UTF-8) fails; matching no
 *   files at all is treated as success with a note.
 */
async function executeCacheSave(options) {
  const { key, paths, workspace, fileFnClient } = options;
  const lines = [];
  const keyByteLength = Buffer.byteLength(key, "utf8");
  if (keyByteLength > 1024) {
    const msg = `Cache key exceeds maximum length of 1KB (actual: ${keyByteLength} bytes)`;
    lines.push(msg);
    return { success: false, lines, error: msg };
  }
  try {
    lines.push(`Saving cache with key "${key}"`);
    const allFiles = paths.flatMap((p) => collectFiles2(join3(workspace, p), workspace));
    if (allFiles.length === 0) {
      lines.push("No files found to cache");
      return { success: true, lines };
    }
    const manifest = [];
    const buffers = [];
    let offset = 0;
    for (const f of allFiles) {
      manifest.push({ relativePath: f.relativePath, offset, size: f.data.length });
      buffers.push(f.data);
      offset += f.data.length;
    }
    const manifestBuf = Buffer.from(JSON.stringify(manifest));
    const headerBuf = Buffer.alloc(4);
    headerBuf.writeUInt32BE(manifestBuf.length, 0);
    await fileFnClient.upload("cache", key, Buffer.concat([headerBuf, manifestBuf, ...buffers]));
    lines.push(`Cached ${allFiles.length} file(s) under key "${key}"`);
    return { success: true, lines };
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    lines.push(`Cache save failed: ${msg}`);
    return { success: false, lines, error: msg };
  }
}
|
|
405
|
+
|
|
406
|
+
// src/steps/cache-restore.ts
|
|
407
|
+
import { mkdirSync as mkdirSync2, writeFileSync as writeFileSync2 } from "fs";
|
|
408
|
+
import { join as join4, dirname as dirname2 } from "path";
|
|
409
|
+
/**
 * Restore a previously saved cache blob into `workspace`.
 *
 * Expects the packed layout written by cache save:
 * [4-byte BE manifest length][manifest JSON][file payloads].
 *
 * @param {object} options - { key, workspace, fileFnClient }.
 * @returns {Promise<{success: boolean, hit: boolean, lines: string[], error?: string}>}
 *   Never rejects. A cache miss is success with hit=false; an over-long
 *   key (> 1 KiB UTF-8) or an unpack error is a failure.
 */
async function executeCacheRestore(options) {
  const { key, workspace, fileFnClient } = options;
  const lines = [];
  const keyByteLength = Buffer.byteLength(key, "utf8");
  if (keyByteLength > 1024) {
    const msg = `Cache key exceeds maximum length of 1KB (actual: ${keyByteLength} bytes)`;
    lines.push(msg);
    return { success: false, hit: false, lines, error: msg };
  }
  try {
    lines.push(`Restoring cache with key "${key}"`);
    const blob = await fileFnClient.downloadByKey("cache", key);
    if (!blob) {
      lines.push(`Cache miss for key "${key}"`);
      return { success: true, hit: false, lines };
    }
    const manifestLen = blob.readUInt32BE(0);
    const manifest = JSON.parse(blob.subarray(4, 4 + manifestLen).toString("utf-8"));
    const payloadStart = 4 + manifestLen;
    for (const { relativePath, offset, size } of manifest) {
      const target = join4(workspace, relativePath);
      mkdirSync2(dirname2(target), { recursive: true });
      writeFileSync2(target, blob.subarray(payloadStart + offset, payloadStart + offset + size));
    }
    lines.push(`Cache hit: restored ${manifest.length} file(s)`);
    return { success: true, hit: true, lines };
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    lines.push(`Cache restore failed: ${msg}`);
    return { success: false, hit: false, lines, error: msg };
  }
}
|
|
443
|
+
|
|
444
|
+
// src/steps/hostfn-deploy.ts
|
|
445
|
+
import { execSync as execSync3 } from "child_process";
|
|
446
|
+
/**
 * Run `hostfn deploy <environment>` synchronously in the workspace.
 *
 * @param {object} options - { environment, ci?=true, local?=false,
 *   workspace, env? }. Extra env vars are merged over process.env.
 * @returns {{success: boolean, exitCode: number, lines: string[], error?: string}}
 *   Never throws; captured output and the failure reason are returned.
 * NOTE(review): `environment` is interpolated into a shell command —
 * callers must ensure it comes from trusted pipeline configuration.
 */
function executeHostFnDeploy(options) {
  const { environment, ci = true, local = false, workspace, env } = options;
  const lines = [];
  // Flag order matters for log stability: --local before --ci.
  const command = [`hostfn deploy ${environment}`, local ? "--local" : "", ci ? "--ci" : ""]
    .filter((part) => part !== "")
    .join(" ");
  lines.push(`Deploying: ${command}`);
  try {
    const output = execSync3(command, {
      cwd: workspace,
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"],
      timeout: 6e5, // 10-minute deploy cap
      shell: "/bin/sh",
      env: env ? { ...process.env, ...env } : process.env
    });
    if (output) {
      lines.push(...output.split("\n").filter((l) => l !== ""));
    }
    lines.push("Deploy succeeded");
    return { success: true, exitCode: 0, lines };
  } catch (err) {
    const failure = err;
    const exitCode = typeof failure.status === "number" ? failure.status : 1;
    // Surface whatever the process printed before failing, stdout first.
    for (const stream of [failure.stdout, failure.stderr]) {
      if (typeof stream === "string" && stream) {
        lines.push(...stream.split("\n").filter((l) => l !== ""));
      }
    }
    const message = `Deploy failed with exit code ${exitCode}`;
    lines.push(message);
    return { success: false, exitCode, lines, error: message };
  }
}
|
|
478
|
+
|
|
479
|
+
// src/reporting/redact.ts
|
|
480
|
+
// Replace every occurrence of each known secret value in each line with
// "***". Empty-string secrets are ignored. Returns the input array
// unchanged when there are no secrets; otherwise returns a new array and
// leaves the input untouched.
function redactSecrets(lines, secretValues) {
  if (secretValues.length === 0) return lines;
  const activeSecrets = secretValues.filter((secret) => secret.length > 0);
  return lines.map((line) =>
    activeSecrets.reduce((acc, secret) => acc.split(secret).join("***"), line)
  );
}
|
|
492
|
+
|
|
493
|
+
// src/runner.ts
|
|
494
|
+
var Runner = class {
|
|
495
|
+
constructor(options) {
|
|
496
|
+
this.store = options.store;
|
|
497
|
+
this.queue = options.queue;
|
|
498
|
+
this.logClient = options.logClient;
|
|
499
|
+
this.pipelineSpecs = options.pipelineSpecs ?? /* @__PURE__ */ new Map();
|
|
500
|
+
this.queueName = options.queueName ?? DEFAULT_QUEUE_NAME;
|
|
501
|
+
this.cleanWorkspace = options.cleanWorkspace ?? true;
|
|
502
|
+
this.fileFnClient = options.fileFnClient;
|
|
503
|
+
this.artifactStore = options.artifactStore;
|
|
504
|
+
this.secretValues = options.secretValues ?? /* @__PURE__ */ new Map();
|
|
505
|
+
this.getSecret = options.getSecret;
|
|
506
|
+
this.runnerType = options.runnerType ?? "default";
|
|
507
|
+
this.labels = options.labels && options.labels.length > 0 ? [...new Set(options.labels)] : [this.runnerType];
|
|
508
|
+
this.dockerExecutor = options.dockerExecutor;
|
|
509
|
+
this.defaultDockerImage = options.defaultDockerImage ?? "node:20";
|
|
510
|
+
this.dockerRunOnLabels = new Set(options.dockerRunOnLabels ?? ["docker-ubuntu-22"]);
|
|
511
|
+
this.dockerForDefault = options.dockerForDefault ?? false;
|
|
512
|
+
}
|
|
513
|
+
registerPipelineSpec(runId, spec) {
|
|
514
|
+
this.pipelineSpecs.set(runId, spec);
|
|
515
|
+
}
|
|
516
|
+
registerSecretValues(runId, values) {
|
|
517
|
+
this.secretValues.set(runId, values);
|
|
518
|
+
}
|
|
519
|
+
async processNextJob() {
|
|
520
|
+
const payload = await this.queue.dequeueMatching(
|
|
521
|
+
this.queueName,
|
|
522
|
+
(job) => this.labels.includes(job.jobSpec["runs-on"])
|
|
523
|
+
);
|
|
524
|
+
if (!payload) return false;
|
|
525
|
+
await this.executeJob(payload);
|
|
526
|
+
return true;
|
|
527
|
+
}
|
|
528
|
+
async processAllJobs() {
|
|
529
|
+
let count = 0;
|
|
530
|
+
while (await this.processNextJob()) {
|
|
531
|
+
count++;
|
|
532
|
+
}
|
|
533
|
+
return count;
|
|
534
|
+
}
|
|
535
|
+
async executeJob(payload) {
|
|
536
|
+
const { runId, jobKey, jobSpec } = payload;
|
|
537
|
+
let jobEnv = { ...payload.env };
|
|
538
|
+
const secretVals = this.secretValues.get(runId) ?? [];
|
|
539
|
+
if (payload.secretKeys && jobEnv) {
|
|
540
|
+
for (const key of payload.secretKeys) {
|
|
541
|
+
const val = jobEnv[key];
|
|
542
|
+
if (val && !secretVals.includes(val)) {
|
|
543
|
+
secretVals.push(val);
|
|
544
|
+
}
|
|
545
|
+
}
|
|
546
|
+
}
|
|
547
|
+
const logLines = (stepKey, lines) => {
|
|
548
|
+
this.logClient.appendLines(runId, jobKey, stepKey, redactSecrets(lines, secretVals));
|
|
549
|
+
};
|
|
550
|
+
const run = this.store.getRun(runId);
|
|
551
|
+
if (!run) return;
|
|
552
|
+
const job = run.jobs.find((j) => j.jobKey === jobKey);
|
|
553
|
+
if (!job) return;
|
|
554
|
+
const github = run.trigger?.payload?.github ?? {};
|
|
555
|
+
if (!github.ref && payload.pipelineRef?.ref) github.ref = payload.pipelineRef.ref;
|
|
556
|
+
if (!github.repository && payload.pipelineRef?.repo) github.repository = payload.pipelineRef.repo;
|
|
557
|
+
if (!github.event_name && run.trigger?.type) github.event_name = run.trigger.type;
|
|
558
|
+
if (jobSpec.if !== void 0 && jobSpec.if !== null) {
|
|
559
|
+
try {
|
|
560
|
+
const runJob = evaluateConditionSync(String(jobSpec.if).trim(), { github, stepOutcomes: [] });
|
|
561
|
+
if (!runJob) {
|
|
562
|
+
job.status = "skipped";
|
|
563
|
+
job.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
564
|
+
logLines("__job__", [`Job "${jobKey}" skipped (if: false)`]);
|
|
565
|
+
this.enqueueDependentJobs(runId, jobKey);
|
|
566
|
+
this.checkRunCompletion(runId);
|
|
567
|
+
return;
|
|
568
|
+
}
|
|
569
|
+
} catch {
|
|
570
|
+
job.status = "failure";
|
|
571
|
+
job.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
572
|
+
logLines("__job__", [`Job "${jobKey}" failed (invalid if expression)`]);
|
|
573
|
+
this.store.updateRunStatus(runId, "failure");
|
|
574
|
+
return;
|
|
575
|
+
}
|
|
576
|
+
}
|
|
577
|
+
if (run.status === "queued") {
|
|
578
|
+
this.store.updateRunStatus(runId, "running");
|
|
579
|
+
}
|
|
580
|
+
job.status = "running";
|
|
581
|
+
job.startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
582
|
+
logLines("__job__", [`Job "${jobKey}" started`]);
|
|
583
|
+
const workspace = mkdtempSync(join6(tmpdir(), `cifn-${runId}-${jobKey}-`));
|
|
584
|
+
let jobFailed = false;
|
|
585
|
+
const interpolateContext = {
|
|
586
|
+
github,
|
|
587
|
+
getSecret: this.getSecret ? (name) => this.getSecret(runId, name) : void 0,
|
|
588
|
+
workspaceRoot: workspace,
|
|
589
|
+
hashFiles: (glob) => Promise.resolve(hashFiles(workspace, glob))
|
|
590
|
+
};
|
|
591
|
+
if (jobSpec.env) {
|
|
592
|
+
for (const [k, v] of Object.entries(jobSpec.env)) {
|
|
593
|
+
try {
|
|
594
|
+
const { result, secretValues: sv } = await interpolate(String(v), interpolateContext);
|
|
595
|
+
jobEnv[k] = result;
|
|
596
|
+
secretVals.push(...sv);
|
|
597
|
+
} catch {
|
|
598
|
+
jobEnv[k] = String(v);
|
|
599
|
+
}
|
|
600
|
+
}
|
|
601
|
+
}
|
|
602
|
+
try {
|
|
603
|
+
for (let i = 0; i < jobSpec.steps.length; i++) {
|
|
604
|
+
const stepSpec = jobSpec.steps[i];
|
|
605
|
+
const step = job.steps[i];
|
|
606
|
+
if (!step) continue;
|
|
607
|
+
const effectiveIf = "if" in stepSpec && stepSpec.if != null ? String(stepSpec.if).trim() : "success()";
|
|
608
|
+
const stepOutcomes = job.steps.slice(0, i).map((s) => s.status);
|
|
609
|
+
try {
|
|
610
|
+
const runStep = evaluateConditionSync(effectiveIf, { github, stepOutcomes });
|
|
611
|
+
if (!runStep) {
|
|
612
|
+
step.status = "skipped";
|
|
613
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
614
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" skipped (if: false)`]);
|
|
615
|
+
continue;
|
|
616
|
+
}
|
|
617
|
+
} catch {
|
|
618
|
+
step.status = "failure";
|
|
619
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
620
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed (invalid if expression)`]);
|
|
621
|
+
jobFailed = true;
|
|
622
|
+
continue;
|
|
623
|
+
}
|
|
624
|
+
if ("uses" in stepSpec) {
|
|
625
|
+
if (stepSpec.uses === "checkout") {
|
|
626
|
+
step.status = "running";
|
|
627
|
+
step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
628
|
+
logLines(step.stepKey, [
|
|
629
|
+
`Step "${step.stepKey}" started: checkout`
|
|
630
|
+
]);
|
|
631
|
+
const repo = stepSpec.with?.repository ?? payload.pipelineRef?.repo ?? "";
|
|
632
|
+
const ref = stepSpec.with?.ref ?? payload.pipelineRef?.ref ?? "main";
|
|
633
|
+
const token = stepSpec.with?.token;
|
|
634
|
+
const checkoutResult = executeCheckout({ repo, ref, workspace, token });
|
|
635
|
+
logLines(step.stepKey, checkoutResult.lines);
|
|
636
|
+
if (checkoutResult.success) {
|
|
637
|
+
step.status = "success";
|
|
638
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
639
|
+
logLines(step.stepKey, [
|
|
640
|
+
`Step "${step.stepKey}" completed successfully`
|
|
641
|
+
]);
|
|
642
|
+
} else {
|
|
643
|
+
step.status = "failure";
|
|
644
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
645
|
+
logLines(step.stepKey, [
|
|
646
|
+
`Step "${step.stepKey}" failed: ${checkoutResult.error}`
|
|
647
|
+
]);
|
|
648
|
+
jobFailed = true;
|
|
649
|
+
}
|
|
650
|
+
continue;
|
|
651
|
+
}
|
|
652
|
+
if (stepSpec.uses === "artifact/upload" && this.fileFnClient) {
|
|
653
|
+
step.status = "running";
|
|
654
|
+
step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
655
|
+
let artName = stepSpec.with?.name ?? "default";
|
|
656
|
+
let artPath = stepSpec.with?.path ?? ".";
|
|
657
|
+
try {
|
|
658
|
+
const nameRes = await interpolate(artName, interpolateContext);
|
|
659
|
+
artName = nameRes.result;
|
|
660
|
+
secretVals.push(...nameRes.secretValues);
|
|
661
|
+
const pathRes = await interpolate(artPath, interpolateContext);
|
|
662
|
+
artPath = pathRes.result;
|
|
663
|
+
secretVals.push(...pathRes.secretValues);
|
|
664
|
+
} catch {
|
|
665
|
+
step.status = "failure";
|
|
666
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
667
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
|
|
668
|
+
jobFailed = true;
|
|
669
|
+
continue;
|
|
670
|
+
}
|
|
671
|
+
logLines(step.stepKey, [
|
|
672
|
+
`Step "${step.stepKey}" started: artifact/upload "${artName}"`
|
|
673
|
+
]);
|
|
674
|
+
const uploadResult = await executeArtifactUpload({
|
|
675
|
+
name: artName,
|
|
676
|
+
path: artPath,
|
|
677
|
+
workspace,
|
|
678
|
+
runId,
|
|
679
|
+
fileFnClient: this.fileFnClient
|
|
680
|
+
});
|
|
681
|
+
logLines(step.stepKey, uploadResult.lines);
|
|
682
|
+
if (uploadResult.success) {
|
|
683
|
+
if (uploadResult.fileId && this.artifactStore) {
|
|
684
|
+
this.artifactStore.addArtifact(runId, { name: artName, fileId: uploadResult.fileId });
|
|
685
|
+
}
|
|
686
|
+
step.status = "success";
|
|
687
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
688
|
+
logLines(step.stepKey, [
|
|
689
|
+
`Step "${step.stepKey}" completed successfully`
|
|
690
|
+
]);
|
|
691
|
+
} else {
|
|
692
|
+
step.status = "failure";
|
|
693
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
694
|
+
logLines(step.stepKey, [
|
|
695
|
+
`Step "${step.stepKey}" failed: ${uploadResult.error}`
|
|
696
|
+
]);
|
|
697
|
+
jobFailed = true;
|
|
698
|
+
}
|
|
699
|
+
continue;
|
|
700
|
+
}
|
|
701
|
+
if (stepSpec.uses === "artifact/download" && this.fileFnClient) {
|
|
702
|
+
step.status = "running";
|
|
703
|
+
step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
704
|
+
let artName = stepSpec.with?.name ?? "default";
|
|
705
|
+
try {
|
|
706
|
+
const nameRes = await interpolate(artName, interpolateContext);
|
|
707
|
+
artName = nameRes.result;
|
|
708
|
+
secretVals.push(...nameRes.secretValues);
|
|
709
|
+
} catch {
|
|
710
|
+
step.status = "failure";
|
|
711
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
712
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
|
|
713
|
+
jobFailed = true;
|
|
714
|
+
continue;
|
|
715
|
+
}
|
|
716
|
+
logLines(step.stepKey, [
|
|
717
|
+
`Step "${step.stepKey}" started: artifact/download "${artName}"`
|
|
718
|
+
]);
|
|
719
|
+
const downloadResult = await executeArtifactDownload({
|
|
720
|
+
name: artName,
|
|
721
|
+
workspace,
|
|
722
|
+
runId,
|
|
723
|
+
fileFnClient: this.fileFnClient
|
|
724
|
+
});
|
|
725
|
+
logLines(step.stepKey, downloadResult.lines);
|
|
726
|
+
if (downloadResult.success) {
|
|
727
|
+
step.status = "success";
|
|
728
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
729
|
+
logLines(step.stepKey, [
|
|
730
|
+
`Step "${step.stepKey}" completed successfully`
|
|
731
|
+
]);
|
|
732
|
+
} else {
|
|
733
|
+
step.status = "failure";
|
|
734
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
735
|
+
logLines(step.stepKey, [
|
|
736
|
+
`Step "${step.stepKey}" failed: ${downloadResult.error}`
|
|
737
|
+
]);
|
|
738
|
+
jobFailed = true;
|
|
739
|
+
}
|
|
740
|
+
continue;
|
|
741
|
+
}
|
|
742
|
+
if (stepSpec.uses === "cache/save" && this.fileFnClient) {
|
|
743
|
+
step.status = "running";
|
|
744
|
+
step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
745
|
+
let cacheKey = stepSpec.with?.key ?? "";
|
|
746
|
+
try {
|
|
747
|
+
const keyRes = await interpolate(cacheKey, interpolateContext);
|
|
748
|
+
cacheKey = keyRes.result;
|
|
749
|
+
secretVals.push(...keyRes.secretValues);
|
|
750
|
+
} catch {
|
|
751
|
+
step.status = "failure";
|
|
752
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
753
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
|
|
754
|
+
jobFailed = true;
|
|
755
|
+
continue;
|
|
756
|
+
}
|
|
757
|
+
const cachePaths = stepSpec.with?.paths ?? [];
|
|
758
|
+
logLines(step.stepKey, [
|
|
759
|
+
`Step "${step.stepKey}" started: cache/save "${cacheKey}"`
|
|
760
|
+
]);
|
|
761
|
+
const saveResult = await executeCacheSave({
|
|
762
|
+
key: cacheKey,
|
|
763
|
+
paths: cachePaths,
|
|
764
|
+
workspace,
|
|
765
|
+
fileFnClient: this.fileFnClient
|
|
766
|
+
});
|
|
767
|
+
logLines(step.stepKey, saveResult.lines);
|
|
768
|
+
step.status = saveResult.success ? "success" : "failure";
|
|
769
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
770
|
+
if (!saveResult.success) {
|
|
771
|
+
logLines(step.stepKey, [
|
|
772
|
+
`Step "${step.stepKey}" failed: ${saveResult.error}`
|
|
773
|
+
]);
|
|
774
|
+
jobFailed = true;
|
|
775
|
+
} else {
|
|
776
|
+
logLines(step.stepKey, [
|
|
777
|
+
`Step "${step.stepKey}" completed successfully`
|
|
778
|
+
]);
|
|
779
|
+
}
|
|
780
|
+
continue;
|
|
781
|
+
}
|
|
782
|
+
if (stepSpec.uses === "cache/restore" && this.fileFnClient) {
|
|
783
|
+
step.status = "running";
|
|
784
|
+
step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
785
|
+
let cacheKey = stepSpec.with?.key ?? "";
|
|
786
|
+
try {
|
|
787
|
+
const keyRes = await interpolate(cacheKey, interpolateContext);
|
|
788
|
+
cacheKey = keyRes.result;
|
|
789
|
+
secretVals.push(...keyRes.secretValues);
|
|
790
|
+
} catch {
|
|
791
|
+
step.status = "failure";
|
|
792
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
793
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
|
|
794
|
+
jobFailed = true;
|
|
795
|
+
continue;
|
|
796
|
+
}
|
|
797
|
+
logLines(step.stepKey, [
|
|
798
|
+
`Step "${step.stepKey}" started: cache/restore "${cacheKey}"`
|
|
799
|
+
]);
|
|
800
|
+
const restoreResult = await executeCacheRestore({
|
|
801
|
+
key: cacheKey,
|
|
802
|
+
workspace,
|
|
803
|
+
fileFnClient: this.fileFnClient
|
|
804
|
+
});
|
|
805
|
+
logLines(step.stepKey, restoreResult.lines);
|
|
806
|
+
step.status = "success";
|
|
807
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
808
|
+
logLines(step.stepKey, [
|
|
809
|
+
`Step "${step.stepKey}" completed successfully${restoreResult.hit ? " (cache hit)" : " (cache miss)"}`
|
|
810
|
+
]);
|
|
811
|
+
continue;
|
|
812
|
+
}
|
|
813
|
+
if (stepSpec.uses === "testfn/run") {
|
|
814
|
+
step.status = "running";
|
|
815
|
+
step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
816
|
+
const framework = stepSpec.with?.framework;
|
|
817
|
+
const testPattern = stepSpec.with?.testPattern;
|
|
818
|
+
const reporter = stepSpec.with?.reporter;
|
|
819
|
+
const outputPath = stepSpec.with?.outputPath;
|
|
820
|
+
const parallel = stepSpec.with?.parallel;
|
|
821
|
+
const timeout = stepSpec.with?.timeout;
|
|
822
|
+
const retries = stepSpec.with?.retries;
|
|
823
|
+
logLines(step.stepKey, [
|
|
824
|
+
`Step "${step.stepKey}" started: testfn/run`
|
|
825
|
+
]);
|
|
826
|
+
const { executeTestFnRunAsync: executeTestFnRunAsync2 } = await Promise.resolve().then(() => (init_testfn_run(), testfn_run_exports));
|
|
827
|
+
const testResult = await executeTestFnRunAsync2({
|
|
828
|
+
framework,
|
|
829
|
+
testPattern,
|
|
830
|
+
reporter,
|
|
831
|
+
outputPath,
|
|
832
|
+
workspace,
|
|
833
|
+
env: jobEnv,
|
|
834
|
+
parallel,
|
|
835
|
+
timeout,
|
|
836
|
+
retries
|
|
837
|
+
});
|
|
838
|
+
logLines(step.stepKey, testResult.lines);
|
|
839
|
+
if (testResult.success) {
|
|
840
|
+
step.status = "success";
|
|
841
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
842
|
+
logLines(step.stepKey, [
|
|
843
|
+
`Step "${step.stepKey}" completed successfully`
|
|
844
|
+
]);
|
|
845
|
+
} else {
|
|
846
|
+
step.status = "failure";
|
|
847
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
848
|
+
logLines(step.stepKey, [
|
|
849
|
+
`Step "${step.stepKey}" failed: ${testResult.error}`
|
|
850
|
+
]);
|
|
851
|
+
jobFailed = true;
|
|
852
|
+
}
|
|
853
|
+
continue;
|
|
854
|
+
}
|
|
855
|
+
if (stepSpec.uses === "hostfn/deploy") {
|
|
856
|
+
step.status = "running";
|
|
857
|
+
step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
858
|
+
const environment = stepSpec.with?.environment ?? "";
|
|
859
|
+
const ci = stepSpec.with?.ci ?? true;
|
|
860
|
+
const local = typeof stepSpec.with?.local === "boolean" ? stepSpec.with.local : jobSpec["runs-on"] === "hostfn-runner";
|
|
861
|
+
logLines(step.stepKey, [
|
|
862
|
+
`Step "${step.stepKey}" started: hostfn/deploy ${environment}`
|
|
863
|
+
]);
|
|
864
|
+
const deployResult = executeHostFnDeploy({
|
|
865
|
+
environment,
|
|
866
|
+
ci,
|
|
867
|
+
local,
|
|
868
|
+
workspace,
|
|
869
|
+
env: jobEnv
|
|
870
|
+
});
|
|
871
|
+
logLines(step.stepKey, deployResult.lines);
|
|
872
|
+
if (deployResult.success) {
|
|
873
|
+
step.status = "success";
|
|
874
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
875
|
+
logLines(step.stepKey, [
|
|
876
|
+
`Step "${step.stepKey}" completed successfully`
|
|
877
|
+
]);
|
|
878
|
+
} else {
|
|
879
|
+
step.status = "failure";
|
|
880
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
881
|
+
logLines(step.stepKey, [
|
|
882
|
+
`Step "${step.stepKey}" failed: ${deployResult.error}`
|
|
883
|
+
]);
|
|
884
|
+
jobFailed = true;
|
|
885
|
+
}
|
|
886
|
+
continue;
|
|
887
|
+
}
|
|
888
|
+
step.status = "skipped";
|
|
889
|
+
logLines(step.stepKey, [
|
|
890
|
+
`Step "${step.stepKey}" skipped (unsupported uses: ${stepSpec.uses})`
|
|
891
|
+
]);
|
|
892
|
+
continue;
|
|
893
|
+
}
|
|
894
|
+
step.status = "running";
|
|
895
|
+
step.startedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
896
|
+
let runCommand = stepSpec.run;
|
|
897
|
+
try {
|
|
898
|
+
const runRes = await interpolate(stepSpec.run, interpolateContext);
|
|
899
|
+
runCommand = runRes.result;
|
|
900
|
+
secretVals.push(...runRes.secretValues);
|
|
901
|
+
} catch {
|
|
902
|
+
step.status = "failure";
|
|
903
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
904
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
|
|
905
|
+
jobFailed = true;
|
|
906
|
+
continue;
|
|
907
|
+
}
|
|
908
|
+
logLines(step.stepKey, [
|
|
909
|
+
`Step "${step.stepKey}" started: ${runCommand}`
|
|
910
|
+
]);
|
|
911
|
+
const result = this.executeRunCommand(runCommand, workspace, jobSpec, jobEnv);
|
|
912
|
+
logLines(step.stepKey, result.lines);
|
|
913
|
+
if (result.exitCode === 0) {
|
|
914
|
+
step.status = "success";
|
|
915
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
916
|
+
logLines(step.stepKey, [
|
|
917
|
+
`Step "${step.stepKey}" completed successfully`
|
|
918
|
+
]);
|
|
919
|
+
} else {
|
|
920
|
+
step.status = "failure";
|
|
921
|
+
step.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
922
|
+
logLines(step.stepKey, [
|
|
923
|
+
`Step "${step.stepKey}" failed with exit code ${result.exitCode}`
|
|
924
|
+
]);
|
|
925
|
+
jobFailed = true;
|
|
926
|
+
}
|
|
927
|
+
}
|
|
928
|
+
} finally {
|
|
929
|
+
if (this.cleanWorkspace) {
|
|
930
|
+
try {
|
|
931
|
+
rmSync(workspace, { recursive: true, force: true });
|
|
932
|
+
} catch {
|
|
933
|
+
}
|
|
934
|
+
}
|
|
935
|
+
}
|
|
936
|
+
job.completedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
937
|
+
if (jobFailed) {
|
|
938
|
+
job.status = "failure";
|
|
939
|
+
logLines("__job__", [`Job "${jobKey}" failed`]);
|
|
940
|
+
this.store.updateRunStatus(runId, "failure");
|
|
941
|
+
} else {
|
|
942
|
+
job.status = "success";
|
|
943
|
+
logLines("__job__", [`Job "${jobKey}" completed successfully`]);
|
|
944
|
+
await this.enqueueDependentJobs(runId, jobKey);
|
|
945
|
+
this.checkRunCompletion(runId);
|
|
946
|
+
}
|
|
947
|
+
}
|
|
948
|
+
executeRunCommand(command, workspace, jobSpec, env) {
|
|
949
|
+
if (this.shouldUseDocker(jobSpec) && this.dockerExecutor) {
|
|
950
|
+
return this.dockerExecutor.execute({
|
|
951
|
+
image: jobSpec.image ?? this.defaultDockerImage,
|
|
952
|
+
workspace,
|
|
953
|
+
command,
|
|
954
|
+
env
|
|
955
|
+
});
|
|
956
|
+
}
|
|
957
|
+
return executeRunStep(command, workspace, env);
|
|
958
|
+
}
|
|
959
|
+
shouldUseDocker(jobSpec) {
|
|
960
|
+
if (!this.dockerExecutor) return false;
|
|
961
|
+
if (typeof jobSpec.image === "string" && jobSpec.image.length > 0) return true;
|
|
962
|
+
if (jobSpec["runs-on"] === "default" && this.dockerForDefault) return true;
|
|
963
|
+
return this.dockerRunOnLabels.has(jobSpec["runs-on"]);
|
|
964
|
+
}
|
|
965
|
+
async enqueueDependentJobs(runId, completedJobKey) {
|
|
966
|
+
const spec = this.pipelineSpecs.get(runId);
|
|
967
|
+
if (!spec) return;
|
|
968
|
+
const run = this.store.getRun(runId);
|
|
969
|
+
if (!run) return;
|
|
970
|
+
const completedJobs = new Set(
|
|
971
|
+
run.jobs.filter((j) => j.status === "success" || j.status === "skipped").map((j) => j.jobKey)
|
|
972
|
+
);
|
|
973
|
+
const enqueuedJobs = new Set(
|
|
974
|
+
run.jobs.filter((j) => j.status !== "pending").map((j) => j.jobKey)
|
|
975
|
+
);
|
|
976
|
+
const readyJobs = getReadyJobs(spec, completedJobs, enqueuedJobs);
|
|
977
|
+
for (const readyJobKey of readyJobs) {
|
|
978
|
+
const jobSpec = spec.jobs[readyJobKey];
|
|
979
|
+
if (jobSpec) {
|
|
980
|
+
await this.queue.enqueue(this.queueName, {
|
|
981
|
+
runId,
|
|
982
|
+
jobKey: readyJobKey,
|
|
983
|
+
jobSpec
|
|
984
|
+
});
|
|
985
|
+
}
|
|
986
|
+
}
|
|
987
|
+
}
|
|
988
|
+
checkRunCompletion(runId) {
|
|
989
|
+
const run = this.store.getRun(runId);
|
|
990
|
+
if (!run) return;
|
|
991
|
+
const allDone = run.jobs.every(
|
|
992
|
+
(j) => j.status === "success" || j.status === "failure" || j.status === "skipped"
|
|
993
|
+
);
|
|
994
|
+
if (allDone) {
|
|
995
|
+
const anyFailure = run.jobs.some((j) => j.status === "failure");
|
|
996
|
+
this.store.updateRunStatus(runId, anyFailure ? "failure" : "success");
|
|
997
|
+
}
|
|
998
|
+
}
|
|
999
|
+
};
|
|
1000
|
+
|
|
1001
|
+
// src/reporting/logfn-client.ts
|
|
1002
|
+
var MemoryLogFnClient = class {
  constructor() {
    // All recorded log entries, kept in insertion order.
    this.entries = [];
  }
  // Record one pre-built log entry as-is.
  append(entry) {
    this.entries.push(entry);
  }
  // Record several lines for one step of one job, all stamped with the
  // same current timestamp.
  appendLines(runId, jobKey, stepKey, lines) {
    const timestamp = (/* @__PURE__ */ new Date()).toISOString();
    for (const line of lines) {
      this.entries.push({ runId, jobKey, stepKey, line, timestamp });
    }
  }
  // Entries belonging to a specific job of a run, in insertion order.
  getLines(runId, jobKey) {
    return this.entries.filter((entry) => entry.runId === runId && entry.jobKey === jobKey);
  }
  // Every entry recorded for a run, across all of its jobs.
  getAllLines(runId) {
    return this.entries.filter((entry) => entry.runId === runId);
  }
};
|
|
1022
|
+
|
|
1023
|
+
// src/index.ts
// Eagerly initialize the lazily-registered testfn/run step module so its
// exports (e.g. executeTestFnRun) are ready for this bundle's consumers.
init_testfn_run();
|
|
1025
|
+
|
|
1026
|
+
// src/docker-executor.ts
|
|
1027
|
+
import { spawnSync } from "child_process";
|
|
1028
|
+
var DefaultDockerCommandRunner = class {
  // Invokes the `docker` CLI synchronously and normalizes the result into
  // a plain { status, stdout, stderr, error } shape.
  run(args, options) {
    // Merge caller-provided variables on top of the current process env.
    const childEnv = options.env ? { ...process.env, ...options.env } : process.env;
    const res = spawnSync("docker", args, {
      cwd: options.cwd,
      encoding: "utf-8",
      env: childEnv,
      timeout: 6e5
    });
    const errorText = res.error ? String(res.error.message ?? res.error) : void 0;
    return {
      status: res.status,
      stdout: res.stdout ?? "",
      stderr: res.stderr ?? "",
      error: errorText
    };
  }
};
|
|
1044
|
+
var DockerExecutor = class {
  // Runs step commands inside Docker containers. A custom command runner
  // may be injected (useful for tests); otherwise the real CLI is used.
  constructor(runner) {
    this.runner = runner ?? new DefaultDockerCommandRunner();
  }
  // Execute `options.command` in `options.image` with the workspace mounted
  // at /workspace. Returns { exitCode, stdout, stderr, lines }.
  execute(options) {
    const args = [
      "run",
      "--rm",
      "-v",
      `${options.workspace}:/workspace`,
      "-w",
      "/workspace"
    ];
    // Forward each environment variable into the container.
    const envEntries = Object.entries(options.env ?? {});
    for (const [key, value] of envEntries) {
      args.push("-e", `${key}=${value}`);
    }
    args.push(options.image, "sh", "-lc", options.command);
    const output = this.runner.run(args, { cwd: options.workspace, env: options.env });
    const stdout = output.stdout ?? "";
    const stderr = output.stderr ?? "";
    // Collect non-empty lines from both streams, stdout first.
    const lines = stdout.split("\n").concat(stderr.split("\n")).filter((line) => line !== "");
    if (output.error) {
      lines.push(`docker error: ${output.error}`);
    }
    // A missing status (e.g. spawn failure) is treated as exit code 1.
    const exitCode = output.status ?? 1;
    return { exitCode, stdout, stderr, lines };
  }
};
|
|
1077
|
+
export {
|
|
1078
|
+
DockerExecutor,
|
|
1079
|
+
MemoryLogFnClient,
|
|
1080
|
+
Runner,
|
|
1081
|
+
executeArtifactDownload,
|
|
1082
|
+
executeArtifactUpload,
|
|
1083
|
+
executeCacheRestore,
|
|
1084
|
+
executeCacheSave,
|
|
1085
|
+
executeCheckout,
|
|
1086
|
+
executeHostFnDeploy,
|
|
1087
|
+
executeRunStep,
|
|
1088
|
+
executeTestFnRun,
|
|
1089
|
+
redactSecrets
|
|
1090
|
+
};
|
|
1091
|
+
//# sourceMappingURL=index.mjs.map
|