@cifn/runner 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +223 -0
- package/dist/index.js +1117 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +1091 -0
- package/dist/index.mjs.map +1 -0
- package/dist/server.d.mts +2 -0
- package/dist/server.js.map +1 -0
- package/dist/server.mjs +48 -0
- package/dist/server.mjs.map +1 -0
- package/package.json +41 -0
- package/src/artifacts-cache.test.ts +557 -0
- package/src/docker-executor.ts +76 -0
- package/src/executor/run-step.ts +34 -0
- package/src/index.ts +23 -0
- package/src/reporting/logfn-client.ts +37 -0
- package/src/reporting/redact.ts +12 -0
- package/src/runner.test.ts +957 -0
- package/src/runner.ts +626 -0
- package/src/secrets-steps.test.ts +603 -0
- package/src/server.ts +54 -0
- package/src/steps/artifact-download.ts +55 -0
- package/src/steps/artifact-upload.ts +89 -0
- package/src/steps/cache-restore.ts +61 -0
- package/src/steps/cache-save.ts +88 -0
- package/src/steps/checkout.ts +63 -0
- package/src/steps/hostfn-deploy.ts +52 -0
- package/src/steps/testfn-run.ts +179 -0
- package/tsconfig.json +18 -0
- package/tsup.config.ts +10 -0
- package/vitest.config.ts +9 -0
package/src/runner.ts
ADDED
|
@@ -0,0 +1,626 @@
|
|
|
1
|
+
import { mkdtempSync, rmSync } from 'node:fs';
|
|
2
|
+
import { tmpdir } from 'node:os';
|
|
3
|
+
import { join } from 'node:path';
|
|
4
|
+
import type { QueueClient, JobPayload } from 'cifn';
|
|
5
|
+
import {
|
|
6
|
+
DEFAULT_QUEUE_NAME,
|
|
7
|
+
getReadyJobs,
|
|
8
|
+
evaluateConditionSync,
|
|
9
|
+
interpolate,
|
|
10
|
+
hashFiles,
|
|
11
|
+
type MemoryStore,
|
|
12
|
+
type PipelineSpec,
|
|
13
|
+
} from 'cifn';
|
|
14
|
+
import { executeRunStep } from './executor/run-step.js';
|
|
15
|
+
import { executeCheckout } from './steps/checkout.js';
|
|
16
|
+
import { executeArtifactUpload } from './steps/artifact-upload.js';
|
|
17
|
+
import { executeArtifactDownload } from './steps/artifact-download.js';
|
|
18
|
+
import { executeCacheSave } from './steps/cache-save.js';
|
|
19
|
+
import { executeCacheRestore } from './steps/cache-restore.js';
|
|
20
|
+
import { executeHostFnDeploy } from './steps/hostfn-deploy.js';
|
|
21
|
+
import type { LogFnClient } from './reporting/logfn-client.js';
|
|
22
|
+
import { redactSecrets } from './reporting/redact.js';
|
|
23
|
+
import { DockerExecutor } from './docker-executor.js';
|
|
24
|
+
|
|
25
|
+
/** Configuration for constructing a {@link Runner}. */
export interface RunnerOptions {
  /** Run/job state store the runner reads and updates as jobs execute. */
  store: MemoryStore;
  /** Queue the runner dequeues jobs from and enqueues dependent jobs onto. */
  queue: QueueClient;
  /** Sink for step/job log lines (lines are secret-redacted before append). */
  logClient: LogFnClient;
  /** Known pipeline specs by run id; used to schedule dependent jobs. */
  pipelineSpecs?: Map<string, PipelineSpec>;
  /** Queue name to poll; defaults to DEFAULT_QUEUE_NAME. */
  queueName?: string;
  /** Remove the per-job temp workspace after the job finishes (default true). */
  cleanWorkspace?: boolean;
  /** File storage used by artifact/cache steps; those steps are skipped when absent. */
  fileFnClient?: {
    upload(namespace: string, key: string, data: Buffer): Promise<string>;
    downloadByKey(namespace: string, key: string): Promise<Buffer | null>;
  };
  /** Receives uploaded artifact metadata per run. */
  artifactStore?: {
    addArtifact(runId: string, artifact: { name: string; fileId: string; url?: string }): void;
  };
  /** Pre-registered secret values per run id, used for log redaction. */
  secretValues?: Map<string, string[]>;
  /** Resolve secret by name for interpolation. When not set, ${{ secrets.* }} in run/with/cache key will fail. */
  getSecret?: (runId: string, name: string) => Promise<string | null>;
  /** Runner labels matched against a job's `runs-on`; defaults to [runnerType]. */
  labels?: string[];
  /** Runner flavor; also the default label when `labels` is not provided. */
  runnerType?: 'default' | 'hostfn-runner';
  /** When set, run-steps may execute inside Docker (see shouldUseDocker). */
  dockerExecutor?: DockerExecutor;
  /** Image used when a job requests Docker but sets no explicit image (default 'node:20'). */
  defaultDockerImage?: string;
  /** `runs-on` labels that force Docker execution (default ['docker-ubuntu-22']). */
  dockerRunOnLabels?: string[];
  /** Also use Docker for `runs-on: default` jobs (default false). */
  dockerForDefault?: boolean;
}
|
|
49
|
+
|
|
50
|
+
export class Runner {
|
|
51
|
+
private store: MemoryStore;
|
|
52
|
+
private queue: QueueClient;
|
|
53
|
+
private logClient: LogFnClient;
|
|
54
|
+
private pipelineSpecs: Map<string, PipelineSpec>;
|
|
55
|
+
private queueName: string;
|
|
56
|
+
private cleanWorkspace: boolean;
|
|
57
|
+
private fileFnClient?: RunnerOptions['fileFnClient'];
|
|
58
|
+
private artifactStore?: RunnerOptions['artifactStore'];
|
|
59
|
+
private secretValues: Map<string, string[]>;
|
|
60
|
+
private labels: string[];
|
|
61
|
+
private runnerType: 'default' | 'hostfn-runner';
|
|
62
|
+
private dockerExecutor?: DockerExecutor;
|
|
63
|
+
private defaultDockerImage: string;
|
|
64
|
+
private dockerRunOnLabels: Set<string>;
|
|
65
|
+
private dockerForDefault: boolean;
|
|
66
|
+
private getSecret?: (runId: string, name: string) => Promise<string | null>;
|
|
67
|
+
|
|
68
|
+
constructor(options: RunnerOptions) {
|
|
69
|
+
this.store = options.store;
|
|
70
|
+
this.queue = options.queue;
|
|
71
|
+
this.logClient = options.logClient;
|
|
72
|
+
this.pipelineSpecs = options.pipelineSpecs ?? new Map();
|
|
73
|
+
this.queueName = options.queueName ?? DEFAULT_QUEUE_NAME;
|
|
74
|
+
this.cleanWorkspace = options.cleanWorkspace ?? true;
|
|
75
|
+
this.fileFnClient = options.fileFnClient;
|
|
76
|
+
this.artifactStore = options.artifactStore;
|
|
77
|
+
this.secretValues = options.secretValues ?? new Map();
|
|
78
|
+
this.getSecret = options.getSecret;
|
|
79
|
+
this.runnerType = options.runnerType ?? 'default';
|
|
80
|
+
this.labels = options.labels && options.labels.length > 0
|
|
81
|
+
? [...new Set(options.labels)]
|
|
82
|
+
: [this.runnerType];
|
|
83
|
+
this.dockerExecutor = options.dockerExecutor;
|
|
84
|
+
this.defaultDockerImage = options.defaultDockerImage ?? 'node:20';
|
|
85
|
+
this.dockerRunOnLabels = new Set(options.dockerRunOnLabels ?? ['docker-ubuntu-22']);
|
|
86
|
+
this.dockerForDefault = options.dockerForDefault ?? false;
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
registerPipelineSpec(runId: string, spec: PipelineSpec): void {
|
|
90
|
+
this.pipelineSpecs.set(runId, spec);
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
registerSecretValues(runId: string, values: string[]): void {
|
|
94
|
+
this.secretValues.set(runId, values);
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
async processNextJob(): Promise<boolean> {
|
|
98
|
+
const payload = await this.queue.dequeueMatching(this.queueName, (job) =>
|
|
99
|
+
this.labels.includes(job.jobSpec['runs-on'])
|
|
100
|
+
);
|
|
101
|
+
if (!payload) return false;
|
|
102
|
+
await this.executeJob(payload);
|
|
103
|
+
return true;
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
async processAllJobs(): Promise<number> {
|
|
107
|
+
let count = 0;
|
|
108
|
+
while (await this.processNextJob()) {
|
|
109
|
+
count++;
|
|
110
|
+
}
|
|
111
|
+
return count;
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
private async executeJob(payload: JobPayload): Promise<void> {
|
|
115
|
+
const { runId, jobKey, jobSpec } = payload;
|
|
116
|
+
let jobEnv: Record<string, string> = { ...payload.env };
|
|
117
|
+
const secretVals = this.secretValues.get(runId) ?? [];
|
|
118
|
+
if (payload.secretKeys && jobEnv) {
|
|
119
|
+
for (const key of payload.secretKeys) {
|
|
120
|
+
const val = jobEnv[key];
|
|
121
|
+
if (val && !secretVals.includes(val)) {
|
|
122
|
+
secretVals.push(val);
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
const logLines = (stepKey: string, lines: string[]) => {
|
|
128
|
+
this.logClient.appendLines(runId, jobKey, stepKey, redactSecrets(lines, secretVals));
|
|
129
|
+
};
|
|
130
|
+
|
|
131
|
+
const run = this.store.getRun(runId);
|
|
132
|
+
if (!run) return;
|
|
133
|
+
|
|
134
|
+
const job = run.jobs.find(j => j.jobKey === jobKey);
|
|
135
|
+
if (!job) return;
|
|
136
|
+
|
|
137
|
+
const github = (run.trigger?.payload?.github as Record<string, string> | undefined) ?? {};
|
|
138
|
+
if (!github.ref && payload.pipelineRef?.ref) github.ref = payload.pipelineRef.ref;
|
|
139
|
+
if (!github.repository && payload.pipelineRef?.repo) github.repository = payload.pipelineRef.repo;
|
|
140
|
+
if (!github.event_name && run.trigger?.type) github.event_name = run.trigger.type;
|
|
141
|
+
|
|
142
|
+
if (jobSpec.if !== undefined && jobSpec.if !== null) {
|
|
143
|
+
try {
|
|
144
|
+
const runJob = evaluateConditionSync(String(jobSpec.if).trim(), { github, stepOutcomes: [] });
|
|
145
|
+
if (!runJob) {
|
|
146
|
+
job.status = 'skipped';
|
|
147
|
+
job.completedAt = new Date().toISOString();
|
|
148
|
+
logLines('__job__', [`Job "${jobKey}" skipped (if: false)`]);
|
|
149
|
+
this.enqueueDependentJobs(runId, jobKey);
|
|
150
|
+
this.checkRunCompletion(runId);
|
|
151
|
+
return;
|
|
152
|
+
}
|
|
153
|
+
} catch {
|
|
154
|
+
job.status = 'failure';
|
|
155
|
+
job.completedAt = new Date().toISOString();
|
|
156
|
+
logLines('__job__', [`Job "${jobKey}" failed (invalid if expression)`]);
|
|
157
|
+
this.store.updateRunStatus(runId, 'failure');
|
|
158
|
+
return;
|
|
159
|
+
}
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
if (run.status === 'queued') {
|
|
163
|
+
this.store.updateRunStatus(runId, 'running');
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
job.status = 'running';
|
|
167
|
+
job.startedAt = new Date().toISOString();
|
|
168
|
+
|
|
169
|
+
logLines('__job__', [`Job "${jobKey}" started`]);
|
|
170
|
+
|
|
171
|
+
const workspace = mkdtempSync(join(tmpdir(), `cifn-${runId}-${jobKey}-`));
|
|
172
|
+
|
|
173
|
+
let jobFailed = false;
|
|
174
|
+
|
|
175
|
+
const interpolateContext = {
|
|
176
|
+
github,
|
|
177
|
+
getSecret: this.getSecret ? (name: string) => this.getSecret!(runId, name) : undefined,
|
|
178
|
+
workspaceRoot: workspace,
|
|
179
|
+
hashFiles: (glob: string) => Promise.resolve(hashFiles(workspace, glob)),
|
|
180
|
+
};
|
|
181
|
+
|
|
182
|
+
if (jobSpec.env) {
|
|
183
|
+
for (const [k, v] of Object.entries(jobSpec.env)) {
|
|
184
|
+
try {
|
|
185
|
+
const { result, secretValues: sv } = await interpolate(String(v), interpolateContext);
|
|
186
|
+
jobEnv[k] = result;
|
|
187
|
+
secretVals.push(...sv);
|
|
188
|
+
} catch {
|
|
189
|
+
jobEnv[k] = String(v);
|
|
190
|
+
}
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
try {
|
|
195
|
+
for (let i = 0; i < jobSpec.steps.length; i++) {
|
|
196
|
+
const stepSpec = jobSpec.steps[i];
|
|
197
|
+
const step = job.steps[i];
|
|
198
|
+
if (!step) continue;
|
|
199
|
+
|
|
200
|
+
const effectiveIf = ('if' in stepSpec && stepSpec.if != null) ? String(stepSpec.if).trim() : 'success()';
|
|
201
|
+
const stepOutcomes = job.steps.slice(0, i).map(s => s.status);
|
|
202
|
+
try {
|
|
203
|
+
const runStep = evaluateConditionSync(effectiveIf, { github, stepOutcomes });
|
|
204
|
+
if (!runStep) {
|
|
205
|
+
step.status = 'skipped';
|
|
206
|
+
step.completedAt = new Date().toISOString();
|
|
207
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" skipped (if: false)`]);
|
|
208
|
+
continue;
|
|
209
|
+
}
|
|
210
|
+
} catch {
|
|
211
|
+
step.status = 'failure';
|
|
212
|
+
step.completedAt = new Date().toISOString();
|
|
213
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed (invalid if expression)`]);
|
|
214
|
+
jobFailed = true;
|
|
215
|
+
continue;
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
if ('uses' in stepSpec) {
|
|
219
|
+
if (stepSpec.uses === 'checkout') {
|
|
220
|
+
step.status = 'running';
|
|
221
|
+
step.startedAt = new Date().toISOString();
|
|
222
|
+
logLines(step.stepKey, [
|
|
223
|
+
`Step "${step.stepKey}" started: checkout`,
|
|
224
|
+
]);
|
|
225
|
+
|
|
226
|
+
const repo = (stepSpec.with?.repository as string) ?? payload.pipelineRef?.repo ?? '';
|
|
227
|
+
const ref = (stepSpec.with?.ref as string) ?? payload.pipelineRef?.ref ?? 'main';
|
|
228
|
+
const token = stepSpec.with?.token as string | undefined;
|
|
229
|
+
|
|
230
|
+
const checkoutResult = executeCheckout({ repo, ref, workspace, token });
|
|
231
|
+
logLines(step.stepKey, checkoutResult.lines);
|
|
232
|
+
|
|
233
|
+
if (checkoutResult.success) {
|
|
234
|
+
step.status = 'success';
|
|
235
|
+
step.completedAt = new Date().toISOString();
|
|
236
|
+
logLines(step.stepKey, [
|
|
237
|
+
`Step "${step.stepKey}" completed successfully`,
|
|
238
|
+
]);
|
|
239
|
+
} else {
|
|
240
|
+
step.status = 'failure';
|
|
241
|
+
step.completedAt = new Date().toISOString();
|
|
242
|
+
logLines(step.stepKey, [
|
|
243
|
+
`Step "${step.stepKey}" failed: ${checkoutResult.error}`,
|
|
244
|
+
]);
|
|
245
|
+
jobFailed = true;
|
|
246
|
+
}
|
|
247
|
+
continue;
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
if (stepSpec.uses === 'artifact/upload' && this.fileFnClient) {
|
|
251
|
+
step.status = 'running';
|
|
252
|
+
step.startedAt = new Date().toISOString();
|
|
253
|
+
let artName = (stepSpec.with?.name as string) ?? 'default';
|
|
254
|
+
let artPath = (stepSpec.with?.path as string) ?? '.';
|
|
255
|
+
try {
|
|
256
|
+
const nameRes = await interpolate(artName, interpolateContext);
|
|
257
|
+
artName = nameRes.result;
|
|
258
|
+
secretVals.push(...nameRes.secretValues);
|
|
259
|
+
const pathRes = await interpolate(artPath, interpolateContext);
|
|
260
|
+
artPath = pathRes.result;
|
|
261
|
+
secretVals.push(...pathRes.secretValues);
|
|
262
|
+
} catch {
|
|
263
|
+
step.status = 'failure';
|
|
264
|
+
step.completedAt = new Date().toISOString();
|
|
265
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
|
|
266
|
+
jobFailed = true;
|
|
267
|
+
continue;
|
|
268
|
+
}
|
|
269
|
+
logLines(step.stepKey, [
|
|
270
|
+
`Step "${step.stepKey}" started: artifact/upload "${artName}"`,
|
|
271
|
+
]);
|
|
272
|
+
|
|
273
|
+
const uploadResult = await executeArtifactUpload({
|
|
274
|
+
name: artName, path: artPath, workspace, runId, fileFnClient: this.fileFnClient,
|
|
275
|
+
});
|
|
276
|
+
logLines(step.stepKey, uploadResult.lines);
|
|
277
|
+
|
|
278
|
+
if (uploadResult.success) {
|
|
279
|
+
if (uploadResult.fileId && this.artifactStore) {
|
|
280
|
+
this.artifactStore.addArtifact(runId, { name: artName, fileId: uploadResult.fileId });
|
|
281
|
+
}
|
|
282
|
+
step.status = 'success';
|
|
283
|
+
step.completedAt = new Date().toISOString();
|
|
284
|
+
logLines(step.stepKey, [
|
|
285
|
+
`Step "${step.stepKey}" completed successfully`,
|
|
286
|
+
]);
|
|
287
|
+
} else {
|
|
288
|
+
step.status = 'failure';
|
|
289
|
+
step.completedAt = new Date().toISOString();
|
|
290
|
+
logLines(step.stepKey, [
|
|
291
|
+
`Step "${step.stepKey}" failed: ${uploadResult.error}`,
|
|
292
|
+
]);
|
|
293
|
+
jobFailed = true;
|
|
294
|
+
}
|
|
295
|
+
continue;
|
|
296
|
+
}
|
|
297
|
+
|
|
298
|
+
if (stepSpec.uses === 'artifact/download' && this.fileFnClient) {
|
|
299
|
+
step.status = 'running';
|
|
300
|
+
step.startedAt = new Date().toISOString();
|
|
301
|
+
let artName = (stepSpec.with?.name as string) ?? 'default';
|
|
302
|
+
try {
|
|
303
|
+
const nameRes = await interpolate(artName, interpolateContext);
|
|
304
|
+
artName = nameRes.result;
|
|
305
|
+
secretVals.push(...nameRes.secretValues);
|
|
306
|
+
} catch {
|
|
307
|
+
step.status = 'failure';
|
|
308
|
+
step.completedAt = new Date().toISOString();
|
|
309
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
|
|
310
|
+
jobFailed = true;
|
|
311
|
+
continue;
|
|
312
|
+
}
|
|
313
|
+
logLines(step.stepKey, [
|
|
314
|
+
`Step "${step.stepKey}" started: artifact/download "${artName}"`,
|
|
315
|
+
]);
|
|
316
|
+
|
|
317
|
+
const downloadResult = await executeArtifactDownload({
|
|
318
|
+
name: artName, workspace, runId, fileFnClient: this.fileFnClient,
|
|
319
|
+
});
|
|
320
|
+
logLines(step.stepKey, downloadResult.lines);
|
|
321
|
+
|
|
322
|
+
if (downloadResult.success) {
|
|
323
|
+
step.status = 'success';
|
|
324
|
+
step.completedAt = new Date().toISOString();
|
|
325
|
+
logLines(step.stepKey, [
|
|
326
|
+
`Step "${step.stepKey}" completed successfully`,
|
|
327
|
+
]);
|
|
328
|
+
} else {
|
|
329
|
+
step.status = 'failure';
|
|
330
|
+
step.completedAt = new Date().toISOString();
|
|
331
|
+
logLines(step.stepKey, [
|
|
332
|
+
`Step "${step.stepKey}" failed: ${downloadResult.error}`,
|
|
333
|
+
]);
|
|
334
|
+
jobFailed = true;
|
|
335
|
+
}
|
|
336
|
+
continue;
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
if (stepSpec.uses === 'cache/save' && this.fileFnClient) {
|
|
340
|
+
step.status = 'running';
|
|
341
|
+
step.startedAt = new Date().toISOString();
|
|
342
|
+
let cacheKey = (stepSpec.with?.key as string) ?? '';
|
|
343
|
+
try {
|
|
344
|
+
const keyRes = await interpolate(cacheKey, interpolateContext);
|
|
345
|
+
cacheKey = keyRes.result;
|
|
346
|
+
secretVals.push(...keyRes.secretValues);
|
|
347
|
+
} catch {
|
|
348
|
+
step.status = 'failure';
|
|
349
|
+
step.completedAt = new Date().toISOString();
|
|
350
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
|
|
351
|
+
jobFailed = true;
|
|
352
|
+
continue;
|
|
353
|
+
}
|
|
354
|
+
const cachePaths = (stepSpec.with?.paths as string[]) ?? [];
|
|
355
|
+
logLines(step.stepKey, [
|
|
356
|
+
`Step "${step.stepKey}" started: cache/save "${cacheKey}"`,
|
|
357
|
+
]);
|
|
358
|
+
|
|
359
|
+
const saveResult = await executeCacheSave({
|
|
360
|
+
key: cacheKey, paths: cachePaths, workspace, fileFnClient: this.fileFnClient,
|
|
361
|
+
});
|
|
362
|
+
logLines(step.stepKey, saveResult.lines);
|
|
363
|
+
|
|
364
|
+
step.status = saveResult.success ? 'success' : 'failure';
|
|
365
|
+
step.completedAt = new Date().toISOString();
|
|
366
|
+
if (!saveResult.success) {
|
|
367
|
+
logLines(step.stepKey, [
|
|
368
|
+
`Step "${step.stepKey}" failed: ${saveResult.error}`,
|
|
369
|
+
]);
|
|
370
|
+
jobFailed = true;
|
|
371
|
+
} else {
|
|
372
|
+
logLines(step.stepKey, [
|
|
373
|
+
`Step "${step.stepKey}" completed successfully`,
|
|
374
|
+
]);
|
|
375
|
+
}
|
|
376
|
+
continue;
|
|
377
|
+
}
|
|
378
|
+
|
|
379
|
+
if (stepSpec.uses === 'cache/restore' && this.fileFnClient) {
|
|
380
|
+
step.status = 'running';
|
|
381
|
+
step.startedAt = new Date().toISOString();
|
|
382
|
+
let cacheKey = (stepSpec.with?.key as string) ?? '';
|
|
383
|
+
try {
|
|
384
|
+
const keyRes = await interpolate(cacheKey, interpolateContext);
|
|
385
|
+
cacheKey = keyRes.result;
|
|
386
|
+
secretVals.push(...keyRes.secretValues);
|
|
387
|
+
} catch {
|
|
388
|
+
step.status = 'failure';
|
|
389
|
+
step.completedAt = new Date().toISOString();
|
|
390
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
|
|
391
|
+
jobFailed = true;
|
|
392
|
+
continue;
|
|
393
|
+
}
|
|
394
|
+
logLines(step.stepKey, [
|
|
395
|
+
`Step "${step.stepKey}" started: cache/restore "${cacheKey}"`,
|
|
396
|
+
]);
|
|
397
|
+
|
|
398
|
+
const restoreResult = await executeCacheRestore({
|
|
399
|
+
key: cacheKey, workspace, fileFnClient: this.fileFnClient,
|
|
400
|
+
});
|
|
401
|
+
logLines(step.stepKey, restoreResult.lines);
|
|
402
|
+
|
|
403
|
+
step.status = 'success';
|
|
404
|
+
step.completedAt = new Date().toISOString();
|
|
405
|
+
logLines(step.stepKey, [
|
|
406
|
+
`Step "${step.stepKey}" completed successfully${restoreResult.hit ? ' (cache hit)' : ' (cache miss)'}`,
|
|
407
|
+
]);
|
|
408
|
+
continue;
|
|
409
|
+
}
|
|
410
|
+
|
|
411
|
+
if (stepSpec.uses === 'testfn/run') {
|
|
412
|
+
step.status = 'running';
|
|
413
|
+
step.startedAt = new Date().toISOString();
|
|
414
|
+
const framework = stepSpec.with?.framework as string | undefined;
|
|
415
|
+
const testPattern = stepSpec.with?.testPattern as string | undefined;
|
|
416
|
+
const reporter = stepSpec.with?.reporter as string | undefined;
|
|
417
|
+
const outputPath = stepSpec.with?.outputPath as string | undefined;
|
|
418
|
+
const parallel = stepSpec.with?.parallel as number | 'auto' | undefined;
|
|
419
|
+
const timeout = stepSpec.with?.timeout as number | undefined;
|
|
420
|
+
const retries = stepSpec.with?.retries as number | undefined;
|
|
421
|
+
logLines(step.stepKey, [
|
|
422
|
+
`Step "${step.stepKey}" started: testfn/run`,
|
|
423
|
+
]);
|
|
424
|
+
|
|
425
|
+
const { executeTestFnRunAsync } = await import('./steps/testfn-run.js');
|
|
426
|
+
const testResult = await executeTestFnRunAsync({
|
|
427
|
+
framework,
|
|
428
|
+
testPattern,
|
|
429
|
+
reporter,
|
|
430
|
+
outputPath,
|
|
431
|
+
workspace,
|
|
432
|
+
env: jobEnv,
|
|
433
|
+
parallel,
|
|
434
|
+
timeout,
|
|
435
|
+
retries,
|
|
436
|
+
});
|
|
437
|
+
logLines(step.stepKey, testResult.lines);
|
|
438
|
+
|
|
439
|
+
if (testResult.success) {
|
|
440
|
+
step.status = 'success';
|
|
441
|
+
step.completedAt = new Date().toISOString();
|
|
442
|
+
logLines(step.stepKey, [
|
|
443
|
+
`Step "${step.stepKey}" completed successfully`,
|
|
444
|
+
]);
|
|
445
|
+
} else {
|
|
446
|
+
step.status = 'failure';
|
|
447
|
+
step.completedAt = new Date().toISOString();
|
|
448
|
+
logLines(step.stepKey, [
|
|
449
|
+
`Step "${step.stepKey}" failed: ${testResult.error}`,
|
|
450
|
+
]);
|
|
451
|
+
jobFailed = true;
|
|
452
|
+
}
|
|
453
|
+
continue;
|
|
454
|
+
}
|
|
455
|
+
|
|
456
|
+
if (stepSpec.uses === 'hostfn/deploy') {
|
|
457
|
+
step.status = 'running';
|
|
458
|
+
step.startedAt = new Date().toISOString();
|
|
459
|
+
const environment = (stepSpec.with?.environment as string) ?? '';
|
|
460
|
+
const ci = (stepSpec.with?.ci as boolean) ?? true;
|
|
461
|
+
const local = typeof stepSpec.with?.local === 'boolean'
|
|
462
|
+
? (stepSpec.with.local as boolean)
|
|
463
|
+
: jobSpec['runs-on'] === 'hostfn-runner';
|
|
464
|
+
logLines(step.stepKey, [
|
|
465
|
+
`Step "${step.stepKey}" started: hostfn/deploy ${environment}`,
|
|
466
|
+
]);
|
|
467
|
+
|
|
468
|
+
const deployResult = executeHostFnDeploy({
|
|
469
|
+
environment, ci, local, workspace, env: jobEnv,
|
|
470
|
+
});
|
|
471
|
+
logLines(step.stepKey, deployResult.lines);
|
|
472
|
+
|
|
473
|
+
if (deployResult.success) {
|
|
474
|
+
step.status = 'success';
|
|
475
|
+
step.completedAt = new Date().toISOString();
|
|
476
|
+
logLines(step.stepKey, [
|
|
477
|
+
`Step "${step.stepKey}" completed successfully`,
|
|
478
|
+
]);
|
|
479
|
+
} else {
|
|
480
|
+
step.status = 'failure';
|
|
481
|
+
step.completedAt = new Date().toISOString();
|
|
482
|
+
logLines(step.stepKey, [
|
|
483
|
+
`Step "${step.stepKey}" failed: ${deployResult.error}`,
|
|
484
|
+
]);
|
|
485
|
+
jobFailed = true;
|
|
486
|
+
}
|
|
487
|
+
continue;
|
|
488
|
+
}
|
|
489
|
+
|
|
490
|
+
step.status = 'skipped';
|
|
491
|
+
logLines(step.stepKey, [
|
|
492
|
+
`Step "${step.stepKey}" skipped (unsupported uses: ${stepSpec.uses})`,
|
|
493
|
+
]);
|
|
494
|
+
continue;
|
|
495
|
+
}
|
|
496
|
+
|
|
497
|
+
step.status = 'running';
|
|
498
|
+
step.startedAt = new Date().toISOString();
|
|
499
|
+
|
|
500
|
+
let runCommand = stepSpec.run;
|
|
501
|
+
try {
|
|
502
|
+
const runRes = await interpolate(stepSpec.run, interpolateContext);
|
|
503
|
+
runCommand = runRes.result;
|
|
504
|
+
secretVals.push(...runRes.secretValues);
|
|
505
|
+
} catch {
|
|
506
|
+
step.status = 'failure';
|
|
507
|
+
step.completedAt = new Date().toISOString();
|
|
508
|
+
logLines(step.stepKey, [`Step "${step.stepKey}" failed: interpolation error`]);
|
|
509
|
+
jobFailed = true;
|
|
510
|
+
continue;
|
|
511
|
+
}
|
|
512
|
+
|
|
513
|
+
logLines(step.stepKey, [
|
|
514
|
+
`Step "${step.stepKey}" started: ${runCommand}`,
|
|
515
|
+
]);
|
|
516
|
+
|
|
517
|
+
const result = this.executeRunCommand(runCommand, workspace, jobSpec, jobEnv);
|
|
518
|
+
|
|
519
|
+
logLines(step.stepKey, result.lines);
|
|
520
|
+
|
|
521
|
+
if (result.exitCode === 0) {
|
|
522
|
+
step.status = 'success';
|
|
523
|
+
step.completedAt = new Date().toISOString();
|
|
524
|
+
logLines(step.stepKey, [
|
|
525
|
+
`Step "${step.stepKey}" completed successfully`,
|
|
526
|
+
]);
|
|
527
|
+
} else {
|
|
528
|
+
step.status = 'failure';
|
|
529
|
+
step.completedAt = new Date().toISOString();
|
|
530
|
+
logLines(step.stepKey, [
|
|
531
|
+
`Step "${step.stepKey}" failed with exit code ${result.exitCode}`,
|
|
532
|
+
]);
|
|
533
|
+
jobFailed = true;
|
|
534
|
+
}
|
|
535
|
+
}
|
|
536
|
+
} finally {
|
|
537
|
+
if (this.cleanWorkspace) {
|
|
538
|
+
try {
|
|
539
|
+
rmSync(workspace, { recursive: true, force: true });
|
|
540
|
+
} catch {
|
|
541
|
+
// ignore cleanup errors
|
|
542
|
+
}
|
|
543
|
+
}
|
|
544
|
+
}
|
|
545
|
+
|
|
546
|
+
job.completedAt = new Date().toISOString();
|
|
547
|
+
|
|
548
|
+
if (jobFailed) {
|
|
549
|
+
job.status = 'failure';
|
|
550
|
+
logLines('__job__', [`Job "${jobKey}" failed`]);
|
|
551
|
+
this.store.updateRunStatus(runId, 'failure');
|
|
552
|
+
} else {
|
|
553
|
+
job.status = 'success';
|
|
554
|
+
logLines('__job__', [`Job "${jobKey}" completed successfully`]);
|
|
555
|
+
await this.enqueueDependentJobs(runId, jobKey);
|
|
556
|
+
this.checkRunCompletion(runId);
|
|
557
|
+
}
|
|
558
|
+
}
|
|
559
|
+
|
|
560
|
+
private executeRunCommand(
|
|
561
|
+
command: string,
|
|
562
|
+
workspace: string,
|
|
563
|
+
jobSpec: JobPayload['jobSpec'],
|
|
564
|
+
env?: Record<string, string>,
|
|
565
|
+
) {
|
|
566
|
+
if (this.shouldUseDocker(jobSpec) && this.dockerExecutor) {
|
|
567
|
+
return this.dockerExecutor.execute({
|
|
568
|
+
image: jobSpec.image ?? this.defaultDockerImage,
|
|
569
|
+
workspace,
|
|
570
|
+
command,
|
|
571
|
+
env,
|
|
572
|
+
});
|
|
573
|
+
}
|
|
574
|
+
|
|
575
|
+
return executeRunStep(command, workspace, env);
|
|
576
|
+
}
|
|
577
|
+
|
|
578
|
+
private shouldUseDocker(jobSpec: JobPayload['jobSpec']): boolean {
|
|
579
|
+
if (!this.dockerExecutor) return false;
|
|
580
|
+
if (typeof jobSpec.image === 'string' && jobSpec.image.length > 0) return true;
|
|
581
|
+
if (jobSpec['runs-on'] === 'default' && this.dockerForDefault) return true;
|
|
582
|
+
return this.dockerRunOnLabels.has(jobSpec['runs-on']);
|
|
583
|
+
}
|
|
584
|
+
|
|
585
|
+
private async enqueueDependentJobs(runId: string, completedJobKey: string): Promise<void> {
|
|
586
|
+
const spec = this.pipelineSpecs.get(runId);
|
|
587
|
+
if (!spec) return;
|
|
588
|
+
|
|
589
|
+
const run = this.store.getRun(runId);
|
|
590
|
+
if (!run) return;
|
|
591
|
+
|
|
592
|
+
const completedJobs = new Set(
|
|
593
|
+
run.jobs.filter(j => j.status === 'success' || j.status === 'skipped').map(j => j.jobKey)
|
|
594
|
+
);
|
|
595
|
+
const enqueuedJobs = new Set(
|
|
596
|
+
run.jobs.filter(j => j.status !== 'pending').map(j => j.jobKey)
|
|
597
|
+
);
|
|
598
|
+
|
|
599
|
+
const readyJobs = getReadyJobs(spec, completedJobs, enqueuedJobs);
|
|
600
|
+
|
|
601
|
+
for (const readyJobKey of readyJobs) {
|
|
602
|
+
const jobSpec = spec.jobs[readyJobKey];
|
|
603
|
+
if (jobSpec) {
|
|
604
|
+
await this.queue.enqueue(this.queueName, {
|
|
605
|
+
runId,
|
|
606
|
+
jobKey: readyJobKey,
|
|
607
|
+
jobSpec,
|
|
608
|
+
});
|
|
609
|
+
}
|
|
610
|
+
}
|
|
611
|
+
}
|
|
612
|
+
|
|
613
|
+
private checkRunCompletion(runId: string): void {
|
|
614
|
+
const run = this.store.getRun(runId);
|
|
615
|
+
if (!run) return;
|
|
616
|
+
|
|
617
|
+
const allDone = run.jobs.every(j =>
|
|
618
|
+
j.status === 'success' || j.status === 'failure' || j.status === 'skipped'
|
|
619
|
+
);
|
|
620
|
+
|
|
621
|
+
if (allDone) {
|
|
622
|
+
const anyFailure = run.jobs.some(j => j.status === 'failure');
|
|
623
|
+
this.store.updateRunStatus(runId, anyFailure ? 'failure' : 'success');
|
|
624
|
+
}
|
|
625
|
+
}
|
|
626
|
+
}
|