@cifn/runner 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,557 @@
1
+ import { describe, it, expect, beforeEach } from 'vitest';
2
+ import { mkdtempSync, mkdirSync, writeFileSync, existsSync, readFileSync } from 'node:fs';
3
+ import { tmpdir } from 'node:os';
4
+ import { join } from 'node:path';
5
+ import {
6
+ MemoryStore,
7
+ MemoryQueueClient,
8
+ MemoryFileFnClient,
9
+ MemorySecFnClient,
10
+ DEFAULT_QUEUE_NAME,
11
+ createApp,
12
+ type PipelineSpec,
13
+ } from 'cifn';
14
+ import { Runner } from './runner.js';
15
+ import { MemoryLogFnClient } from './reporting/logfn-client.js';
16
+ import { executeArtifactUpload } from './steps/artifact-upload.js';
17
+ import { executeArtifactDownload } from './steps/artifact-download.js';
18
+ import { executeCacheSave } from './steps/cache-save.js';
19
+ import { executeCacheRestore } from './steps/cache-restore.js';
20
+
21
+ const TEST_API_KEY = 'runner-artifacts-test-key';
22
+
23
+ async function createAuthApp(options: { store: MemoryStore; queue?: MemoryQueueClient; logClient?: MemoryLogFnClient }) {
24
+ const secFn = new MemorySecFnClient();
25
+ await secFn.setSecret('CIFN_API_KEYS', TEST_API_KEY);
26
+ return createApp({ ...options, secFn });
27
+ }
28
+
29
+ function authHeaders() {
30
+ return {
31
+ 'Content-Type': 'application/json',
32
+ 'Authorization': `Bearer ${TEST_API_KEY}`,
33
+ };
34
+ }
35
+
36
// Unit tests for the artifact/upload step executor. Each test gets a fresh
// temp workspace on disk and an in-memory fileFn client, so nothing leaks
// between cases.
describe('executeArtifactUpload', () => {
  let workspace: string;
  let fileFn: MemoryFileFnClient;

  beforeEach(() => {
    workspace = mkdtempSync(join(tmpdir(), 'cifn-art-up-'));
    fileFn = new MemoryFileFnClient();
  });

  it('uploads a single file', async () => {
    writeFileSync(join(workspace, 'output.txt'), 'hello artifact');
    const result = await executeArtifactUpload({
      name: 'out',
      path: 'output.txt',
      workspace,
      runId: 'r1',
      fileFnClient: fileFn,
    });
    expect(result.success).toBe(true);
    // A successful upload yields a fileId referencing the stored blob.
    expect(result.fileId).toBeDefined();
    expect(result.lines.some(l => l.includes('Uploaded'))).toBe(true);
  });

  it('uploads a directory', async () => {
    mkdirSync(join(workspace, 'dist'));
    writeFileSync(join(workspace, 'dist', 'a.js'), 'console.log("a")');
    writeFileSync(join(workspace, 'dist', 'b.js'), 'console.log("b")');
    const result = await executeArtifactUpload({
      name: 'dist',
      path: 'dist',
      workspace,
      runId: 'r1',
      fileFnClient: fileFn,
    });
    expect(result.success).toBe(true);
    // Directory uploads report how many files were bundled.
    expect(result.lines.some(l => l.includes('2 file(s)'))).toBe(true);
  });

  it('fails when path does not exist', async () => {
    const result = await executeArtifactUpload({
      name: 'missing',
      path: 'nonexistent',
      workspace,
      runId: 'r1',
      fileFnClient: fileFn,
    });
    // Missing paths fail the step with a descriptive error, not a throw.
    expect(result.success).toBe(false);
    expect(result.error).toContain('not found');
  });

  // TV-LIMIT-001: Artifact name length validation
  it('TV-LIMIT-001: rejects artifact name exceeding 256 chars', async () => {
    writeFileSync(join(workspace, 'output.txt'), 'content');
    // One character past the limit.
    const longName = 'a'.repeat(257);
    const result = await executeArtifactUpload({
      name: longName,
      path: 'output.txt',
      workspace,
      runId: 'r1',
      fileFnClient: fileFn,
    });
    expect(result.success).toBe(false);
    expect(result.error).toContain('256');
  });

  it('TV-LIMIT-001: accepts artifact name of exactly 256 chars', async () => {
    writeFileSync(join(workspace, 'output.txt'), 'content');
    // Boundary value: exactly at the limit must still be accepted.
    const name = 'a'.repeat(256);
    const result = await executeArtifactUpload({
      name,
      path: 'output.txt',
      workspace,
      runId: 'r1',
      fileFnClient: fileFn,
    });
    expect(result.success).toBe(true);
  });
});
114
+
115
// Unit tests for the artifact/download step executor. Uploads via
// executeArtifactUpload into a shared in-memory fileFn client, then
// downloads into a separate workspace to prove round-tripping.
describe('executeArtifactDownload', () => {
  let fileFn: MemoryFileFnClient;

  beforeEach(() => {
    fileFn = new MemoryFileFnClient();
  });

  it('downloads and extracts uploaded artifact', async () => {
    const uploadWorkspace = mkdtempSync(join(tmpdir(), 'cifn-art-up2-'));
    mkdirSync(join(uploadWorkspace, 'dist'));
    writeFileSync(join(uploadWorkspace, 'dist', 'index.js'), 'main()');

    await executeArtifactUpload({
      name: 'dist',
      path: 'dist',
      workspace: uploadWorkspace,
      runId: 'r1',
      fileFnClient: fileFn,
    });

    // Fresh workspace: only the downloaded artifact content should appear.
    const downloadWorkspace = mkdtempSync(join(tmpdir(), 'cifn-art-down-'));
    const result = await executeArtifactDownload({
      name: 'dist',
      workspace: downloadWorkspace,
      runId: 'r1',
      fileFnClient: fileFn,
    });

    expect(result.success).toBe(true);
    // Note: the archive's contents are extracted at the workspace root
    // (index.js, not dist/index.js).
    expect(existsSync(join(downloadWorkspace, 'index.js'))).toBe(true);
    expect(readFileSync(join(downloadWorkspace, 'index.js'), 'utf-8')).toBe('main()');
  });

  it('fails when artifact not found', async () => {
    const workspace = mkdtempSync(join(tmpdir(), 'cifn-art-down2-'));
    const result = await executeArtifactDownload({
      name: 'missing',
      workspace,
      runId: 'r1',
      fileFnClient: fileFn,
    });
    expect(result.success).toBe(false);
    expect(result.error).toContain('not found');
  });
});
160
+
161
// Unit tests for the cache/save and cache/restore step executors, including
// TV-INT-003 round-trip behavior and TV-LIMIT-001 key-length validation.
describe('executeCacheSave and executeCacheRestore', () => {
  let fileFn: MemoryFileFnClient;

  beforeEach(() => {
    fileFn = new MemoryFileFnClient();
  });

  it('TV-INT-003: save then restore populates workspace', async () => {
    const saveWorkspace = mkdtempSync(join(tmpdir(), 'cifn-cache-save-'));
    mkdirSync(join(saveWorkspace, 'node_modules'));
    writeFileSync(join(saveWorkspace, 'node_modules', 'pkg.json'), '{"name":"test"}');

    const saveResult = await executeCacheSave({
      key: 'deps-1',
      paths: ['node_modules'],
      workspace: saveWorkspace,
      fileFnClient: fileFn,
    });
    expect(saveResult.success).toBe(true);

    // Restore into a brand-new workspace; unlike artifact download, the
    // cached paths keep their relative layout (node_modules/pkg.json).
    const restoreWorkspace = mkdtempSync(join(tmpdir(), 'cifn-cache-restore-'));
    const restoreResult = await executeCacheRestore({
      key: 'deps-1',
      workspace: restoreWorkspace,
      fileFnClient: fileFn,
    });
    expect(restoreResult.success).toBe(true);
    expect(restoreResult.hit).toBe(true);
    expect(existsSync(join(restoreWorkspace, 'node_modules', 'pkg.json'))).toBe(true);
    expect(readFileSync(join(restoreWorkspace, 'node_modules', 'pkg.json'), 'utf-8')).toBe('{"name":"test"}');
  });

  it('TV-INT-003: cache miss does not fail step', async () => {
    const workspace = mkdtempSync(join(tmpdir(), 'cifn-cache-miss-'));
    const result = await executeCacheRestore({
      key: 'nonexistent-key',
      workspace,
      fileFnClient: fileFn,
    });
    // A miss is a successful step with hit=false, mirroring CI conventions.
    expect(result.success).toBe(true);
    expect(result.hit).toBe(false);
  });

  it('save with no matching files succeeds', async () => {
    const workspace = mkdtempSync(join(tmpdir(), 'cifn-cache-empty-'));
    const result = await executeCacheSave({
      key: 'empty',
      paths: ['nonexistent-dir'],
      workspace,
      fileFnClient: fileFn,
    });
    expect(result.success).toBe(true);
    expect(result.lines.some(l => l.includes('No files found'))).toBe(true);
  });

  // TV-LIMIT-001: Cache key length validation
  it('TV-LIMIT-001: rejects cache key exceeding 1KB in save', async () => {
    const workspace = mkdtempSync(join(tmpdir(), 'cifn-cache-long-'));
    writeFileSync(join(workspace, 'file.txt'), 'content');
    // One byte past the 1024-char limit.
    const longKey = 'k'.repeat(1025);
    const result = await executeCacheSave({
      key: longKey,
      paths: ['file.txt'],
      workspace,
      fileFnClient: fileFn,
    });
    expect(result.success).toBe(false);
    expect(result.error).toContain('1KB');
  });

  it('TV-LIMIT-001: rejects cache key exceeding 1KB in restore', async () => {
    const workspace = mkdtempSync(join(tmpdir(), 'cifn-cache-long-'));
    const longKey = 'k'.repeat(1025);
    const result = await executeCacheRestore({
      key: longKey,
      workspace,
      fileFnClient: fileFn,
    });
    expect(result.success).toBe(false);
    expect(result.error).toContain('1KB');
  });

  it('TV-LIMIT-001: accepts cache key of exactly 1KB', async () => {
    const workspace = mkdtempSync(join(tmpdir(), 'cifn-cache-1kb-'));
    writeFileSync(join(workspace, 'file.txt'), 'content');
    // Boundary value: exactly 1024 chars must pass both save and restore.
    const key = 'k'.repeat(1024);
    const saveResult = await executeCacheSave({
      key,
      paths: ['file.txt'],
      workspace,
      fileFnClient: fileFn,
    });
    expect(saveResult.success).toBe(true);

    const restoreResult = await executeCacheRestore({
      key,
      workspace,
      fileFnClient: fileFn,
    });
    expect(restoreResult.success).toBe(true);
  });
});
263
+
264
// Integration test: a two-job pipeline where job A uploads an artifact and
// dependent job B downloads it, driven end-to-end through the Runner and an
// in-memory queue/store.
describe('TV-INT-002: Runner artifact upload/download across jobs', () => {
  it('job A uploads artifact, job B downloads it', async () => {
    const store = new MemoryStore();
    const queue = new MemoryQueueClient();
    const logClient = new MemoryLogFnClient();
    const fileFn = new MemoryFileFnClient();

    const runner = new Runner({
      store, queue, logClient,
      fileFnClient: fileFn,
      // artifactStore records artifact metadata so store.getArtifacts works.
      artifactStore: store,
      // NOTE(review): cleanWorkspace=false presumably keeps job workspaces
      // on disk after each job — confirm against Runner's implementation.
      cleanWorkspace: false,
    });

    const pipeline: PipelineSpec = {
      name: 'artifact-test',
      on: { workflow_dispatch: {} },
      jobs: {
        build: {
          'runs-on': 'default',
          steps: [
            { run: 'mkdir -p dist && echo "built" > dist/output.txt' },
            { uses: 'artifact/upload' as any, with: { name: 'dist', path: 'dist' } },
          ],
        },
        deploy: {
          'runs-on': 'default',
          needs: ['build'],
          steps: [
            { uses: 'artifact/download' as any, with: { name: 'dist' } },
            // Artifact contents extract at the workspace root (output.txt).
            { run: 'cat output.txt' },
          ],
        },
      },
    };

    const run = store.createRun({
      pipelineSpec: pipeline,
      trigger: { type: 'workflow_dispatch' },
    });

    runner.registerPipelineSpec(run.id, pipeline);

    // Only 'build' is enqueued; the runner is expected to schedule the
    // dependent 'deploy' job itself once 'build' succeeds.
    await queue.enqueue(DEFAULT_QUEUE_NAME, {
      runId: run.id,
      jobKey: 'build',
      jobSpec: pipeline.jobs.build,
    });

    await runner.processAllJobs();

    const updatedRun = store.getRun(run.id)!;
    expect(updatedRun.status).toBe('success');

    const buildJob = updatedRun.jobs.find(j => j.jobKey === 'build')!;
    expect(buildJob.status).toBe('success');
    expect(buildJob.steps[0].status).toBe('success');
    expect(buildJob.steps[1].status).toBe('success');

    const deployJob = updatedRun.jobs.find(j => j.jobKey === 'deploy')!;
    expect(deployJob.status).toBe('success');
    expect(deployJob.steps[0].status).toBe('success');
    expect(deployJob.steps[1].status).toBe('success');

    // The upload step must have registered exactly one artifact on the run.
    const artifacts = store.getArtifacts(run.id);
    expect(artifacts).toHaveLength(1);
    expect(artifacts[0].name).toBe('dist');
    expect(artifacts[0].fileId).toBeDefined();

    // 'cat output.txt' in deploy proves the downloaded content is readable.
    const logs = logClient.getLines(run.id, 'deploy');
    const logLines = logs.map(l => l.line);
    expect(logLines.some(l => l.includes('built'))).toBe(true);
  });
});
338
+
339
// Integration tests: cache/save in one run is visible to cache/restore in a
// later run (the fileFn client is shared), and a missing key is a benign miss.
describe('TV-INT-003: Runner cache save/restore', () => {
  it('cache/save then cache/restore in another run restores files', async () => {
    const store = new MemoryStore();
    const queue = new MemoryQueueClient();
    const logClient = new MemoryLogFnClient();
    const fileFn = new MemoryFileFnClient();

    const runner = new Runner({
      store, queue, logClient,
      fileFnClient: fileFn,
    });

    // Run 1: produce node_modules and save it under key 'deps-1'.
    const savePipeline: PipelineSpec = {
      name: 'cache-save',
      on: { workflow_dispatch: {} },
      jobs: {
        build: {
          'runs-on': 'default',
          steps: [
            { run: 'mkdir -p node_modules && echo "cached" > node_modules/pkg.txt' },
            { uses: 'cache/save' as any, with: { key: 'deps-1', paths: ['node_modules'] } },
          ],
        },
      },
    };

    const run1 = store.createRun({
      pipelineSpec: savePipeline,
      trigger: { type: 'workflow_dispatch' },
    });
    runner.registerPipelineSpec(run1.id, savePipeline);
    await queue.enqueue(DEFAULT_QUEUE_NAME, {
      runId: run1.id,
      jobKey: 'build',
      jobSpec: savePipeline.jobs.build,
    });
    await runner.processAllJobs();

    expect(store.getRun(run1.id)!.status).toBe('success');

    // Run 2: restore under the same key, then read the restored file so the
    // logs prove its contents survived the round trip.
    const restorePipeline: PipelineSpec = {
      name: 'cache-restore',
      on: { workflow_dispatch: {} },
      jobs: {
        build: {
          'runs-on': 'default',
          steps: [
            { uses: 'cache/restore' as any, with: { key: 'deps-1' } },
            { run: 'cat node_modules/pkg.txt' },
          ],
        },
      },
    };

    const run2 = store.createRun({
      pipelineSpec: restorePipeline,
      trigger: { type: 'workflow_dispatch' },
    });
    runner.registerPipelineSpec(run2.id, restorePipeline);
    await queue.enqueue(DEFAULT_QUEUE_NAME, {
      runId: run2.id,
      jobKey: 'build',
      jobSpec: restorePipeline.jobs.build,
    });
    await runner.processAllJobs();

    const updatedRun2 = store.getRun(run2.id)!;
    expect(updatedRun2.status).toBe('success');

    const job = updatedRun2.jobs.find(j => j.jobKey === 'build')!;
    expect(job.steps[0].status).toBe('success');
    expect(job.steps[1].status).toBe('success');

    const logs = logClient.getLines(run2.id, 'build');
    const logLines = logs.map(l => l.line);
    // 'cache hit' from the restore step; 'cached' from the cat step.
    expect(logLines.some(l => l.includes('cache hit'))).toBe(true);
    expect(logLines.some(l => l.includes('cached'))).toBe(true);
  });

  it('cache/restore with missing key is cache miss, step still succeeds', async () => {
    const store = new MemoryStore();
    const queue = new MemoryQueueClient();
    const logClient = new MemoryLogFnClient();
    const fileFn = new MemoryFileFnClient();

    const runner = new Runner({
      store, queue, logClient,
      fileFnClient: fileFn,
    });

    const pipeline: PipelineSpec = {
      name: 'cache-miss',
      on: { workflow_dispatch: {} },
      jobs: {
        build: {
          'runs-on': 'default',
          steps: [
            { uses: 'cache/restore' as any, with: { key: 'nonexistent' } },
            { run: 'echo ok' },
          ],
        },
      },
    };

    const run = store.createRun({
      pipelineSpec: pipeline,
      trigger: { type: 'workflow_dispatch' },
    });
    runner.registerPipelineSpec(run.id, pipeline);
    await queue.enqueue(DEFAULT_QUEUE_NAME, {
      runId: run.id,
      jobKey: 'build',
      jobSpec: pipeline.jobs.build,
    });
    await runner.processAllJobs();

    // A miss must not fail the step, the job, or the run.
    const updatedRun = store.getRun(run.id)!;
    expect(updatedRun.status).toBe('success');

    const job = updatedRun.jobs.find(j => j.jobKey === 'build')!;
    expect(job.steps[0].status).toBe('success');
    expect(job.steps[1].status).toBe('success');

    const logs = logClient.getLines(run.id, 'build');
    expect(logs.some(l => l.line.includes('cache miss'))).toBe(true);
  });
});
466
+
467
// API tests for GET /runs/:runId/artifacts: runs are created through the
// authenticated HTTP app, executed by a Runner sharing the same store, then
// queried back over HTTP.
describe('TV-API-014: GET /runs/:runId/artifacts', () => {
  it('returns artifacts after upload', async () => {
    const store = new MemoryStore();
    const queue = new MemoryQueueClient();
    const logClient = new MemoryLogFnClient();
    const fileFn = new MemoryFileFnClient();

    // App and Runner share store/queue so HTTP-created runs are executable.
    const { app } = await createAuthApp({ store, queue, logClient });
    const runner = new Runner({
      store, queue, logClient,
      fileFnClient: fileFn,
      artifactStore: store,
    });

    const pipeline: PipelineSpec = {
      name: 'art-api',
      on: { workflow_dispatch: {} },
      jobs: {
        build: {
          'runs-on': 'default',
          steps: [
            { run: 'mkdir dist && echo x > dist/out.txt' },
            { uses: 'artifact/upload' as any, with: { name: 'out', path: 'dist' } },
          ],
        },
      },
    };

    // Create the run via the API (which also enqueues its jobs).
    const createRes = await app.request('http://localhost/api/v1/runs', {
      method: 'POST',
      headers: authHeaders(),
      body: JSON.stringify({
        trigger: { type: 'workflow_dispatch' },
        pipelineSpec: pipeline,
      }),
    });
    const { data } = await createRes.json() as { data: { runId: string } };
    const runId = data.runId;

    runner.registerPipelineSpec(runId, pipeline);
    await runner.processAllJobs();

    const artRes = await app.request(`http://localhost/api/v1/runs/${runId}/artifacts`, {
      headers: authHeaders(),
    });
    expect(artRes.status).toBe(200);
    const artBody = await artRes.json() as { ok: boolean; data: { artifacts: Array<{ name: string; fileId?: string }> } };
    expect(artBody.ok).toBe(true);
    expect(artBody.data.artifacts).toHaveLength(1);
    expect(artBody.data.artifacts[0].name).toBe('out');
    expect(artBody.data.artifacts[0].fileId).toBeDefined();
  });

  it('returns empty array when no artifacts', async () => {
    const store = new MemoryStore();
    const queue = new MemoryQueueClient();
    const { app } = await createAuthApp({ store, queue });

    const createRes = await app.request('http://localhost/api/v1/runs', {
      method: 'POST',
      headers: authHeaders(),
      body: JSON.stringify({
        trigger: { type: 'workflow_dispatch' },
        pipelineSpec: {
          name: 'no-art',
          on: { workflow_dispatch: {} },
          jobs: { build: { 'runs-on': 'default', steps: [{ run: 'echo ok' }] } },
        },
      }),
    });
    const { data } = await createRes.json() as { data: { runId: string } };

    // The run is never executed here; an unexecuted run simply has no
    // artifacts and must return [] rather than an error.
    const artRes = await app.request(`http://localhost/api/v1/runs/${data.runId}/artifacts`, {
      headers: authHeaders(),
    });
    expect(artRes.status).toBe(200);
    const artBody = await artRes.json() as { ok: boolean; data: { artifacts: unknown[] } };
    expect(artBody.ok).toBe(true);
    expect(artBody.data.artifacts).toEqual([]);
  });

  it('returns 404 for nonexistent run', async () => {
    const store = new MemoryStore();
    const { app } = await createAuthApp({ store });

    const res = await app.request('http://localhost/api/v1/runs/nonexistent/artifacts', {
      headers: authHeaders(),
    });
    expect(res.status).toBe(404);
  });
});
@@ -0,0 +1,76 @@
1
+ import { spawnSync } from 'node:child_process';
2
+ import type { RunStepResult } from './executor/run-step.js';
3
+
4
/** Inputs for running one command inside a Docker container. */
export interface DockerExecutorOptions {
  // Docker image reference to run (e.g. "node:20").
  image: string;
  // Host directory mounted at /workspace and used as the working directory.
  workspace: string;
  // Shell command executed inside the container via `sh -lc`.
  command: string;
  // Extra environment variables passed to the container via `-e` flags.
  env?: Record<string, string>;
}

/**
 * Abstraction over invoking the `docker` CLI, so tests can inject a fake
 * runner instead of spawning real processes.
 */
export interface DockerCommandRunner {
  run(args: string[], options: { cwd: string; env?: Record<string, string> }): {
    // Process exit status; null when the process never ran or was killed.
    status: number | null;
    stdout: string;
    stderr: string;
    // Launch-failure message (e.g. docker binary missing), if any.
    error?: string;
  };
}
19
+
20
+ class DefaultDockerCommandRunner implements DockerCommandRunner {
21
+ run(args: string[], options: { cwd: string; env?: Record<string, string> }) {
22
+ const res = spawnSync('docker', args, {
23
+ cwd: options.cwd,
24
+ encoding: 'utf-8',
25
+ env: options.env ? { ...process.env, ...options.env } : process.env,
26
+ timeout: 600_000,
27
+ });
28
+ return {
29
+ status: res.status,
30
+ stdout: res.stdout ?? '',
31
+ stderr: res.stderr ?? '',
32
+ error: res.error ? String(res.error.message ?? res.error) : undefined,
33
+ };
34
+ }
35
+ }
36
+
37
+ export class DockerExecutor {
38
+ private readonly runner: DockerCommandRunner;
39
+
40
+ constructor(runner?: DockerCommandRunner) {
41
+ this.runner = runner ?? new DefaultDockerCommandRunner();
42
+ }
43
+
44
+ execute(options: DockerExecutorOptions): RunStepResult {
45
+ const args = [
46
+ 'run',
47
+ '--rm',
48
+ '-v', `${options.workspace}:/workspace`,
49
+ '-w', '/workspace',
50
+ ];
51
+
52
+ for (const [key, value] of Object.entries(options.env ?? {})) {
53
+ args.push('-e', `${key}=${value}`);
54
+ }
55
+
56
+ args.push(options.image, 'sh', '-lc', options.command);
57
+
58
+ const output = this.runner.run(args, { cwd: options.workspace, env: options.env });
59
+
60
+ const stdout = output.stdout ?? '';
61
+ const stderr = output.stderr ?? '';
62
+ const lines = [...stdout.split('\n'), ...stderr.split('\n')].filter(line => line !== '');
63
+ const exitCode = output.status ?? 1;
64
+
65
+ if (output.error) {
66
+ lines.push(`docker error: ${output.error}`);
67
+ }
68
+
69
+ return {
70
+ exitCode,
71
+ stdout,
72
+ stderr,
73
+ lines,
74
+ };
75
+ }
76
+ }
@@ -0,0 +1,34 @@
1
+ import { execSync, type ExecSyncOptionsWithStringEncoding } from 'node:child_process';
2
+
3
+ export interface RunStepResult {
4
+ exitCode: number;
5
+ stdout: string;
6
+ stderr: string;
7
+ lines: string[];
8
+ }
9
+
10
+ export function executeRunStep(command: string, workspacePath: string, env?: Record<string, string>): RunStepResult {
11
+ const execEnv = env ? { ...process.env, ...env } : process.env;
12
+ try {
13
+ const stdout = execSync(command, {
14
+ cwd: workspacePath,
15
+ encoding: 'utf-8',
16
+ stdio: ['pipe', 'pipe', 'pipe'],
17
+ timeout: 300_000,
18
+ shell: '/bin/sh',
19
+ env: execEnv,
20
+ } satisfies ExecSyncOptionsWithStringEncoding);
21
+ const lines = stdout.split('\n').filter(l => l !== '');
22
+ return { exitCode: 0, stdout, stderr: '', lines };
23
+ } catch (err: unknown) {
24
+ const error = err as { status?: number; stdout?: string; stderr?: string };
25
+ const stdout = typeof error.stdout === 'string' ? error.stdout : '';
26
+ const stderr = typeof error.stderr === 'string' ? error.stderr : '';
27
+ const exitCode = typeof error.status === 'number' ? error.status : 1;
28
+ const lines = [
29
+ ...stdout.split('\n').filter(l => l !== ''),
30
+ ...stderr.split('\n').filter(l => l !== ''),
31
+ ];
32
+ return { exitCode, stdout, stderr, lines };
33
+ }
34
+ }
package/src/index.ts ADDED
@@ -0,0 +1,23 @@
1
// Public entry point of @cifn/runner: re-exports the runner, the step
// executors, the reporting helpers, and their option/result types.

// Core runner and the generic shell-step executor.
export { Runner } from './runner.js';
export type { RunnerOptions } from './runner.js';
export { executeRunStep } from './executor/run-step.js';
export type { RunStepResult } from './executor/run-step.js';

// Log reporting client.
export { MemoryLogFnClient } from './reporting/logfn-client.js';
export type { LogEntry, LogFnClient } from './reporting/logfn-client.js';

// Built-in step executors (checkout, artifacts, cache).
export { executeCheckout } from './steps/checkout.js';
export type { CheckoutOptions, CheckoutResult } from './steps/checkout.js';
export { executeArtifactUpload } from './steps/artifact-upload.js';
export type { ArtifactUploadOptions, ArtifactUploadResult } from './steps/artifact-upload.js';
export { executeArtifactDownload } from './steps/artifact-download.js';
export type { ArtifactDownloadOptions, ArtifactDownloadResult } from './steps/artifact-download.js';
export { executeCacheSave } from './steps/cache-save.js';
export type { CacheSaveOptions, CacheSaveResult } from './steps/cache-save.js';
export { executeCacheRestore } from './steps/cache-restore.js';
export type { CacheRestoreOptions, CacheRestoreResult } from './steps/cache-restore.js';

// Secret redaction for log output.
export { redactSecrets } from './reporting/redact.js';

// testFn / hostFn integration steps.
export { executeTestFnRun } from './steps/testfn-run.js';
export type { TestFnRunOptions, TestFnRunResult } from './steps/testfn-run.js';
export { executeHostFnDeploy } from './steps/hostfn-deploy.js';
export type { HostFnDeployOptions, HostFnDeployResult } from './steps/hostfn-deploy.js';

// Docker-based command execution.
export { DockerExecutor } from './docker-executor.js';
export type { DockerExecutorOptions, DockerCommandRunner } from './docker-executor.js';