@livingdata/pipex 0.0.9 → 0.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +154 -14
- package/dist/__tests__/errors.js +162 -0
- package/dist/__tests__/helpers.js +41 -0
- package/dist/__tests__/types.js +8 -0
- package/dist/cli/__tests__/condition.js +23 -0
- package/dist/cli/__tests__/dag.js +154 -0
- package/dist/cli/__tests__/pipeline-loader.js +267 -0
- package/dist/cli/__tests__/pipeline-runner.js +257 -0
- package/dist/cli/__tests__/state-persistence.js +80 -0
- package/dist/cli/__tests__/state.js +58 -0
- package/dist/cli/__tests__/step-runner.js +116 -0
- package/dist/cli/commands/bundle.js +35 -0
- package/dist/cli/commands/cat.js +54 -0
- package/dist/cli/commands/exec.js +89 -0
- package/dist/cli/commands/export.js +2 -2
- package/dist/cli/commands/inspect.js +1 -1
- package/dist/cli/commands/list.js +2 -1
- package/dist/cli/commands/logs.js +1 -1
- package/dist/cli/commands/prune.js +1 -1
- package/dist/cli/commands/rm-step.js +41 -0
- package/dist/cli/commands/run-bundle.js +59 -0
- package/dist/cli/commands/run.js +9 -4
- package/dist/cli/commands/show.js +42 -7
- package/dist/cli/condition.js +11 -0
- package/dist/cli/dag.js +143 -0
- package/dist/cli/index.js +6 -0
- package/dist/cli/interactive-reporter.js +227 -0
- package/dist/cli/pipeline-loader.js +10 -110
- package/dist/cli/pipeline-runner.js +164 -78
- package/dist/cli/reporter.js +2 -158
- package/dist/cli/state.js +8 -0
- package/dist/cli/step-loader.js +25 -0
- package/dist/cli/step-resolver.js +111 -0
- package/dist/cli/step-runner.js +226 -0
- package/dist/cli/utils.js +0 -46
- package/dist/core/__tests__/bundle.js +663 -0
- package/dist/core/__tests__/condition.js +23 -0
- package/dist/core/__tests__/dag.js +154 -0
- package/dist/core/__tests__/env-file.test.js +41 -0
- package/dist/core/__tests__/event-aggregator.js +244 -0
- package/dist/core/__tests__/pipeline-loader.js +267 -0
- package/dist/core/__tests__/pipeline-runner.js +257 -0
- package/dist/core/__tests__/state-persistence.js +80 -0
- package/dist/core/__tests__/state.js +58 -0
- package/dist/core/__tests__/step-runner.js +118 -0
- package/dist/core/__tests__/stream-reporter.js +142 -0
- package/dist/core/__tests__/transport.js +50 -0
- package/dist/core/__tests__/utils.js +40 -0
- package/dist/core/bundle.js +130 -0
- package/dist/core/condition.js +11 -0
- package/dist/core/dag.js +143 -0
- package/dist/core/env-file.js +6 -0
- package/dist/core/event-aggregator.js +114 -0
- package/dist/core/index.js +14 -0
- package/dist/core/pipeline-loader.js +81 -0
- package/dist/core/pipeline-runner.js +360 -0
- package/dist/core/reporter.js +11 -0
- package/dist/core/state.js +110 -0
- package/dist/core/step-loader.js +25 -0
- package/dist/core/step-resolver.js +117 -0
- package/dist/core/step-runner.js +225 -0
- package/dist/core/stream-reporter.js +41 -0
- package/dist/core/transport.js +9 -0
- package/dist/core/utils.js +56 -0
- package/dist/engine/__tests__/workspace.js +288 -0
- package/dist/engine/docker-executor.js +10 -2
- package/dist/engine/index.js +1 -0
- package/dist/engine/workspace.js +76 -12
- package/dist/errors.js +122 -0
- package/dist/index.js +3 -0
- package/dist/kits/__tests__/index.js +23 -0
- package/dist/kits/builtin/__tests__/node.js +74 -0
- package/dist/kits/builtin/__tests__/python.js +67 -0
- package/dist/kits/builtin/__tests__/shell.js +74 -0
- package/dist/kits/builtin/node.js +10 -5
- package/dist/kits/builtin/python.js +10 -5
- package/dist/kits/builtin/shell.js +2 -1
- package/dist/kits/index.js +2 -1
- package/package.json +6 -3
- package/dist/cli/types.js +0 -3
- package/dist/engine/docker-runtime.js +0 -65
- package/dist/engine/runtime.js +0 -2
- package/dist/kits/bash.js +0 -19
- package/dist/kits/builtin/bash.js +0 -19
- package/dist/kits/node.js +0 -56
- package/dist/kits/python.js +0 -51
- package/dist/kits/types.js +0 -1
- package/dist/reporter.js +0 -13
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { readFile } from 'node:fs/promises';
|
|
2
|
+
import { ValidationError } from '../errors.js';
|
|
3
|
+
import { parsePipelineFile } from './pipeline-loader.js';
|
|
4
|
+
import { resolveStep, validateStep } from './step-resolver.js';
|
|
5
|
+
/**
 * Loads a single step definition from a file, resolves it (expanding kit
 * steps into image + cmd) and validates the result.
 *
 * @param {string} filePath - path to the step definition file
 * @param {string|undefined} stepIdOverride - explicit step ID (CLI --step);
 *   required when the file defines neither "id" nor "name"
 * @returns {Promise<object>} the resolved, validated step
 * @throws {ValidationError} when the file content is not an object or no
 *   step ID can be determined
 */
export async function loadStepFile(filePath, stepIdOverride) {
    const text = await readFile(filePath, 'utf8');
    const definition = parsePipelineFile(text, filePath);
    if (typeof definition !== 'object' || definition === null) {
        throw new ValidationError('Step file must contain an object');
    }
    const hasId = 'id' in definition && definition.id;
    const hasName = 'name' in definition && definition.name;
    // Without an id, a name, or an explicit override there is no way to
    // address the step, so refuse to load it.
    if (!hasId && !hasName && !stepIdOverride) {
        throw new ValidationError('Step file must have "id" or "name", or use --step to set an ID');
    }
    if (stepIdOverride) {
        definition.id = stepIdOverride;
    }
    const resolved = resolveStep(definition);
    validateStep(resolved);
    return resolved;
}
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
import { ValidationError } from '../errors.js';
|
|
2
|
+
import { getKit } from '../kits/index.js';
|
|
3
|
+
import { isKitStep } from '../types.js';
|
|
4
|
+
import { slugify, mergeEnv, mergeCaches, mergeMounts } from './pipeline-loader.js';
|
|
5
|
+
/**
 * Resolves a step definition into a fully resolved Step.
 * Plain container steps pass through with a normalized id; kit steps
 * (`uses`) are expanded into image + cmd via the kit registry.
 * @throws {ValidationError} when neither "id" nor "name" is defined
 */
export function resolveStep(step) {
    if (!step.id && !step.name) {
        throw new ValidationError('Invalid step: at least one of "id" or "name" must be defined');
    }
    const name = step.name;
    // Fall back to a slug of the name only when no explicit id exists.
    const id = step.id ?? slugify(name);
    return isKitStep(step)
        ? resolveKitStep(step, id, name)
        : { ...step, id, name };
}
|
|
20
|
+
/**
 * Expands a kit step (`uses` + `with`) into a fully resolved container step.
 * The kit supplies image/cmd; env, caches, mounts and sources are combined
 * with the step's own values via the merge helpers — presumably step-level
 * values take precedence over kit defaults (helpers live in
 * pipeline-loader.js; verify there).
 */
function resolveKitStep(step, id, name) {
    const kit = getKit(step.uses);
    // Kit resolves its inputs (`with`) into a concrete container spec.
    const kitOutput = kit.resolve(step.with ?? {});
    return {
        id,
        name,
        // Image and command always come from the kit output.
        image: kitOutput.image,
        cmd: kitOutput.cmd,
        env: mergeEnv(kitOutput.env, step.env),
        envFile: step.envFile,
        inputs: step.inputs,
        outputPath: step.outputPath,
        caches: mergeCaches(kitOutput.caches, step.caches),
        mounts: mergeMounts(kitOutput.mounts, step.mounts),
        sources: mergeMounts(kitOutput.sources, step.sources),
        timeoutSec: step.timeoutSec,
        allowFailure: step.allowFailure,
        // Explicit step setting wins; kit default applies only when unset.
        allowNetwork: step.allowNetwork ?? kitOutput.allowNetwork,
        retries: step.retries,
        retryDelayMs: step.retryDelayMs,
        if: step.if
    };
}
|
|
43
|
+
/**
 * Validates a resolved step for correctness and security.
 * Checks the id, requires image + non-empty cmd, constrains envFile to a
 * relative path, and delegates mount/source/cache validation.
 * @throws {ValidationError} on the first failing check
 */
export function validateStep(step) {
    validateIdentifier(step.id, 'step id');
    if (typeof step.image !== 'string' || !step.image) {
        throw new ValidationError(`Invalid step ${step.id}: image is required`);
    }
    if (!(Array.isArray(step.cmd) && step.cmd.length > 0)) {
        throw new ValidationError(`Invalid step ${step.id}: cmd must be a non-empty array`);
    }
    if (step.envFile) {
        if (typeof step.envFile !== 'string') {
            throw new ValidationError(`Step ${step.id}: envFile must be a string`);
        }
        // Absolute envFile paths could point outside the pipeline tree.
        if (step.envFile.startsWith('/')) {
            throw new ValidationError(`Step ${step.id}: envFile '${step.envFile}' must be a relative path`);
        }
    }
    if (step.inputs) {
        for (const input of step.inputs) {
            validateIdentifier(input.step, `input step name in step ${step.id}`);
        }
    }
    if (step.mounts) {
        validateMounts(step.id, step.mounts);
    }
    if (step.sources) {
        validateMounts(step.id, step.sources);
    }
    if (step.caches) {
        validateCaches(step.id, step.caches);
    }
}
|
|
77
|
+
/**
 * Ensures an identifier is safe to use (e.g. as a path segment).
 * Allowed: one or more of [A-Za-z0-9_-]; everything else throws.
 * @param {string} id - identifier to validate
 * @param {string} context - label used in the error message
 * @throws {ValidationError} when id is not a string or contains a
 *   disallowed character
 */
function validateIdentifier(id, context) {
    // Reject non-strings explicitly: RegExp.test coerces, so `undefined`
    // would otherwise become the string 'undefined' and pass.
    // \w is [A-Za-z0-9_], so '.' is rejected here too — this single check
    // also rules out any '..' traversal sequence (the previous explicit
    // `id.includes('..')` check was unreachable and has been removed).
    if (typeof id !== 'string' || !/^[\w-]+$/.test(id)) {
        throw new ValidationError(`Invalid ${context}: '${id}' must contain only alphanumeric characters, underscore, and hyphen`);
    }
}
|
|
85
|
+
/**
 * Validates a list of bind mounts for one step.
 * host must be a relative (non-absolute) string; container must be an
 * absolute string without '..' traversal.
 * @throws {ValidationError} on the first invalid mount
 */
function validateMounts(stepId, mounts) {
    for (const { host, container } of mounts) {
        if (typeof host !== 'string' || !host) {
            throw new ValidationError(`Step ${stepId}: mount.host is required and must be a string`);
        }
        if (host.startsWith('/')) {
            throw new ValidationError(`Step ${stepId}: mount.host '${host}' must be a relative path`);
        }
        if (typeof container !== 'string' || !container) {
            throw new ValidationError(`Step ${stepId}: mount.container is required and must be a string`);
        }
        if (!container.startsWith('/')) {
            throw new ValidationError(`Step ${stepId}: mount.container '${container}' must be an absolute path`);
        }
        if (container.includes('..')) {
            throw new ValidationError(`Step ${stepId}: mount.container '${container}' must not contain '..'`);
        }
    }
}
|
|
104
|
+
/**
 * Validates a list of named caches for one step.
 * name must be a safe identifier; path must be an absolute string.
 * @throws {ValidationError} on the first invalid cache entry
 */
function validateCaches(stepId, caches) {
    for (const { name, path } of caches) {
        if (typeof name !== 'string' || !name) {
            throw new ValidationError(`Step ${stepId}: cache.name is required and must be a string`);
        }
        validateIdentifier(name, `cache name in step ${stepId}`);
        if (typeof path !== 'string' || !path) {
            throw new ValidationError(`Step ${stepId}: cache.path is required and must be a string`);
        }
        if (!path.startsWith('/')) {
            throw new ValidationError(`Step ${stepId}: cache.path '${path}' must be an absolute path`);
        }
    }
}
|
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
import process from 'node:process';
|
|
2
|
+
import { cp, writeFile } from 'node:fs/promises';
|
|
3
|
+
import { setTimeout } from 'node:timers/promises';
|
|
4
|
+
import { createWriteStream } from 'node:fs';
|
|
5
|
+
import { join } from 'node:path';
|
|
6
|
+
import { ContainerCrashError, PipexError } from '../errors.js';
|
|
7
|
+
import { StateManager } from './state.js';
|
|
8
|
+
import { dirSize, resolveHostPath } from './utils.js';
|
|
9
|
+
/**
 * Executes a single step in a workspace.
 * Adapted from PipelineRunner.executeStep() for standalone use.
 *
 * Injected collaborators:
 *  - runtime: container runtime; `runtime.run(workspace, spec, onLine)`
 *    executes the step's container and streams log lines via the callback.
 *  - reporter: receives lifecycle events (STEP_STARTING, STEP_LOG,
 *    STEP_SKIPPED, STEP_RETRYING, STEP_FINISHED, STEP_FAILED).
 */
export class StepRunner {
    runtime;
    reporter;
    constructor(runtime, reporter) {
        this.runtime = runtime;
        this.reporter = reporter;
    }
    /**
     * Runs one step end-to-end: fingerprint-based cache check (skipped when
     * `force` or `ephemeral`), then container execution with retries and a
     * commit/discard of the produced run. Resolves to an object carrying at
     * least `exitCode` (and `runId` for committed/cached runs).
     */
    async run(options) {
        const { workspace, state, step, inputs, pipelineRoot, force, ephemeral, job } = options;
        const stepRef = { id: step.id, displayName: step.name ?? step.id };
        // Resolve host mount paths up front: they feed into the fingerprint
        // so a changed mount location invalidates the cache.
        // NOTE(review): buildMounts() re-resolves the same mounts later —
        // looks like duplicated work; consider passing resolvedMounts down.
        const resolvedMounts = step.mounts?.map(m => ({
            hostPath: resolveHostPath(pipelineRoot, m.host),
            containerPath: m.container
        }));
        const currentFingerprint = this.computeFingerprint(step, inputs, resolvedMounts);
        // Cache check (skip for ephemeral or force)
        if (!force && !ephemeral) {
            const cacheResult = await this.tryUseCache({ workspace, state, stepId: step.id, stepRef, fingerprint: currentFingerprint, job });
            if (cacheResult) {
                return cacheResult;
            }
        }
        this.reporter.emit({ ...job, event: 'STEP_STARTING', step: stepRef });
        return this.executeStep({ workspace, state, step, stepRef, inputs, pipelineRoot, ephemeral, currentFingerprint, resolvedMounts, job });
    }
    /**
     * Derives the step's cache fingerprint from everything that affects its
     * output: image, cmd, env, the run IDs of its resolved inputs, and the
     * resolved host mounts. Inputs with no recorded run ID are excluded.
     */
    computeFingerprint(step, inputs, resolvedMounts) {
        const inputRunIds = step.inputs
            ?.map(i => inputs.get(i.step))
            .filter((id) => id !== undefined);
        return StateManager.fingerprint({
            image: step.image,
            cmd: step.cmd,
            env: step.env,
            inputRunIds,
            mounts: resolvedMounts
        });
    }
    /**
     * Reuses a previous run when the recorded fingerprint matches AND the
     * run directory still exists in the workspace. On a hit, re-links the
     * run, emits STEP_SKIPPED and returns `{ runId, exitCode: 0 }`;
     * otherwise returns undefined so the caller executes the step.
     */
    async tryUseCache({ workspace, state, stepId, stepRef, fingerprint, job }) {
        const cached = state.getStep(stepId);
        if (cached?.fingerprint === fingerprint) {
            const runs = await workspace.listRuns();
            if (runs.includes(cached.runId)) {
                await workspace.linkRun(stepId, cached.runId);
                this.reporter.emit({ ...job, event: 'STEP_SKIPPED', step: stepRef, runId: cached.runId, reason: 'cached' });
                return { runId: cached.runId, exitCode: 0 };
            }
        }
        return undefined;
    }
    /**
     * Prepares a staging run directory, executes the container (with
     * retries), then either discards the run (ephemeral) or commits/
     * discards it based on the exit code. stdout/stderr are captured to log
     * files inside the staging directory; both streams are closed on every
     * path, including when execution throws.
     */
    async executeStep(ctx) {
        const { workspace, step, stepRef, inputs, pipelineRoot, ephemeral, job } = ctx;
        const runId = workspace.generateRunId();
        const stagingPath = await workspace.prepareRun(runId);
        await this.prepareStagingInputs(workspace, step, runId, inputs);
        await this.prepareCaches(workspace, step);
        const { containerInputs, output, caches, mounts } = this.buildMounts(step, runId, inputs, pipelineRoot);
        const stdoutLog = createWriteStream(join(stagingPath, 'stdout.log'));
        const stderrLog = createWriteStream(join(stagingPath, 'stderr.log'));
        try {
            const result = await this.executeWithRetries({
                ctx, containerInputs, output, caches, mounts, stdoutLog, stderrLog
            });
            await closeStream(stdoutLog);
            await closeStream(stderrLog);
            if (ephemeral) {
                // Ephemeral runs leave no artifacts or state behind.
                await workspace.discardRun(runId);
                this.reporter.emit({ ...job, event: 'STEP_FINISHED', step: stepRef, ephemeral: true });
                return { exitCode: result.exitCode };
            }
            return await this.commitOrDiscard({ ...ctx, runId, stagingPath, result });
        }
        catch (error) {
            // Ensure log files are flushed/closed before propagating.
            await closeStream(stdoutLog);
            await closeStream(stderrLog);
            throw error;
        }
    }
    /**
     * For inputs flagged `copyToOutput`, copies the input run's artifacts
     * into this run's staging artifacts directory before execution.
     */
    async prepareStagingInputs(workspace, step, runId, inputs) {
        if (!step.inputs) {
            return;
        }
        for (const input of step.inputs) {
            const inputRunId = inputs.get(input.step);
            if (inputRunId && input.copyToOutput) {
                await cp(workspace.runArtifactsPath(inputRunId), workspace.runStagingArtifactsPath(runId), { recursive: true });
            }
        }
    }
    /**
     * Ensures each named cache directory exists in the workspace before the
     * container mounts it.
     */
    async prepareCaches(workspace, step) {
        if (!step.caches) {
            return;
        }
        for (const cache of step.caches) {
            await workspace.prepareCache(cache.name);
        }
    }
    /**
     * Builds the container mount specification: each resolved input is
     * exposed at /input/<stepId>, output goes to the staging run (default
     * /output), plus named caches and host bind mounts. Inputs without a
     * recorded run ID are silently omitted.
     */
    buildMounts(step, runId, inputs, pipelineRoot) {
        const containerInputs = [];
        if (step.inputs) {
            for (const input of step.inputs) {
                const inputRunId = inputs.get(input.step);
                if (inputRunId) {
                    containerInputs.push({ runId: inputRunId, containerPath: `/input/${input.step}` });
                }
            }
        }
        const output = { stagingRunId: runId, containerPath: step.outputPath ?? '/output' };
        const caches = step.caches?.map(c => ({ name: c.name, containerPath: c.path }));
        const mounts = step.mounts?.map(m => ({
            hostPath: resolveHostPath(pipelineRoot, m.host),
            containerPath: m.container
        }));
        return { containerInputs, output, caches, mounts };
    }
    /**
     * Runs the container, retrying up to `step.retries` extra times (with
     * `step.retryDelayMs`, default 5000 ms) when the failure is a PipexError
     * marked transient. Non-transient errors, or exhausting the retry
     * budget, rethrow. Log lines are appended to the staging log files and
     * forwarded to the reporter as STEP_LOG events.
     */
    async executeWithRetries({ ctx, containerInputs, output, caches, mounts, stdoutLog, stderrLog }) {
        const { workspace, step, stepRef, pipelineRoot, ephemeral, job } = ctx;
        const maxRetries = step.retries ?? 0;
        const retryDelay = step.retryDelayMs ?? 5000;
        let result;
        for (let attempt = 0; attempt <= maxRetries; attempt++) {
            try {
                result = await this.runtime.run(workspace, {
                    // Unique container name per attempt (Date.now suffix).
                    name: `pipex-${workspace.id}-${step.id}-${Date.now()}`,
                    image: step.image,
                    cmd: step.cmd,
                    env: step.env,
                    inputs: containerInputs,
                    output,
                    caches,
                    mounts,
                    sources: step.sources?.map(m => ({
                        hostPath: resolveHostPath(pipelineRoot, m.host),
                        containerPath: m.container
                    })),
                    // Network is all-or-nothing: bridge when allowed, else none.
                    network: step.allowNetwork ? 'bridge' : 'none',
                    timeoutSec: step.timeoutSec
                }, ({ stream, line }) => {
                    // Always capture to the per-run log files.
                    if (stream === 'stdout') {
                        stdoutLog.write(line + '\n');
                    }
                    else {
                        stderrLog.write(line + '\n');
                    }
                    // Ephemeral stdout is piped straight to the process's
                    // stdout. NOTE(review): in ephemeral mode stderr lines
                    // fall into the `else` and go to the reporter instead of
                    // process.stderr — confirm this routing is intended.
                    if (ephemeral && stream === 'stdout') {
                        process.stdout.write(line + '\n');
                    }
                    else {
                        this.reporter.emit({ ...job, event: 'STEP_LOG', step: stepRef, stream, line });
                    }
                });
                break;
            }
            catch (error) {
                if (error instanceof PipexError && error.transient && attempt < maxRetries) {
                    this.reporter.emit({ ...job, event: 'STEP_RETRYING', step: stepRef, attempt: attempt + 1, maxRetries });
                    await setTimeout(retryDelay);
                    continue;
                }
                throw error;
            }
        }
        return result;
    }
    /**
     * Writes meta.json into the staging directory, then either commits the
     * run (exit 0, or any exit when `allowFailure`) — linking it, recording
     * the fingerprint in state, and emitting STEP_FINISHED — or discards it,
     * emits STEP_FAILED and throws ContainerCrashError.
     */
    async commitOrDiscard({ workspace, state, step, stepRef, inputs, resolvedMounts, currentFingerprint, runId, stagingPath, result, job }) {
        const meta = {
            runId,
            stepId: step.id,
            stepName: step.name,
            startedAt: result.startedAt.toISOString(),
            finishedAt: result.finishedAt.toISOString(),
            durationMs: result.finishedAt.getTime() - result.startedAt.getTime(),
            exitCode: result.exitCode,
            image: step.image,
            cmd: step.cmd,
            env: step.env,
            inputs: step.inputs?.map(i => ({
                step: i.step,
                runId: inputs.get(i.step),
                mountedAs: `/input/${i.step}`
            })),
            mounts: resolvedMounts,
            caches: step.caches?.map(c => c.name),
            allowNetwork: step.allowNetwork ?? false,
            fingerprint: currentFingerprint,
            status: result.exitCode === 0 ? 'success' : 'failure'
        };
        // meta.json is written even for failing runs that get discarded.
        await writeFile(join(stagingPath, 'meta.json'), JSON.stringify(meta, null, 2), 'utf8');
        if (result.exitCode === 0 || step.allowFailure) {
            await workspace.commitRun(runId);
            await workspace.linkRun(step.id, runId);
            state.setStep(step.id, runId, currentFingerprint);
            await state.save();
            const durationMs = result.finishedAt.getTime() - result.startedAt.getTime();
            const artifactSize = await dirSize(workspace.runArtifactsPath(runId));
            this.reporter.emit({ ...job, event: 'STEP_FINISHED', step: stepRef, runId, durationMs, artifactSize });
            return { runId, exitCode: result.exitCode };
        }
        await workspace.discardRun(runId);
        this.reporter.emit({ ...job, event: 'STEP_FAILED', step: stepRef, exitCode: result.exitCode });
        throw new ContainerCrashError(step.id, result.exitCode);
    }
}
|
|
215
|
+
/**
 * Flushes and closes a write stream, resolving once the stream has
 * finished. Already-destroyed streams resolve immediately; stream errors
 * reject the returned promise.
 */
async function closeStream(stream) {
    if (stream.destroyed) {
        return;
    }
    await new Promise((resolve, reject) => {
        stream.on('error', reject);
        stream.end(() => resolve());
    });
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
/**
 * Reporter that wraps events into TransportMessages and publishes them.
 * STEP_LOG events are dropped (too high-volume for transport); every other
 * event is enveloped with a monotonically increasing sequence number.
 */
export class StreamReporter {
    transport;
    seq = 0;
    constructor(transport) {
        this.transport = transport;
    }
    emit(event) {
        if (event.event === 'STEP_LOG') {
            return;
        }
        // Fire-and-forget publish; delivery is awaited only via flush().
        void this.transport.publish({
            seq: this.seq++,
            timestamp: new Date().toISOString(),
            version: 1,
            type: event.event,
            event
        });
    }
    async flush() {
        // flush() is optional on the transport.
        await this.transport.flush?.();
    }
}
|
|
28
|
+
/**
 * Fans each emitted event out to every wrapped reporter, in the order the
 * reporters were passed to the constructor.
 */
export class CompositeReporter {
    reporters;
    constructor(...reporters) {
        this.reporters = reporters;
    }
    emit(event) {
        this.reporters.forEach((reporter) => reporter.emit(event));
    }
}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import process from 'node:process';
|
|
2
|
+
import { readdir, stat } from 'node:fs/promises';
|
|
3
|
+
import { join, resolve } from 'node:path';
|
|
4
|
+
import { ValidationError } from '../errors.js';
|
|
5
|
+
/**
 * Recursively sums the sizes (in bytes) of all regular files under
 * `dirPath`. Non-file, non-directory entries (e.g. symlinks) are ignored.
 * Best-effort: a missing or unreadable directory yields 0, and files that
 * vanish between readdir and stat are counted as 0 instead of aborting.
 * @param {string} dirPath - directory to measure
 * @returns {Promise<number>} total size in bytes
 */
export async function dirSize(dirPath) {
    let entries;
    try {
        entries = await readdir(dirPath, { withFileTypes: true });
    }
    catch {
        // Directory doesn't exist or isn't readable
        return 0;
    }
    // Stat entries in parallel instead of awaiting them one at a time.
    const sizes = await Promise.all(entries.map(async (entry) => {
        const fullPath = join(dirPath, entry.name);
        if (entry.isDirectory()) {
            return dirSize(fullPath);
        }
        if (entry.isFile()) {
            const info = await stat(fullPath).catch(() => undefined);
            return info?.size ?? 0;
        }
        return 0;
    }));
    return sizes.reduce((sum, size) => sum + size, 0);
}
|
|
25
|
+
/**
 * Resolves a mount's host-relative path against the pipeline root and
 * enforces that the result stays inside the current working directory.
 * Note: the containment check joins with a literal '/' (POSIX separator).
 * @returns {string} the absolute resolved path
 * @throws {ValidationError} when the resolved path escapes the cwd
 */
export function resolveHostPath(pipelineRoot, hostRelative) {
    const boundary = process.cwd();
    const resolved = resolve(pipelineRoot, hostRelative);
    const inside = resolved === boundary || resolved.startsWith(`${boundary}/`);
    if (!inside) {
        throw new ValidationError(`Mount host '${hostRelative}' resolves to '${resolved}' which is outside the working directory '${boundary}'`);
    }
    return resolved;
}
|
|
33
|
+
/**
 * Formats a byte count as a human-readable string with one decimal place
 * (B, KB, MB, GB using 1024-based units).
 */
export function formatSize(bytes) {
    const KB = 1024;
    const MB = KB * 1024;
    const GB = MB * 1024;
    if (bytes < KB) {
        return `${bytes} B`;
    }
    if (bytes < MB) {
        return `${(bytes / KB).toFixed(1)} KB`;
    }
    if (bytes < GB) {
        return `${(bytes / MB).toFixed(1)} MB`;
    }
    return `${(bytes / GB).toFixed(1)} GB`;
}
|
|
45
|
+
/**
 * Formats a millisecond duration: "<n>ms" under one second, "<s>.<d>s"
 * under one minute, otherwise "<m>m <s>s".
 * Fix: Math.round(seconds % 60) could yield 60 (e.g. 119999 ms rendered as
 * "1m 60s"); the carry is now rolled into the minutes ("2m 0s").
 * NOTE(review): values just under 60 s (e.g. 59999 ms) still render as
 * "60.0s" via toFixed — cosmetic, left as-is.
 * @param {number} ms - duration in milliseconds
 * @returns {string} human-readable duration
 */
export function formatDuration(ms) {
    if (ms < 1000) {
        return `${ms}ms`;
    }
    const seconds = ms / 1000;
    if (seconds < 60) {
        return `${seconds.toFixed(1)}s`;
    }
    let minutes = Math.floor(seconds / 60);
    let remainingSeconds = Math.round(seconds % 60);
    if (remainingSeconds === 60) {
        minutes += 1;
        remainingSeconds = 0;
    }
    return `${minutes}m ${remainingSeconds}s`;
}
|