@livingdata/pipex 0.0.8 → 0.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/README.md +186 -16
  2. package/dist/__tests__/errors.js +162 -0
  3. package/dist/__tests__/helpers.js +41 -0
  4. package/dist/__tests__/types.js +8 -0
  5. package/dist/cli/__tests__/condition.js +23 -0
  6. package/dist/cli/__tests__/dag.js +154 -0
  7. package/dist/cli/__tests__/pipeline-loader.js +267 -0
  8. package/dist/cli/__tests__/pipeline-runner.js +257 -0
  9. package/dist/cli/__tests__/state-persistence.js +80 -0
  10. package/dist/cli/__tests__/state.js +58 -0
  11. package/dist/cli/__tests__/step-runner.js +116 -0
  12. package/dist/cli/commands/bundle.js +35 -0
  13. package/dist/cli/commands/cat.js +54 -0
  14. package/dist/cli/commands/clean.js +22 -0
  15. package/dist/cli/commands/exec.js +89 -0
  16. package/dist/cli/commands/export.js +32 -0
  17. package/dist/cli/commands/inspect.js +58 -0
  18. package/dist/cli/commands/list.js +39 -0
  19. package/dist/cli/commands/logs.js +54 -0
  20. package/dist/cli/commands/prune.js +26 -0
  21. package/dist/cli/commands/rm-step.js +41 -0
  22. package/dist/cli/commands/rm.js +27 -0
  23. package/dist/cli/commands/run-bundle.js +59 -0
  24. package/dist/cli/commands/run.js +44 -0
  25. package/dist/cli/commands/show.js +108 -0
  26. package/dist/cli/condition.js +11 -0
  27. package/dist/cli/dag.js +143 -0
  28. package/dist/cli/index.js +24 -105
  29. package/dist/cli/interactive-reporter.js +227 -0
  30. package/dist/cli/pipeline-loader.js +10 -110
  31. package/dist/cli/pipeline-runner.js +256 -111
  32. package/dist/cli/reporter.js +2 -107
  33. package/dist/cli/state.js +30 -9
  34. package/dist/cli/step-loader.js +25 -0
  35. package/dist/cli/step-resolver.js +111 -0
  36. package/dist/cli/step-runner.js +226 -0
  37. package/dist/cli/utils.js +3 -0
  38. package/dist/core/__tests__/bundle.js +663 -0
  39. package/dist/core/__tests__/condition.js +23 -0
  40. package/dist/core/__tests__/dag.js +154 -0
  41. package/dist/core/__tests__/env-file.test.js +41 -0
  42. package/dist/core/__tests__/event-aggregator.js +244 -0
  43. package/dist/core/__tests__/pipeline-loader.js +267 -0
  44. package/dist/core/__tests__/pipeline-runner.js +257 -0
  45. package/dist/core/__tests__/state-persistence.js +80 -0
  46. package/dist/core/__tests__/state.js +58 -0
  47. package/dist/core/__tests__/step-runner.js +118 -0
  48. package/dist/core/__tests__/stream-reporter.js +142 -0
  49. package/dist/core/__tests__/transport.js +50 -0
  50. package/dist/core/__tests__/utils.js +40 -0
  51. package/dist/core/bundle.js +130 -0
  52. package/dist/core/condition.js +11 -0
  53. package/dist/core/dag.js +143 -0
  54. package/dist/core/env-file.js +6 -0
  55. package/dist/core/event-aggregator.js +114 -0
  56. package/dist/core/index.js +14 -0
  57. package/dist/core/pipeline-loader.js +81 -0
  58. package/dist/core/pipeline-runner.js +360 -0
  59. package/dist/core/reporter.js +11 -0
  60. package/dist/core/state.js +110 -0
  61. package/dist/core/step-loader.js +25 -0
  62. package/dist/core/step-resolver.js +117 -0
  63. package/dist/core/step-runner.js +225 -0
  64. package/dist/core/stream-reporter.js +41 -0
  65. package/dist/core/transport.js +9 -0
  66. package/dist/core/utils.js +56 -0
  67. package/dist/engine/__tests__/workspace.js +288 -0
  68. package/dist/engine/docker-executor.js +32 -6
  69. package/dist/engine/index.js +1 -0
  70. package/dist/engine/workspace.js +164 -66
  71. package/dist/errors.js +122 -0
  72. package/dist/index.js +3 -0
  73. package/dist/kits/__tests__/index.js +23 -0
  74. package/dist/kits/builtin/__tests__/node.js +74 -0
  75. package/dist/kits/builtin/__tests__/python.js +67 -0
  76. package/dist/kits/builtin/__tests__/shell.js +74 -0
  77. package/dist/kits/builtin/node.js +10 -5
  78. package/dist/kits/builtin/python.js +10 -5
  79. package/dist/kits/builtin/shell.js +2 -1
  80. package/dist/kits/index.js +2 -1
  81. package/package.json +6 -3
  82. package/dist/cli/types.js +0 -3
  83. package/dist/engine/docker-runtime.js +0 -65
  84. package/dist/engine/runtime.js +0 -2
  85. package/dist/kits/bash.js +0 -19
  86. package/dist/kits/builtin/bash.js +0 -19
  87. package/dist/kits/node.js +0 -56
  88. package/dist/kits/python.js +0 -51
  89. package/dist/kits/types.js +0 -1
  90. package/dist/reporter.js +0 -13
@@ -0,0 +1,257 @@
1
+ import { readFile, writeFile } from 'node:fs/promises';
2
+ import { join } from 'node:path';
3
+ import { stringify as yamlStringify } from 'yaml';
4
+ import test from 'ava';
5
+ import { DockerCliExecutor } from '../../engine/docker-executor.js';
6
+ import { PipelineLoader } from '../pipeline-loader.js';
7
+ import { PipelineRunner } from '../pipeline-runner.js';
8
+ import { Workspace } from '../../engine/workspace.js';
9
+ import { ContainerCrashError } from '../../errors.js';
10
+ import { createTmpDir, isDockerAvailable, noopReporter, recordingReporter } from '../../__tests__/helpers.js';
11
+ const hasDocker = isDockerAvailable();
12
+ const dockerTest = hasDocker ? test : test.skip;
13
+ // -- helpers -----------------------------------------------------------------
14
+ async function writePipeline(dir, config) {
15
+ const filePath = join(dir, 'pipeline.yaml');
16
+ await writeFile(filePath, yamlStringify(config), 'utf8');
17
+ return filePath;
18
+ }
19
+ // -- two-step dependency -----------------------------------------------------
20
+ dockerTest('step B reads step A output via inputs', async (t) => {
21
+ const tmpDir = await createTmpDir();
22
+ const workdir = await createTmpDir();
23
+ const pipelinePath = await writePipeline(tmpDir, {
24
+ id: 'dep-test',
25
+ steps: [
26
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo data > /output/result.txt'] },
27
+ { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/a/result.txt > /output/copy.txt'], inputs: [{ step: 'a' }] }
28
+ ]
29
+ });
30
+ const { reporter, events } = recordingReporter();
31
+ await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath);
32
+ // Both steps should have finished (not just "not crashed")
33
+ const finished = events.filter((e) => e.event === 'STEP_FINISHED');
34
+ t.is(finished.length, 2);
35
+ // Verify step B's artifact actually contains step A's output
36
+ const ws = await Workspace.open(workdir, 'dep-test');
37
+ const bRunId = finished.find(e => e.step.id === 'b').runId;
38
+ t.truthy(bRunId);
39
+ const content = await readFile(join(ws.runArtifactsPath(bRunId), 'copy.txt'), 'utf8');
40
+ t.is(content.trim(), 'data');
41
+ });
42
+ // -- cache hit on re-run -----------------------------------------------------
43
+ dockerTest('re-running same pipeline skips all steps (cached)', async (t) => {
44
+ const tmpDir = await createTmpDir();
45
+ const workdir = await createTmpDir();
46
+ const pipelinePath = await writePipeline(tmpDir, {
47
+ id: 'cache-hit',
48
+ steps: [
49
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] },
50
+ { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'] }
51
+ ]
52
+ });
53
+ const loader = new PipelineLoader();
54
+ const executor = new DockerCliExecutor();
55
+ // First run — executes
56
+ await new PipelineRunner(loader, executor, noopReporter, workdir).run(pipelinePath);
57
+ // Second run — should skip
58
+ const { reporter, events } = recordingReporter();
59
+ await new PipelineRunner(loader, executor, reporter, workdir).run(pipelinePath);
60
+ const skipped = events.filter(e => e.event === 'STEP_SKIPPED');
61
+ t.is(skipped.length, 2);
62
+ });
63
+ // -- cache invalidation cascade ----------------------------------------------
64
+ dockerTest('modifying step A cmd re-executes both steps', async (t) => {
65
+ const tmpDir = await createTmpDir();
66
+ const workdir = await createTmpDir();
67
+ const loader = new PipelineLoader();
68
+ const executor = new DockerCliExecutor();
69
+ // First run
70
+ const path1 = await writePipeline(tmpDir, {
71
+ id: 'cascade',
72
+ steps: [
73
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo v1 > /output/a.txt'] },
74
+ { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/a/a.txt > /output/b.txt'], inputs: [{ step: 'a' }] }
75
+ ]
76
+ });
77
+ await new PipelineRunner(loader, executor, noopReporter, workdir).run(path1);
78
+ // Modify step A cmd
79
+ const path2 = await writePipeline(tmpDir, {
80
+ id: 'cascade',
81
+ steps: [
82
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo v2 > /output/a.txt'] },
83
+ { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/a/a.txt > /output/b.txt'], inputs: [{ step: 'a' }] }
84
+ ]
85
+ });
86
+ const { reporter, events } = recordingReporter();
87
+ await new PipelineRunner(loader, executor, reporter, workdir).run(path2);
88
+ // Both steps should have executed (STEP_STARTING events, not STEP_SKIPPED)
89
+ const starting = events.filter(e => e.event === 'STEP_STARTING');
90
+ const skipped = events.filter(e => e.event === 'STEP_SKIPPED');
91
+ t.is(starting.length, 2);
92
+ t.is(skipped.length, 0);
93
+ });
94
+ // -- allowFailure ------------------------------------------------------------
95
+ dockerTest('step B executes even when step A fails with allowFailure', async (t) => {
96
+ const tmpDir = await createTmpDir();
97
+ const workdir = await createTmpDir();
98
+ const pipelinePath = await writePipeline(tmpDir, {
99
+ id: 'allow-fail',
100
+ steps: [
101
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'exit 1'], allowFailure: true },
102
+ { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo ok > /output/b.txt'] }
103
+ ]
104
+ });
105
+ const { reporter, events } = recordingReporter();
106
+ await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath);
107
+ // Step B should have finished successfully
108
+ const finished = events.filter(e => e.event === 'STEP_FINISHED');
109
+ t.true(finished.some(e => e.event === 'STEP_FINISHED' && e.step.id === 'b'));
110
+ });
111
+ // -- force specific step -----------------------------------------------------
112
+ dockerTest('force specific step re-executes only that step', async (t) => {
113
+ const tmpDir = await createTmpDir();
114
+ const workdir = await createTmpDir();
115
+ const pipelinePath = await writePipeline(tmpDir, {
116
+ id: 'force-one',
117
+ steps: [
118
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] },
119
+ { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'] }
120
+ ]
121
+ });
122
+ const loader = new PipelineLoader();
123
+ const executor = new DockerCliExecutor();
124
+ // First run
125
+ await new PipelineRunner(loader, executor, noopReporter, workdir).run(pipelinePath);
126
+ // Second run with force on step b only
127
+ const { reporter, events } = recordingReporter();
128
+ await new PipelineRunner(loader, executor, reporter, workdir).run(pipelinePath, { force: ['b'] });
129
+ const skipped = events.filter((e) => e.event === 'STEP_SKIPPED');
130
+ const starting = events.filter((e) => e.event === 'STEP_STARTING');
131
+ t.is(skipped.length, 1);
132
+ t.is(skipped[0].step.id, 'a');
133
+ t.is(starting.length, 1);
134
+ t.is(starting[0].step.id, 'b');
135
+ });
136
+ // -- dry run -----------------------------------------------------------------
137
+ dockerTest('dryRun emits STEP_WOULD_RUN without executing or committing', async (t) => {
138
+ const tmpDir = await createTmpDir();
139
+ const workdir = await createTmpDir();
140
+ const pipelinePath = await writePipeline(tmpDir, {
141
+ id: 'dry-run',
142
+ steps: [
143
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] }
144
+ ]
145
+ });
146
+ const { reporter, events } = recordingReporter();
147
+ await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath, { dryRun: true });
148
+ t.truthy(events.find(e => e.event === 'STEP_WOULD_RUN'));
149
+ t.falsy(events.find(e => e.event === 'STEP_STARTING'));
150
+ // No runs should have been committed to disk
151
+ const ws = await Workspace.open(workdir, 'dry-run');
152
+ const runs = await ws.listRuns();
153
+ t.is(runs.length, 0);
154
+ });
155
+ // -- diamond DAG A→B, A→C, B+C→D --------------------------------------------
156
+ dockerTest('diamond DAG executes all steps in correct order', async (t) => {
157
+ const tmpDir = await createTmpDir();
158
+ const workdir = await createTmpDir();
159
+ const pipelinePath = await writePipeline(tmpDir, {
160
+ id: 'diamond',
161
+ steps: [
162
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] },
163
+ { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/a/a.txt > /output/b.txt'], inputs: [{ step: 'a' }] },
164
+ { id: 'c', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/a/a.txt > /output/c.txt'], inputs: [{ step: 'a' }] },
165
+ { id: 'd', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/b/b.txt /input/c/c.txt > /output/d.txt'], inputs: [{ step: 'b' }, { step: 'c' }] }
166
+ ]
167
+ });
168
+ const { reporter, events } = recordingReporter();
169
+ await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath);
170
+ const finished = events.filter((e) => e.event === 'STEP_FINISHED');
171
+ t.is(finished.length, 4);
172
+ });
173
+ // -- failed step → dependents skipped ----------------------------------------
174
+ dockerTest('failed step causes dependents to be skipped with reason dependency', async (t) => {
175
+ const tmpDir = await createTmpDir();
176
+ const workdir = await createTmpDir();
177
+ const pipelinePath = await writePipeline(tmpDir, {
178
+ id: 'fail-dep',
179
+ steps: [
180
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'exit 1'] },
181
+ { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'], inputs: [{ step: 'a' }] }
182
+ ]
183
+ });
184
+ const { reporter, events } = recordingReporter();
185
+ await t.throwsAsync(async () => new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath), { instanceOf: ContainerCrashError });
186
+ const skipped = events.filter((e) => e.event === 'STEP_SKIPPED');
187
+ t.is(skipped.length, 1);
188
+ t.is(skipped[0].step.id, 'b');
189
+ t.is(skipped[0].reason, 'dependency');
190
+ });
191
+ // -- optional input → step runs despite missing input ------------------------
192
+ dockerTest('optional input allows step to run when input step is missing', async (t) => {
193
+ const tmpDir = await createTmpDir();
194
+ const workdir = await createTmpDir();
195
+ const pipelinePath = await writePipeline(tmpDir, {
196
+ id: 'opt-input',
197
+ steps: [
198
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'exit 1'] },
199
+ { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'], inputs: [{ step: 'a', optional: true }] }
200
+ ]
201
+ });
202
+ const { reporter, events } = recordingReporter();
203
+ await t.throwsAsync(async () => new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath));
204
+ // Step b should have been attempted (STEP_STARTING)
205
+ const starting = events.filter((e) => e.event === 'STEP_STARTING');
206
+ t.true(starting.some(e => e.step.id === 'b'));
207
+ });
208
+ // -- if condition → step skipped ---------------------------------------------
209
+ dockerTest('if: "1 == 2" skips step with reason condition', async (t) => {
210
+ const tmpDir = await createTmpDir();
211
+ const workdir = await createTmpDir();
212
+ const pipelinePath = await writePipeline(tmpDir, {
213
+ id: 'cond-skip',
214
+ steps: [
215
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'], if: '1 == 2' }
216
+ ]
217
+ });
218
+ const { reporter, events } = recordingReporter();
219
+ await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath);
220
+ const skipped = events.filter((e) => e.event === 'STEP_SKIPPED');
221
+ t.is(skipped.length, 1);
222
+ t.is(skipped[0].reason, 'condition');
223
+ });
224
+ // -- --target → only subgraph executes ---------------------------------------
225
+ dockerTest('--target executes only the targeted step and its dependencies', async (t) => {
226
+ const tmpDir = await createTmpDir();
227
+ const workdir = await createTmpDir();
228
+ const pipelinePath = await writePipeline(tmpDir, {
229
+ id: 'target-test',
230
+ steps: [
231
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] },
232
+ { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'], inputs: [{ step: 'a' }] },
233
+ { id: 'c', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo c > /output/c.txt'] }
234
+ ]
235
+ });
236
+ const { reporter, events } = recordingReporter();
237
+ await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath, { target: ['b'] });
238
+ const finished = events.filter((e) => e.event === 'STEP_FINISHED');
239
+ const executedIds = finished.map(e => e.step.id).sort();
240
+ t.deepEqual(executedIds, ['a', 'b']);
241
+ });
242
+ // -- independent steps → parallel execution ----------------------------------
243
+ dockerTest('independent steps both finish (parallel execution)', async (t) => {
244
+ const tmpDir = await createTmpDir();
245
+ const workdir = await createTmpDir();
246
+ const pipelinePath = await writePipeline(tmpDir, {
247
+ id: 'parallel',
248
+ steps: [
249
+ { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] },
250
+ { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'] }
251
+ ]
252
+ });
253
+ const { reporter, events } = recordingReporter();
254
+ await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath);
255
+ const finished = events.filter((e) => e.event === 'STEP_FINISHED');
256
+ t.is(finished.length, 2);
257
+ });
@@ -0,0 +1,80 @@
// Persistence round-trip tests for StateManager: every mutation is saved,
// reloaded through a fresh instance, and verified against the reload.
import test from 'ava';
import { StateManager } from '../state.js';
import { createTmpDir } from '../../__tests__/helpers.js';
// Construct a StateManager rooted at `root` with its state already loaded.
async function loadedManager(root) {
    const manager = new StateManager(root);
    await manager.load();
    return manager;
}
// -- load / save round-trip --------------------------------------------------
test('load on missing file gives empty state, getStep returns undefined', async (t) => {
    const root = await createTmpDir();
    const manager = await loadedManager(root);
    t.is(manager.getStep('any'), undefined);
});
test('setStep + save + new instance load returns same data', async (t) => {
    const root = await createTmpDir();
    const writer = await loadedManager(root);
    writer.setStep('build', 'run-1', 'fp-abc');
    await writer.save();
    const reader = await loadedManager(root);
    const step = reader.getStep('build');
    t.truthy(step);
    t.is(step.runId, 'run-1');
    t.is(step.fingerprint, 'fp-abc');
});
test('removeStep + save + load removes the step', async (t) => {
    const root = await createTmpDir();
    const writer = await loadedManager(root);
    writer.setStep('build', 'run-1', 'fp-1');
    writer.setStep('test', 'run-2', 'fp-2');
    await writer.save();
    const remover = await loadedManager(root);
    remover.removeStep('build');
    await remover.save();
    const verifier = await loadedManager(root);
    t.is(verifier.getStep('build'), undefined);
    t.truthy(verifier.getStep('test'));
});
test('listSteps survives save/load round-trip', async (t) => {
    const root = await createTmpDir();
    const writer = await loadedManager(root);
    writer.setStep('a', 'run-a', 'fp-a');
    writer.setStep('b', 'run-b', 'fp-b');
    await writer.save();
    const reader = await loadedManager(root);
    const steps = reader.listSteps();
    t.is(steps.length, 2);
    t.truthy(steps.find((s) => s.stepId === 'a' && s.runId === 'run-a'));
    t.truthy(steps.find((s) => s.stepId === 'b' && s.runId === 'run-b'));
});
test('activeRunIds survives save/load round-trip', async (t) => {
    const root = await createTmpDir();
    const writer = await loadedManager(root);
    writer.setStep('a', 'run-1', 'fp-a');
    writer.setStep('b', 'run-2', 'fp-b');
    await writer.save();
    const reader = await loadedManager(root);
    const ids = reader.activeRunIds();
    t.true(ids.has('run-1'));
    t.true(ids.has('run-2'));
    t.is(ids.size, 2);
});
test('overwriting same stepId keeps only latest after reload', async (t) => {
    const root = await createTmpDir();
    const writer = await loadedManager(root);
    writer.setStep('build', 'run-old', 'fp-old');
    writer.setStep('build', 'run-new', 'fp-new');
    await writer.save();
    const reader = await loadedManager(root);
    const step = reader.getStep('build');
    t.is(step.runId, 'run-new');
    t.is(step.fingerprint, 'fp-new');
});
@@ -0,0 +1,58 @@
// Unit tests for StateManager.fingerprint: determinism, order-insensitive
// normalization (env keys, inputRunIds, mounts) and sensitivity to changes.
import test from 'ava';
import { StateManager } from '../state.js';
// Minimal step config shared by every test; spread and override per case.
const baseStep = {
    image: 'alpine:3.20',
    cmd: ['echo', 'hello']
};
test('fingerprint is deterministic', (t) => {
    const first = StateManager.fingerprint(baseStep);
    const second = StateManager.fingerprint(baseStep);
    t.is(first, second);
});
test('fingerprint sorts env keys', (t) => {
    const forward = StateManager.fingerprint({ ...baseStep, env: { A: '1', B: '2' } });
    const reversed = StateManager.fingerprint({ ...baseStep, env: { B: '2', A: '1' } });
    t.is(forward, reversed);
});
test('fingerprint sorts inputRunIds', (t) => {
    const forward = StateManager.fingerprint({ ...baseStep, inputRunIds: ['x', 'y'] });
    const reversed = StateManager.fingerprint({ ...baseStep, inputRunIds: ['y', 'x'] });
    t.is(forward, reversed);
});
test('fingerprint sorts mounts by containerPath', (t) => {
    const mountA = { hostPath: 'a', containerPath: '/a' };
    const mountB = { hostPath: 'b', containerPath: '/b' };
    const forward = StateManager.fingerprint({ ...baseStep, mounts: [mountA, mountB] });
    const reversed = StateManager.fingerprint({ ...baseStep, mounts: [mountB, mountA] });
    t.is(forward, reversed);
});
test('fingerprint differs when optional fields absent vs present', (t) => {
    const withEnv = StateManager.fingerprint({ ...baseStep, env: { A: '1' } });
    const without = StateManager.fingerprint(baseStep);
    t.not(withEnv, without);
});
test('fingerprint changes when image changes', (t) => {
    const original = StateManager.fingerprint(baseStep);
    const changed = StateManager.fingerprint({ ...baseStep, image: 'node:24' });
    t.not(original, changed);
});
test('fingerprint changes when cmd changes', (t) => {
    const original = StateManager.fingerprint(baseStep);
    const changed = StateManager.fingerprint({ ...baseStep, cmd: ['echo', 'bye'] });
    t.not(original, changed);
});
test('fingerprint changes when env value changes', (t) => {
    const original = StateManager.fingerprint({ ...baseStep, env: { A: '1' } });
    const changed = StateManager.fingerprint({ ...baseStep, env: { A: '2' } });
    t.not(original, changed);
});
@@ -0,0 +1,116 @@
1
+ import { readFile } from 'node:fs/promises';
2
+ import { join } from 'node:path';
3
+ import test from 'ava';
4
+ import { Workspace } from '../../engine/workspace.js';
5
+ import { DockerCliExecutor } from '../../engine/docker-executor.js';
6
+ import { ContainerCrashError } from '../../errors.js';
7
+ import { StateManager } from '../state.js';
8
+ import { StepRunner } from '../step-runner.js';
9
+ import { createTmpDir, isDockerAvailable, noopReporter, recordingReporter } from '../../__tests__/helpers.js';
10
+ const hasDocker = isDockerAvailable();
11
+ const dockerTest = hasDocker ? test : test.skip;
12
+ // -- helpers -----------------------------------------------------------------
13
+ function makeStep(overrides) {
14
+ return {
15
+ image: 'alpine:3.20',
16
+ cmd: ['sh', '-c', 'echo hello'],
17
+ ...overrides
18
+ };
19
+ }
20
+ async function setupWorkspace() {
21
+ const tmpDir = await createTmpDir();
22
+ const workspace = await Workspace.create(tmpDir, 'test-ws');
23
+ const state = new StateManager(workspace.root);
24
+ await state.load();
25
+ return { workspace, state, tmpDir };
26
+ }
27
+ // -- minimal execution -------------------------------------------------------
28
+ dockerTest('minimal step writes artifact and returns exitCode 0', async (t) => {
29
+ const { workspace, state } = await setupWorkspace();
30
+ const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
31
+ const step = makeStep({ id: 'greet', cmd: ['sh', '-c', 'echo hello > /output/greeting.txt'] });
32
+ const result = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/' });
33
+ t.is(result.exitCode, 0);
34
+ t.truthy(result.runId);
35
+ // Artifact exists in committed run
36
+ const content = await readFile(join(workspace.runArtifactsPath(result.runId), 'greeting.txt'), 'utf8');
37
+ t.is(content.trim(), 'hello');
38
+ });
39
+ dockerTest('meta.json exists with correct fields', async (t) => {
40
+ const { workspace, state } = await setupWorkspace();
41
+ const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
42
+ const step = makeStep({ id: 'meta-test', cmd: ['sh', '-c', 'echo ok > /output/out.txt'] });
43
+ const result = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/' });
44
+ const metaPath = join(workspace.runPath(result.runId), 'meta.json');
45
+ const meta = JSON.parse(await readFile(metaPath, 'utf8'));
46
+ t.is(meta.runId, result.runId);
47
+ t.is(meta.exitCode, 0);
48
+ t.is(meta.image, 'alpine:3.20');
49
+ t.deepEqual(meta.cmd, ['sh', '-c', 'echo ok > /output/out.txt']);
50
+ });
51
+ // -- log capture -------------------------------------------------------------
52
+ dockerTest('stdout and stderr are captured to log files', async (t) => {
53
+ const { workspace, state } = await setupWorkspace();
54
+ const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
55
+ const step = makeStep({ id: 'logs', cmd: ['sh', '-c', 'echo out-line && echo err-line >&2'] });
56
+ const result = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/' });
57
+ const stdout = await readFile(join(workspace.runPath(result.runId), 'stdout.log'), 'utf8');
58
+ const stderr = await readFile(join(workspace.runPath(result.runId), 'stderr.log'), 'utf8');
59
+ t.true(stdout.includes('out-line'));
60
+ t.true(stderr.includes('err-line'));
61
+ });
62
+ // -- cache hit ---------------------------------------------------------------
63
+ dockerTest('second run of same step is cached (STEP_SKIPPED)', async (t) => {
64
+ const { workspace, state } = await setupWorkspace();
65
+ const { reporter, events } = recordingReporter();
66
+ const runner = new StepRunner(new DockerCliExecutor(), reporter);
67
+ const step = makeStep({ id: 'cached', cmd: ['sh', '-c', 'echo hi > /output/x.txt'] });
68
+ const first = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/' });
69
+ const second = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/' });
70
+ t.is(first.runId, second.runId);
71
+ const skipped = events.find(e => e.event === 'STEP_SKIPPED');
72
+ t.truthy(skipped);
73
+ t.is(skipped.reason, 'cached');
74
+ });
75
+ // -- force -------------------------------------------------------------------
76
+ dockerTest('force: true produces new runId', async (t) => {
77
+ const { workspace, state } = await setupWorkspace();
78
+ const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
79
+ const step = makeStep({ id: 'force-test', cmd: ['sh', '-c', 'echo data > /output/f.txt'] });
80
+ const first = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/' });
81
+ const second = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', force: true });
82
+ t.not(first.runId, second.runId);
83
+ t.is(second.exitCode, 0);
84
+ });
85
+ // -- ephemeral ---------------------------------------------------------------
86
+ dockerTest('ephemeral: true returns exitCode but no runId', async (t) => {
87
+ const { workspace, state } = await setupWorkspace();
88
+ const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
89
+ const step = makeStep({ id: 'ephemeral', cmd: ['sh', '-c', 'echo temp > /output/t.txt'] });
90
+ const result = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/', ephemeral: true });
91
+ t.is(result.exitCode, 0);
92
+ t.is(result.runId, undefined);
93
+ // No run committed
94
+ const runs = await workspace.listRuns();
95
+ t.is(runs.length, 0);
96
+ });
97
+ // -- failure -----------------------------------------------------------------
98
+ dockerTest('non-zero exit throws ContainerCrashError', async (t) => {
99
+ const { workspace, state } = await setupWorkspace();
100
+ const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
101
+ const step = makeStep({ id: 'fail', cmd: ['sh', '-c', 'exit 1'] });
102
+ const error = await t.throwsAsync(async () => runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/' }));
103
+ t.true(error instanceof ContainerCrashError);
104
+ });
105
+ // -- allowFailure ------------------------------------------------------------
106
+ dockerTest('allowFailure: true commits run with non-zero exitCode', async (t) => {
107
+ const { workspace, state } = await setupWorkspace();
108
+ const runner = new StepRunner(new DockerCliExecutor(), noopReporter);
109
+ const step = makeStep({ id: 'allow-fail', cmd: ['sh', '-c', 'exit 1'], allowFailure: true });
110
+ const result = await runner.run({ workspace, state, step, inputs: new Map(), pipelineRoot: '/' });
111
+ t.truthy(result.runId);
112
+ t.is(result.exitCode, 1);
113
+ // Run was committed
114
+ const runs = await workspace.listRuns();
115
+ t.true(runs.includes(result.runId));
116
+ });
@@ -0,0 +1,35 @@
1
+ import process from 'node:process';
2
+ import { writeFile } from 'node:fs/promises';
3
+ import { basename, resolve } from 'node:path';
4
+ import chalk from 'chalk';
5
+ import { buildBundle } from '../../core/bundle.js';
6
+ import { PipelineLoader } from '../../core/pipeline-loader.js';
7
+ import { formatSize } from '../../core/utils.js';
8
+ export function registerBundleCommand(program) {
9
+ program
10
+ .command('bundle')
11
+ .description('Package a pipeline and its local dependencies into a tar.gz archive')
12
+ .argument('<pipeline>', 'Path to the pipeline file')
13
+ .option('-o, --output <path>', 'Output file path (default: <pipeline-id>.tar.gz)')
14
+ .action(async (pipelineFile, options) => {
15
+ try {
16
+ const pipelinePath = resolve(pipelineFile);
17
+ // Determine output path
18
+ let outputPath = options.output;
19
+ if (!outputPath) {
20
+ const loader = new PipelineLoader();
21
+ const pipeline = await loader.load(pipelinePath);
22
+ outputPath = `${pipeline.id}.tar.gz`;
23
+ }
24
+ outputPath = resolve(outputPath);
25
+ const archive = await buildBundle(pipelinePath);
26
+ await writeFile(outputPath, archive);
27
+ console.log(chalk.green(`Bundle created: ${basename(outputPath)} (${formatSize(archive.length)})`));
28
+ }
29
+ catch (error) {
30
+ const message = error instanceof Error ? error.message : String(error);
31
+ console.error(chalk.red(`Bundle failed: ${message}`));
32
+ process.exitCode = 1;
33
+ }
34
+ });
35
+ }
@@ -0,0 +1,54 @@
1
+ import process from 'node:process';
2
+ import { readFile, readdir, stat } from 'node:fs/promises';
3
+ import { join, resolve } from 'node:path';
4
+ import chalk from 'chalk';
5
+ import { Workspace } from '../../engine/workspace.js';
6
+ import { StateManager } from '../../core/state.js';
7
+ import { getGlobalOptions } from '../utils.js';
8
+ export function registerCatCommand(program) {
9
+ program
10
+ .command('cat')
11
+ .description('Read artifact content from a step\'s latest run')
12
+ .argument('<workspace>', 'Workspace name')
13
+ .argument('<step>', 'Step ID')
14
+ .argument('[path]', 'Path within artifacts (omit to list)')
15
+ .action(async (workspaceName, stepId, artifactPath, _options, cmd) => {
16
+ const { workdir } = getGlobalOptions(cmd);
17
+ const workdirRoot = resolve(workdir);
18
+ const workspace = await Workspace.open(workdirRoot, workspaceName);
19
+ const state = new StateManager(workspace.root);
20
+ await state.load();
21
+ const stepState = state.getStep(stepId);
22
+ if (!stepState) {
23
+ console.error(chalk.red(`No run found for step: ${stepId}`));
24
+ process.exitCode = 1;
25
+ return;
26
+ }
27
+ const artifactsDir = workspace.runArtifactsPath(stepState.runId);
28
+ const targetPath = artifactPath ? join(artifactsDir, artifactPath) : artifactsDir;
29
+ // Prevent path traversal
30
+ if (!targetPath.startsWith(artifactsDir)) {
31
+ console.error(chalk.red('Invalid path: must be within artifacts directory'));
32
+ process.exitCode = 1;
33
+ return;
34
+ }
35
+ try {
36
+ const info = await stat(targetPath);
37
+ if (info.isDirectory()) {
38
+ const entries = await readdir(targetPath, { withFileTypes: true });
39
+ for (const entry of entries) {
40
+ const suffix = entry.isDirectory() ? '/' : '';
41
+ console.log(entry.name + suffix);
42
+ }
43
+ }
44
+ else {
45
+ const content = await readFile(targetPath);
46
+ process.stdout.write(content);
47
+ }
48
+ }
49
+ catch {
50
+ console.error(chalk.red(`Not found: ${artifactPath ?? '(artifacts directory)'}`));
51
+ process.exitCode = 1;
52
+ }
53
+ });
54
+ }
@@ -0,0 +1,22 @@
1
+ import { resolve } from 'node:path';
2
+ import chalk from 'chalk';
3
+ import { Workspace } from '../../engine/workspace.js';
4
+ import { getGlobalOptions } from '../utils.js';
5
+ export function registerCleanCommand(program) {
6
+ program
7
+ .command('clean')
8
+ .description('Remove all workspaces')
9
+ .action(async (_options, cmd) => {
10
+ const { workdir } = getGlobalOptions(cmd);
11
+ const workdirRoot = resolve(workdir);
12
+ const names = await Workspace.list(workdirRoot);
13
+ if (names.length === 0) {
14
+ console.log(chalk.gray('No workspaces to clean.'));
15
+ return;
16
+ }
17
+ for (const name of names) {
18
+ await Workspace.remove(workdirRoot, name);
19
+ }
20
+ console.log(chalk.green(`Removed ${names.length} workspace${names.length > 1 ? 's' : ''}.`));
21
+ });
22
+ }