@livingdata/pipex 0.0.8 → 0.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +186 -16
- package/dist/__tests__/errors.js +162 -0
- package/dist/__tests__/helpers.js +41 -0
- package/dist/__tests__/types.js +8 -0
- package/dist/cli/__tests__/condition.js +23 -0
- package/dist/cli/__tests__/dag.js +154 -0
- package/dist/cli/__tests__/pipeline-loader.js +267 -0
- package/dist/cli/__tests__/pipeline-runner.js +257 -0
- package/dist/cli/__tests__/state-persistence.js +80 -0
- package/dist/cli/__tests__/state.js +58 -0
- package/dist/cli/__tests__/step-runner.js +116 -0
- package/dist/cli/commands/bundle.js +35 -0
- package/dist/cli/commands/cat.js +54 -0
- package/dist/cli/commands/clean.js +22 -0
- package/dist/cli/commands/exec.js +89 -0
- package/dist/cli/commands/export.js +32 -0
- package/dist/cli/commands/inspect.js +58 -0
- package/dist/cli/commands/list.js +39 -0
- package/dist/cli/commands/logs.js +54 -0
- package/dist/cli/commands/prune.js +26 -0
- package/dist/cli/commands/rm-step.js +41 -0
- package/dist/cli/commands/rm.js +27 -0
- package/dist/cli/commands/run-bundle.js +59 -0
- package/dist/cli/commands/run.js +44 -0
- package/dist/cli/commands/show.js +108 -0
- package/dist/cli/condition.js +11 -0
- package/dist/cli/dag.js +143 -0
- package/dist/cli/index.js +24 -105
- package/dist/cli/interactive-reporter.js +227 -0
- package/dist/cli/pipeline-loader.js +10 -110
- package/dist/cli/pipeline-runner.js +256 -111
- package/dist/cli/reporter.js +2 -107
- package/dist/cli/state.js +30 -9
- package/dist/cli/step-loader.js +25 -0
- package/dist/cli/step-resolver.js +111 -0
- package/dist/cli/step-runner.js +226 -0
- package/dist/cli/utils.js +3 -0
- package/dist/core/__tests__/bundle.js +663 -0
- package/dist/core/__tests__/condition.js +23 -0
- package/dist/core/__tests__/dag.js +154 -0
- package/dist/core/__tests__/env-file.test.js +41 -0
- package/dist/core/__tests__/event-aggregator.js +244 -0
- package/dist/core/__tests__/pipeline-loader.js +267 -0
- package/dist/core/__tests__/pipeline-runner.js +257 -0
- package/dist/core/__tests__/state-persistence.js +80 -0
- package/dist/core/__tests__/state.js +58 -0
- package/dist/core/__tests__/step-runner.js +118 -0
- package/dist/core/__tests__/stream-reporter.js +142 -0
- package/dist/core/__tests__/transport.js +50 -0
- package/dist/core/__tests__/utils.js +40 -0
- package/dist/core/bundle.js +130 -0
- package/dist/core/condition.js +11 -0
- package/dist/core/dag.js +143 -0
- package/dist/core/env-file.js +6 -0
- package/dist/core/event-aggregator.js +114 -0
- package/dist/core/index.js +14 -0
- package/dist/core/pipeline-loader.js +81 -0
- package/dist/core/pipeline-runner.js +360 -0
- package/dist/core/reporter.js +11 -0
- package/dist/core/state.js +110 -0
- package/dist/core/step-loader.js +25 -0
- package/dist/core/step-resolver.js +117 -0
- package/dist/core/step-runner.js +225 -0
- package/dist/core/stream-reporter.js +41 -0
- package/dist/core/transport.js +9 -0
- package/dist/core/utils.js +56 -0
- package/dist/engine/__tests__/workspace.js +288 -0
- package/dist/engine/docker-executor.js +32 -6
- package/dist/engine/index.js +1 -0
- package/dist/engine/workspace.js +164 -66
- package/dist/errors.js +122 -0
- package/dist/index.js +3 -0
- package/dist/kits/__tests__/index.js +23 -0
- package/dist/kits/builtin/__tests__/node.js +74 -0
- package/dist/kits/builtin/__tests__/python.js +67 -0
- package/dist/kits/builtin/__tests__/shell.js +74 -0
- package/dist/kits/builtin/node.js +10 -5
- package/dist/kits/builtin/python.js +10 -5
- package/dist/kits/builtin/shell.js +2 -1
- package/dist/kits/index.js +2 -1
- package/package.json +6 -3
- package/dist/cli/types.js +0 -3
- package/dist/engine/docker-runtime.js +0 -65
- package/dist/engine/runtime.js +0 -2
- package/dist/kits/bash.js +0 -19
- package/dist/kits/builtin/bash.js +0 -19
- package/dist/kits/node.js +0 -56
- package/dist/kits/python.js +0 -51
- package/dist/kits/types.js +0 -1
- package/dist/reporter.js +0 -13
|
@@ -0,0 +1,267 @@
|
|
|
1
|
+
import test from 'ava';
|
|
2
|
+
import { CyclicDependencyError, ValidationError } from '../../errors.js';
|
|
3
|
+
import { PipelineLoader, slugify, parsePipelineFile, mergeEnv, mergeCaches, mergeMounts } from '../pipeline-loader.js';
|
|
4
|
+
// ---------------------------------------------------------------------------
|
|
5
|
+
// slugify
|
|
6
|
+
// ---------------------------------------------------------------------------
|
|
7
|
+
test('slugify converts accented characters', t => {
|
|
8
|
+
t.is(slugify('Étape numéro un'), 'etape-numero-un');
|
|
9
|
+
});
|
|
10
|
+
test('slugify replaces spaces with hyphens', t => {
|
|
11
|
+
t.is(slugify('hello world'), 'hello-world');
|
|
12
|
+
});
|
|
13
|
+
test('slugify replaces special characters', t => {
|
|
14
|
+
t.is(slugify('build@v2!'), 'build-v2');
|
|
15
|
+
t.is(slugify('build@v2!final'), 'build-v2-final');
|
|
16
|
+
});
|
|
17
|
+
test('slugify collapses double hyphens', t => {
|
|
18
|
+
t.is(slugify('a--b'), 'a-b');
|
|
19
|
+
});
|
|
20
|
+
test('slugify strips leading and trailing hyphens', t => {
|
|
21
|
+
t.is(slugify('-hello-'), 'hello');
|
|
22
|
+
});
|
|
23
|
+
// ---------------------------------------------------------------------------
|
|
24
|
+
// parsePipelineFile
|
|
25
|
+
// ---------------------------------------------------------------------------
|
|
26
|
+
test('parsePipelineFile parses valid JSON', t => {
|
|
27
|
+
const result = parsePipelineFile('{"id": "test"}', 'pipeline.json');
|
|
28
|
+
t.is(result.id, 'test');
|
|
29
|
+
});
|
|
30
|
+
test('parsePipelineFile parses YAML for .yaml extension', t => {
|
|
31
|
+
const result = parsePipelineFile('id: test', 'pipeline.yaml');
|
|
32
|
+
t.is(result.id, 'test');
|
|
33
|
+
});
|
|
34
|
+
test('parsePipelineFile parses YAML for .yml extension', t => {
|
|
35
|
+
const result = parsePipelineFile('id: test', 'pipeline.yml');
|
|
36
|
+
t.is(result.id, 'test');
|
|
37
|
+
});
|
|
38
|
+
test('parsePipelineFile throws on invalid JSON', t => {
|
|
39
|
+
t.throws(() => parsePipelineFile('{invalid', 'pipeline.json'));
|
|
40
|
+
});
|
|
41
|
+
// ---------------------------------------------------------------------------
|
|
42
|
+
// mergeEnv
|
|
43
|
+
// ---------------------------------------------------------------------------
|
|
44
|
+
test('mergeEnv returns undefined when both are undefined', t => {
|
|
45
|
+
t.is(mergeEnv(undefined, undefined), undefined);
|
|
46
|
+
});
|
|
47
|
+
test('mergeEnv returns kit env when user is undefined', t => {
|
|
48
|
+
t.deepEqual(mergeEnv({ A: '1' }, undefined), { A: '1' });
|
|
49
|
+
});
|
|
50
|
+
test('mergeEnv returns user env when kit is undefined', t => {
|
|
51
|
+
t.deepEqual(mergeEnv(undefined, { B: '2' }), { B: '2' });
|
|
52
|
+
});
|
|
53
|
+
test('mergeEnv user overrides kit', t => {
|
|
54
|
+
t.deepEqual(mergeEnv({ A: '1' }, { A: '2' }), { A: '2' });
|
|
55
|
+
});
|
|
56
|
+
test('mergeEnv merges both', t => {
|
|
57
|
+
t.deepEqual(mergeEnv({ A: '1' }, { B: '2' }), { A: '1', B: '2' });
|
|
58
|
+
});
|
|
59
|
+
// ---------------------------------------------------------------------------
|
|
60
|
+
// mergeCaches
|
|
61
|
+
// ---------------------------------------------------------------------------
|
|
62
|
+
test('mergeCaches returns undefined when both are undefined', t => {
|
|
63
|
+
t.is(mergeCaches(undefined, undefined), undefined);
|
|
64
|
+
});
|
|
65
|
+
test('mergeCaches concatenates non-overlapping caches', t => {
|
|
66
|
+
const result = mergeCaches([{ name: 'a', path: '/a' }], [{ name: 'b', path: '/b' }]);
|
|
67
|
+
t.deepEqual(result, [
|
|
68
|
+
{ name: 'a', path: '/a' },
|
|
69
|
+
{ name: 'b', path: '/b' }
|
|
70
|
+
]);
|
|
71
|
+
});
|
|
72
|
+
test('mergeCaches user wins on same name', t => {
|
|
73
|
+
const result = mergeCaches([{ name: 'x', path: '/kit' }], [{ name: 'x', path: '/user' }]);
|
|
74
|
+
t.deepEqual(result, [{ name: 'x', path: '/user' }]);
|
|
75
|
+
});
|
|
76
|
+
// ---------------------------------------------------------------------------
|
|
77
|
+
// mergeMounts
|
|
78
|
+
// ---------------------------------------------------------------------------
|
|
79
|
+
test('mergeMounts returns undefined when both are undefined', t => {
|
|
80
|
+
t.is(mergeMounts(undefined, undefined), undefined);
|
|
81
|
+
});
|
|
82
|
+
test('mergeMounts concatenates mounts', t => {
|
|
83
|
+
const result = mergeMounts([{ host: 'a', container: '/a' }], [{ host: 'b', container: '/b' }]);
|
|
84
|
+
t.deepEqual(result, [
|
|
85
|
+
{ host: 'a', container: '/a' },
|
|
86
|
+
{ host: 'b', container: '/b' }
|
|
87
|
+
]);
|
|
88
|
+
});
|
|
89
|
+
// ---------------------------------------------------------------------------
|
|
90
|
+
// PipelineLoader.parse
|
|
91
|
+
// ---------------------------------------------------------------------------
|
|
92
|
+
const loader = new PipelineLoader();
|
|
93
|
+
test('parse: valid pipeline with raw steps', t => {
|
|
94
|
+
const pipeline = loader.parse(JSON.stringify({
|
|
95
|
+
id: 'my-pipeline',
|
|
96
|
+
steps: [{
|
|
97
|
+
id: 'step1',
|
|
98
|
+
image: 'alpine',
|
|
99
|
+
cmd: ['echo', 'hello']
|
|
100
|
+
}]
|
|
101
|
+
}), 'p.json');
|
|
102
|
+
t.is(pipeline.id, 'my-pipeline');
|
|
103
|
+
t.is(pipeline.steps.length, 1);
|
|
104
|
+
t.is(pipeline.steps[0].id, 'step1');
|
|
105
|
+
});
|
|
106
|
+
test('parse: derives id from name via slugify', t => {
|
|
107
|
+
const pipeline = loader.parse(JSON.stringify({
|
|
108
|
+
name: 'Mon Pipeline',
|
|
109
|
+
steps: [{
|
|
110
|
+
name: 'Première Étape',
|
|
111
|
+
image: 'alpine',
|
|
112
|
+
cmd: ['echo']
|
|
113
|
+
}]
|
|
114
|
+
}), 'p.json');
|
|
115
|
+
t.is(pipeline.id, 'mon-pipeline');
|
|
116
|
+
t.is(pipeline.steps[0].id, 'premiere-etape');
|
|
117
|
+
});
|
|
118
|
+
test('parse: throws ValidationError when neither id nor name on pipeline', t => {
|
|
119
|
+
const error = t.throws(() => loader.parse(JSON.stringify({
|
|
120
|
+
steps: [{ id: 's', image: 'alpine', cmd: ['echo'] }]
|
|
121
|
+
}), 'p.json'), { message: /at least one of "id" or "name"/ });
|
|
122
|
+
t.true(error instanceof ValidationError);
|
|
123
|
+
});
|
|
124
|
+
test('parse: throws ValidationError when neither id nor name on step', t => {
|
|
125
|
+
const error = t.throws(() => loader.parse(JSON.stringify({
|
|
126
|
+
id: 'p',
|
|
127
|
+
steps: [{ image: 'alpine', cmd: ['echo'] }]
|
|
128
|
+
}), 'p.json'), { message: /at least one of "id" or "name"/ });
|
|
129
|
+
t.true(error instanceof ValidationError);
|
|
130
|
+
});
|
|
131
|
+
test('parse: throws ValidationError on empty steps array', t => {
|
|
132
|
+
const error = t.throws(() => loader.parse(JSON.stringify({
|
|
133
|
+
id: 'p', steps: []
|
|
134
|
+
}), 'p.json'), { message: /steps must be a non-empty array/ });
|
|
135
|
+
t.true(error instanceof ValidationError);
|
|
136
|
+
});
|
|
137
|
+
test('parse: throws on invalid identifier with path traversal', t => {
|
|
138
|
+
t.throws(() => loader.parse(JSON.stringify({
|
|
139
|
+
id: 'p',
|
|
140
|
+
steps: [{ id: '../bad', image: 'alpine', cmd: ['echo'] }]
|
|
141
|
+
}), 'p.json'), { message: /must contain only alphanumeric/ });
|
|
142
|
+
});
|
|
143
|
+
test('parse: throws on invalid identifier with special chars', t => {
|
|
144
|
+
t.throws(() => loader.parse(JSON.stringify({
|
|
145
|
+
id: 'p',
|
|
146
|
+
steps: [{ id: 'hello world', image: 'alpine', cmd: ['echo'] }]
|
|
147
|
+
}), 'p.json'), { message: /must contain only alphanumeric/ });
|
|
148
|
+
});
|
|
149
|
+
test('parse: throws when step has no image', t => {
|
|
150
|
+
t.throws(() => loader.parse(JSON.stringify({
|
|
151
|
+
id: 'p',
|
|
152
|
+
steps: [{ id: 's', cmd: ['echo'] }]
|
|
153
|
+
}), 'p.json'), { message: /image is required/ });
|
|
154
|
+
});
|
|
155
|
+
test('parse: throws when step has no cmd', t => {
|
|
156
|
+
t.throws(() => loader.parse(JSON.stringify({
|
|
157
|
+
id: 'p',
|
|
158
|
+
steps: [{ id: 's', image: 'alpine' }]
|
|
159
|
+
}), 'p.json'), { message: /cmd must be a non-empty array/ });
|
|
160
|
+
});
|
|
161
|
+
test('parse: throws ValidationError on duplicate step ids', t => {
|
|
162
|
+
const error = t.throws(() => loader.parse(JSON.stringify({
|
|
163
|
+
id: 'p',
|
|
164
|
+
steps: [
|
|
165
|
+
{ id: 's', image: 'alpine', cmd: ['echo'] },
|
|
166
|
+
{ id: 's', image: 'alpine', cmd: ['echo'] }
|
|
167
|
+
]
|
|
168
|
+
}), 'p.json'), { message: /Duplicate step id/ });
|
|
169
|
+
t.true(error instanceof ValidationError);
|
|
170
|
+
});
|
|
171
|
+
test('parse: validates mount host must be relative', t => {
|
|
172
|
+
t.throws(() => loader.parse(JSON.stringify({
|
|
173
|
+
id: 'p',
|
|
174
|
+
steps: [{
|
|
175
|
+
id: 's', image: 'alpine', cmd: ['echo'],
|
|
176
|
+
mounts: [{ host: '/absolute', container: '/c' }]
|
|
177
|
+
}]
|
|
178
|
+
}), 'p.json'), { message: /must be a relative path/ });
|
|
179
|
+
});
|
|
180
|
+
test('parse: allows mount host with ..', t => {
|
|
181
|
+
t.notThrows(() => loader.parse(JSON.stringify({
|
|
182
|
+
id: 'p',
|
|
183
|
+
steps: [{
|
|
184
|
+
id: 's', image: 'alpine', cmd: ['echo'],
|
|
185
|
+
mounts: [{ host: '../sibling', container: '/c' }]
|
|
186
|
+
}]
|
|
187
|
+
}), 'p.json'));
|
|
188
|
+
});
|
|
189
|
+
test('parse: validates mount container must be absolute', t => {
|
|
190
|
+
t.throws(() => loader.parse(JSON.stringify({
|
|
191
|
+
id: 'p',
|
|
192
|
+
steps: [{
|
|
193
|
+
id: 's', image: 'alpine', cmd: ['echo'],
|
|
194
|
+
mounts: [{ host: 'src', container: 'relative' }]
|
|
195
|
+
}]
|
|
196
|
+
}), 'p.json'), { message: /must be an absolute path/ });
|
|
197
|
+
});
|
|
198
|
+
test('parse: validates cache path must be absolute', t => {
|
|
199
|
+
t.throws(() => loader.parse(JSON.stringify({
|
|
200
|
+
id: 'p',
|
|
201
|
+
steps: [{
|
|
202
|
+
id: 's', image: 'alpine', cmd: ['echo'],
|
|
203
|
+
caches: [{ name: 'c', path: 'relative' }]
|
|
204
|
+
}]
|
|
205
|
+
}), 'p.json'), { message: /must be an absolute path/ });
|
|
206
|
+
});
|
|
207
|
+
test('parse: validates cache name is a valid identifier', t => {
|
|
208
|
+
t.throws(() => loader.parse(JSON.stringify({
|
|
209
|
+
id: 'p',
|
|
210
|
+
steps: [{
|
|
211
|
+
id: 's', image: 'alpine', cmd: ['echo'],
|
|
212
|
+
caches: [{ name: 'bad name!', path: '/cache' }]
|
|
213
|
+
}]
|
|
214
|
+
}), 'p.json'), { message: /must contain only alphanumeric/ });
|
|
215
|
+
});
|
|
216
|
+
test('parse: resolves kit step (uses → image/cmd)', t => {
|
|
217
|
+
const pipeline = loader.parse(JSON.stringify({
|
|
218
|
+
id: 'p',
|
|
219
|
+
steps: [{
|
|
220
|
+
id: 'b',
|
|
221
|
+
uses: 'shell',
|
|
222
|
+
with: { run: 'echo hello' }
|
|
223
|
+
}]
|
|
224
|
+
}), 'p.json');
|
|
225
|
+
t.is(pipeline.steps[0].image, 'alpine:3.20');
|
|
226
|
+
t.deepEqual(pipeline.steps[0].cmd, ['sh', '-c', 'echo hello']);
|
|
227
|
+
});
|
|
228
|
+
// ---------------------------------------------------------------------------
|
|
229
|
+
// DAG validation
|
|
230
|
+
// ---------------------------------------------------------------------------
|
|
231
|
+
test('parse: detects cycle → CyclicDependencyError', t => {
|
|
232
|
+
const error = t.throws(() => loader.parse(JSON.stringify({
|
|
233
|
+
id: 'p',
|
|
234
|
+
steps: [
|
|
235
|
+
{ id: 'a', image: 'alpine', cmd: ['echo'], inputs: [{ step: 'b' }] },
|
|
236
|
+
{ id: 'b', image: 'alpine', cmd: ['echo'], inputs: [{ step: 'a' }] }
|
|
237
|
+
]
|
|
238
|
+
}), 'p.json'), { message: /cycle/ });
|
|
239
|
+
t.true(error instanceof CyclicDependencyError);
|
|
240
|
+
});
|
|
241
|
+
test('parse: missing input ref → error', t => {
|
|
242
|
+
t.throws(() => loader.parse(JSON.stringify({
|
|
243
|
+
id: 'p',
|
|
244
|
+
steps: [
|
|
245
|
+
{ id: 'a', image: 'alpine', cmd: ['echo'], inputs: [{ step: 'missing' }] }
|
|
246
|
+
]
|
|
247
|
+
}), 'p.json'), { message: /unknown step 'missing'/ });
|
|
248
|
+
});
|
|
249
|
+
test('parse: optional input to unknown step → OK', t => {
|
|
250
|
+
t.notThrows(() => loader.parse(JSON.stringify({
|
|
251
|
+
id: 'p',
|
|
252
|
+
steps: [
|
|
253
|
+
{ id: 'a', image: 'alpine', cmd: ['echo'], inputs: [{ step: 'missing', optional: true }] }
|
|
254
|
+
]
|
|
255
|
+
}), 'p.json'));
|
|
256
|
+
});
|
|
257
|
+
test('parse: valid DAG diamond → OK', t => {
|
|
258
|
+
t.notThrows(() => loader.parse(JSON.stringify({
|
|
259
|
+
id: 'p',
|
|
260
|
+
steps: [
|
|
261
|
+
{ id: 'a', image: 'alpine', cmd: ['echo'] },
|
|
262
|
+
{ id: 'b', image: 'alpine', cmd: ['echo'], inputs: [{ step: 'a' }] },
|
|
263
|
+
{ id: 'c', image: 'alpine', cmd: ['echo'], inputs: [{ step: 'a' }] },
|
|
264
|
+
{ id: 'd', image: 'alpine', cmd: ['echo'], inputs: [{ step: 'b' }, { step: 'c' }] }
|
|
265
|
+
]
|
|
266
|
+
}), 'p.json'));
|
|
267
|
+
});
|
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
import { readFile, writeFile } from 'node:fs/promises';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import { stringify as yamlStringify } from 'yaml';
|
|
4
|
+
import test from 'ava';
|
|
5
|
+
import { DockerCliExecutor } from '../../engine/docker-executor.js';
|
|
6
|
+
import { PipelineLoader } from '../pipeline-loader.js';
|
|
7
|
+
import { PipelineRunner } from '../pipeline-runner.js';
|
|
8
|
+
import { Workspace } from '../../engine/workspace.js';
|
|
9
|
+
import { ContainerCrashError } from '../../errors.js';
|
|
10
|
+
import { createTmpDir, isDockerAvailable, noopReporter, recordingReporter } from '../../__tests__/helpers.js';
|
|
11
|
+
const hasDocker = isDockerAvailable();
|
|
12
|
+
const dockerTest = hasDocker ? test : test.skip;
|
|
13
|
+
// -- helpers -----------------------------------------------------------------
|
|
14
|
+
async function writePipeline(dir, config) {
|
|
15
|
+
const filePath = join(dir, 'pipeline.yaml');
|
|
16
|
+
await writeFile(filePath, yamlStringify(config), 'utf8');
|
|
17
|
+
return filePath;
|
|
18
|
+
}
|
|
19
|
+
// -- two-step dependency -----------------------------------------------------
|
|
20
|
+
dockerTest('step B reads step A output via inputs', async (t) => {
|
|
21
|
+
const tmpDir = await createTmpDir();
|
|
22
|
+
const workdir = await createTmpDir();
|
|
23
|
+
const pipelinePath = await writePipeline(tmpDir, {
|
|
24
|
+
id: 'dep-test',
|
|
25
|
+
steps: [
|
|
26
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo data > /output/result.txt'] },
|
|
27
|
+
{ id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/a/result.txt > /output/copy.txt'], inputs: [{ step: 'a' }] }
|
|
28
|
+
]
|
|
29
|
+
});
|
|
30
|
+
const { reporter, events } = recordingReporter();
|
|
31
|
+
await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath);
|
|
32
|
+
// Both steps should have finished (not just "not crashed")
|
|
33
|
+
const finished = events.filter((e) => e.event === 'STEP_FINISHED');
|
|
34
|
+
t.is(finished.length, 2);
|
|
35
|
+
// Verify step B's artifact actually contains step A's output
|
|
36
|
+
const ws = await Workspace.open(workdir, 'dep-test');
|
|
37
|
+
const bRunId = finished.find(e => e.step.id === 'b').runId;
|
|
38
|
+
t.truthy(bRunId);
|
|
39
|
+
const content = await readFile(join(ws.runArtifactsPath(bRunId), 'copy.txt'), 'utf8');
|
|
40
|
+
t.is(content.trim(), 'data');
|
|
41
|
+
});
|
|
42
|
+
// -- cache hit on re-run -----------------------------------------------------
|
|
43
|
+
dockerTest('re-running same pipeline skips all steps (cached)', async (t) => {
|
|
44
|
+
const tmpDir = await createTmpDir();
|
|
45
|
+
const workdir = await createTmpDir();
|
|
46
|
+
const pipelinePath = await writePipeline(tmpDir, {
|
|
47
|
+
id: 'cache-hit',
|
|
48
|
+
steps: [
|
|
49
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] },
|
|
50
|
+
{ id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'] }
|
|
51
|
+
]
|
|
52
|
+
});
|
|
53
|
+
const loader = new PipelineLoader();
|
|
54
|
+
const executor = new DockerCliExecutor();
|
|
55
|
+
// First run — executes
|
|
56
|
+
await new PipelineRunner(loader, executor, noopReporter, workdir).run(pipelinePath);
|
|
57
|
+
// Second run — should skip
|
|
58
|
+
const { reporter, events } = recordingReporter();
|
|
59
|
+
await new PipelineRunner(loader, executor, reporter, workdir).run(pipelinePath);
|
|
60
|
+
const skipped = events.filter(e => e.event === 'STEP_SKIPPED');
|
|
61
|
+
t.is(skipped.length, 2);
|
|
62
|
+
});
|
|
63
|
+
// -- cache invalidation cascade ----------------------------------------------
|
|
64
|
+
dockerTest('modifying step A cmd re-executes both steps', async (t) => {
|
|
65
|
+
const tmpDir = await createTmpDir();
|
|
66
|
+
const workdir = await createTmpDir();
|
|
67
|
+
const loader = new PipelineLoader();
|
|
68
|
+
const executor = new DockerCliExecutor();
|
|
69
|
+
// First run
|
|
70
|
+
const path1 = await writePipeline(tmpDir, {
|
|
71
|
+
id: 'cascade',
|
|
72
|
+
steps: [
|
|
73
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo v1 > /output/a.txt'] },
|
|
74
|
+
{ id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/a/a.txt > /output/b.txt'], inputs: [{ step: 'a' }] }
|
|
75
|
+
]
|
|
76
|
+
});
|
|
77
|
+
await new PipelineRunner(loader, executor, noopReporter, workdir).run(path1);
|
|
78
|
+
// Modify step A cmd
|
|
79
|
+
const path2 = await writePipeline(tmpDir, {
|
|
80
|
+
id: 'cascade',
|
|
81
|
+
steps: [
|
|
82
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo v2 > /output/a.txt'] },
|
|
83
|
+
{ id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/a/a.txt > /output/b.txt'], inputs: [{ step: 'a' }] }
|
|
84
|
+
]
|
|
85
|
+
});
|
|
86
|
+
const { reporter, events } = recordingReporter();
|
|
87
|
+
await new PipelineRunner(loader, executor, reporter, workdir).run(path2);
|
|
88
|
+
// Both steps should have executed (STEP_STARTING events, not STEP_SKIPPED)
|
|
89
|
+
const starting = events.filter(e => e.event === 'STEP_STARTING');
|
|
90
|
+
const skipped = events.filter(e => e.event === 'STEP_SKIPPED');
|
|
91
|
+
t.is(starting.length, 2);
|
|
92
|
+
t.is(skipped.length, 0);
|
|
93
|
+
});
|
|
94
|
+
// -- allowFailure ------------------------------------------------------------
|
|
95
|
+
dockerTest('step B executes even when step A fails with allowFailure', async (t) => {
|
|
96
|
+
const tmpDir = await createTmpDir();
|
|
97
|
+
const workdir = await createTmpDir();
|
|
98
|
+
const pipelinePath = await writePipeline(tmpDir, {
|
|
99
|
+
id: 'allow-fail',
|
|
100
|
+
steps: [
|
|
101
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'exit 1'], allowFailure: true },
|
|
102
|
+
{ id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo ok > /output/b.txt'] }
|
|
103
|
+
]
|
|
104
|
+
});
|
|
105
|
+
const { reporter, events } = recordingReporter();
|
|
106
|
+
await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath);
|
|
107
|
+
// Step B should have finished successfully
|
|
108
|
+
const finished = events.filter(e => e.event === 'STEP_FINISHED');
|
|
109
|
+
t.true(finished.some(e => e.event === 'STEP_FINISHED' && e.step.id === 'b'));
|
|
110
|
+
});
|
|
111
|
+
// -- force specific step -----------------------------------------------------
|
|
112
|
+
dockerTest('force specific step re-executes only that step', async (t) => {
|
|
113
|
+
const tmpDir = await createTmpDir();
|
|
114
|
+
const workdir = await createTmpDir();
|
|
115
|
+
const pipelinePath = await writePipeline(tmpDir, {
|
|
116
|
+
id: 'force-one',
|
|
117
|
+
steps: [
|
|
118
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] },
|
|
119
|
+
{ id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'] }
|
|
120
|
+
]
|
|
121
|
+
});
|
|
122
|
+
const loader = new PipelineLoader();
|
|
123
|
+
const executor = new DockerCliExecutor();
|
|
124
|
+
// First run
|
|
125
|
+
await new PipelineRunner(loader, executor, noopReporter, workdir).run(pipelinePath);
|
|
126
|
+
// Second run with force on step b only
|
|
127
|
+
const { reporter, events } = recordingReporter();
|
|
128
|
+
await new PipelineRunner(loader, executor, reporter, workdir).run(pipelinePath, { force: ['b'] });
|
|
129
|
+
const skipped = events.filter((e) => e.event === 'STEP_SKIPPED');
|
|
130
|
+
const starting = events.filter((e) => e.event === 'STEP_STARTING');
|
|
131
|
+
t.is(skipped.length, 1);
|
|
132
|
+
t.is(skipped[0].step.id, 'a');
|
|
133
|
+
t.is(starting.length, 1);
|
|
134
|
+
t.is(starting[0].step.id, 'b');
|
|
135
|
+
});
|
|
136
|
+
// -- dry run -----------------------------------------------------------------
|
|
137
|
+
dockerTest('dryRun emits STEP_WOULD_RUN without executing or committing', async (t) => {
|
|
138
|
+
const tmpDir = await createTmpDir();
|
|
139
|
+
const workdir = await createTmpDir();
|
|
140
|
+
const pipelinePath = await writePipeline(tmpDir, {
|
|
141
|
+
id: 'dry-run',
|
|
142
|
+
steps: [
|
|
143
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] }
|
|
144
|
+
]
|
|
145
|
+
});
|
|
146
|
+
const { reporter, events } = recordingReporter();
|
|
147
|
+
await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath, { dryRun: true });
|
|
148
|
+
t.truthy(events.find(e => e.event === 'STEP_WOULD_RUN'));
|
|
149
|
+
t.falsy(events.find(e => e.event === 'STEP_STARTING'));
|
|
150
|
+
// No runs should have been committed to disk
|
|
151
|
+
const ws = await Workspace.open(workdir, 'dry-run');
|
|
152
|
+
const runs = await ws.listRuns();
|
|
153
|
+
t.is(runs.length, 0);
|
|
154
|
+
});
|
|
155
|
+
// -- diamond DAG A→B, A→C, B+C→D --------------------------------------------
|
|
156
|
+
dockerTest('diamond DAG executes all steps in correct order', async (t) => {
|
|
157
|
+
const tmpDir = await createTmpDir();
|
|
158
|
+
const workdir = await createTmpDir();
|
|
159
|
+
const pipelinePath = await writePipeline(tmpDir, {
|
|
160
|
+
id: 'diamond',
|
|
161
|
+
steps: [
|
|
162
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] },
|
|
163
|
+
{ id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/a/a.txt > /output/b.txt'], inputs: [{ step: 'a' }] },
|
|
164
|
+
{ id: 'c', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/a/a.txt > /output/c.txt'], inputs: [{ step: 'a' }] },
|
|
165
|
+
{ id: 'd', image: 'alpine:3.20', cmd: ['sh', '-c', 'cat /input/b/b.txt /input/c/c.txt > /output/d.txt'], inputs: [{ step: 'b' }, { step: 'c' }] }
|
|
166
|
+
]
|
|
167
|
+
});
|
|
168
|
+
const { reporter, events } = recordingReporter();
|
|
169
|
+
await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath);
|
|
170
|
+
const finished = events.filter((e) => e.event === 'STEP_FINISHED');
|
|
171
|
+
t.is(finished.length, 4);
|
|
172
|
+
});
|
|
173
|
+
// -- failed step → dependents skipped ----------------------------------------
|
|
174
|
+
dockerTest('failed step causes dependents to be skipped with reason dependency', async (t) => {
|
|
175
|
+
const tmpDir = await createTmpDir();
|
|
176
|
+
const workdir = await createTmpDir();
|
|
177
|
+
const pipelinePath = await writePipeline(tmpDir, {
|
|
178
|
+
id: 'fail-dep',
|
|
179
|
+
steps: [
|
|
180
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'exit 1'] },
|
|
181
|
+
{ id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'], inputs: [{ step: 'a' }] }
|
|
182
|
+
]
|
|
183
|
+
});
|
|
184
|
+
const { reporter, events } = recordingReporter();
|
|
185
|
+
await t.throwsAsync(async () => new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath), { instanceOf: ContainerCrashError });
|
|
186
|
+
const skipped = events.filter((e) => e.event === 'STEP_SKIPPED');
|
|
187
|
+
t.is(skipped.length, 1);
|
|
188
|
+
t.is(skipped[0].step.id, 'b');
|
|
189
|
+
t.is(skipped[0].reason, 'dependency');
|
|
190
|
+
});
|
|
191
|
+
// -- optional input → step runs despite missing input ------------------------
|
|
192
|
+
dockerTest('optional input allows step to run when input step is missing', async (t) => {
|
|
193
|
+
const tmpDir = await createTmpDir();
|
|
194
|
+
const workdir = await createTmpDir();
|
|
195
|
+
const pipelinePath = await writePipeline(tmpDir, {
|
|
196
|
+
id: 'opt-input',
|
|
197
|
+
steps: [
|
|
198
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'exit 1'] },
|
|
199
|
+
{ id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'], inputs: [{ step: 'a', optional: true }] }
|
|
200
|
+
]
|
|
201
|
+
});
|
|
202
|
+
const { reporter, events } = recordingReporter();
|
|
203
|
+
await t.throwsAsync(async () => new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath));
|
|
204
|
+
// Step b should have been attempted (STEP_STARTING)
|
|
205
|
+
const starting = events.filter((e) => e.event === 'STEP_STARTING');
|
|
206
|
+
t.true(starting.some(e => e.step.id === 'b'));
|
|
207
|
+
});
|
|
208
|
+
// -- if condition → step skipped ---------------------------------------------
|
|
209
|
+
dockerTest('if: "1 == 2" skips step with reason condition', async (t) => {
|
|
210
|
+
const tmpDir = await createTmpDir();
|
|
211
|
+
const workdir = await createTmpDir();
|
|
212
|
+
const pipelinePath = await writePipeline(tmpDir, {
|
|
213
|
+
id: 'cond-skip',
|
|
214
|
+
steps: [
|
|
215
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'], if: '1 == 2' }
|
|
216
|
+
]
|
|
217
|
+
});
|
|
218
|
+
const { reporter, events } = recordingReporter();
|
|
219
|
+
await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath);
|
|
220
|
+
const skipped = events.filter((e) => e.event === 'STEP_SKIPPED');
|
|
221
|
+
t.is(skipped.length, 1);
|
|
222
|
+
t.is(skipped[0].reason, 'condition');
|
|
223
|
+
});
|
|
224
|
+
// -- --target → only subgraph executes ---------------------------------------
|
|
225
|
+
dockerTest('--target executes only the targeted step and its dependencies', async (t) => {
|
|
226
|
+
const tmpDir = await createTmpDir();
|
|
227
|
+
const workdir = await createTmpDir();
|
|
228
|
+
const pipelinePath = await writePipeline(tmpDir, {
|
|
229
|
+
id: 'target-test',
|
|
230
|
+
steps: [
|
|
231
|
+
{ id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] },
|
|
232
|
+
{ id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'], inputs: [{ step: 'a' }] },
|
|
233
|
+
{ id: 'c', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo c > /output/c.txt'] }
|
|
234
|
+
]
|
|
235
|
+
});
|
|
236
|
+
const { reporter, events } = recordingReporter();
|
|
237
|
+
await new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, workdir).run(pipelinePath, { target: ['b'] });
|
|
238
|
+
const finished = events.filter((e) => e.event === 'STEP_FINISHED');
|
|
239
|
+
const executedIds = finished.map(e => e.step.id).sort();
|
|
240
|
+
t.deepEqual(executedIds, ['a', 'b']);
|
|
241
|
+
});
|
|
242
|
+
// -- independent steps → parallel execution ----------------------------------

dockerTest('independent steps both finish (parallel execution)', async (t) => {
  const pipelineDir = await createTmpDir();
  const runRoot = await createTmpDir();
  // Two steps with no edges between them — the runner may schedule them concurrently.
  const pipelinePath = await writePipeline(pipelineDir, {
    id: 'parallel',
    steps: [
      { id: 'a', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo a > /output/a.txt'] },
      { id: 'b', image: 'alpine:3.20', cmd: ['sh', '-c', 'echo b > /output/b.txt'] }
    ]
  });
  const { reporter, events } = recordingReporter();
  const runner = new PipelineRunner(new PipelineLoader(), new DockerCliExecutor(), reporter, runRoot);
  await runner.run(pipelinePath);
  // Both independent steps must reach STEP_FINISHED regardless of ordering.
  const finishedCount = events.filter((event) => event.event === 'STEP_FINISHED').length;
  t.is(finishedCount, 2);
});
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import test from 'ava';
import { StateManager } from '../state.js';
import { createTmpDir } from '../../__tests__/helpers.js';

// Returns a StateManager rooted at `root` with its on-disk state already loaded.
const loadManager = async (root) => {
  const manager = new StateManager(root);
  await manager.load();
  return manager;
};

// -- load / save round-trip --------------------------------------------------

test('load on missing file gives empty state, getStep returns undefined', async (t) => {
  const manager = await loadManager(await createTmpDir());
  t.is(manager.getStep('any'), undefined);
});

test('setStep + save + new instance load returns same data', async (t) => {
  const root = await createTmpDir();
  const writer = await loadManager(root);
  writer.setStep('build', 'run-1', 'fp-abc');
  await writer.save();
  // A fresh instance must observe exactly what was persisted.
  const reader = await loadManager(root);
  const step = reader.getStep('build');
  t.truthy(step);
  t.is(step.runId, 'run-1');
  t.is(step.fingerprint, 'fp-abc');
});

test('removeStep + save + load removes the step', async (t) => {
  const root = await createTmpDir();
  const writer = await loadManager(root);
  writer.setStep('build', 'run-1', 'fp-1');
  writer.setStep('test', 'run-2', 'fp-2');
  await writer.save();
  const editor = await loadManager(root);
  editor.removeStep('build');
  await editor.save();
  // Removal must persist, and unrelated steps must survive.
  const reader = await loadManager(root);
  t.is(reader.getStep('build'), undefined);
  t.truthy(reader.getStep('test'));
});

test('listSteps survives save/load round-trip', async (t) => {
  const root = await createTmpDir();
  const writer = await loadManager(root);
  writer.setStep('a', 'run-a', 'fp-a');
  writer.setStep('b', 'run-b', 'fp-b');
  await writer.save();
  const reader = await loadManager(root);
  const steps = reader.listSteps();
  t.is(steps.length, 2);
  t.truthy(steps.find((s) => s.stepId === 'a' && s.runId === 'run-a'));
  t.truthy(steps.find((s) => s.stepId === 'b' && s.runId === 'run-b'));
});

test('activeRunIds survives save/load round-trip', async (t) => {
  const root = await createTmpDir();
  const writer = await loadManager(root);
  writer.setStep('a', 'run-1', 'fp-a');
  writer.setStep('b', 'run-2', 'fp-b');
  await writer.save();
  const reader = await loadManager(root);
  const ids = reader.activeRunIds();
  t.true(ids.has('run-1'));
  t.true(ids.has('run-2'));
  t.is(ids.size, 2);
});

test('overwriting same stepId keeps only latest after reload', async (t) => {
  const root = await createTmpDir();
  const writer = await loadManager(root);
  writer.setStep('build', 'run-old', 'fp-old');
  writer.setStep('build', 'run-new', 'fp-new');
  await writer.save();
  // The second setStep wins; the first must not leak through persistence.
  const reader = await loadManager(root);
  const step = reader.getStep('build');
  t.is(step.runId, 'run-new');
  t.is(step.fingerprint, 'fp-new');
});
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import test from 'ava';
import { StateManager } from '../state.js';

// Minimal step config that every fingerprint test derives from.
const base = {
  image: 'alpine:3.20',
  cmd: ['echo', 'hello']
};

// Fingerprint of `base` merged with the given overrides.
const fp = (overrides = {}) => StateManager.fingerprint({ ...base, ...overrides });

test('fingerprint is deterministic', (t) => {
  t.is(fp(), fp());
});

test('fingerprint sorts env keys', (t) => {
  // Key insertion order must not influence the fingerprint.
  t.is(
    fp({ env: { A: '1', B: '2' } }),
    fp({ env: { B: '2', A: '1' } })
  );
});

test('fingerprint sorts inputRunIds', (t) => {
  t.is(
    fp({ inputRunIds: ['x', 'y'] }),
    fp({ inputRunIds: ['y', 'x'] })
  );
});

test('fingerprint sorts mounts by containerPath', (t) => {
  const forward = fp({
    mounts: [
      { hostPath: 'a', containerPath: '/a' },
      { hostPath: 'b', containerPath: '/b' }
    ]
  });
  const reversed = fp({
    mounts: [
      { hostPath: 'b', containerPath: '/b' },
      { hostPath: 'a', containerPath: '/a' }
    ]
  });
  t.is(forward, reversed);
});

test('fingerprint differs when optional fields absent vs present', (t) => {
  t.not(fp({ env: { A: '1' } }), fp());
});

test('fingerprint changes when image changes', (t) => {
  t.not(fp(), fp({ image: 'node:24' }));
});

test('fingerprint changes when cmd changes', (t) => {
  t.not(fp(), fp({ cmd: ['echo', 'bye'] }));
});

test('fingerprint changes when env value changes', (t) => {
  t.not(fp({ env: { A: '1' } }), fp({ env: { A: '2' } }));
});
|