takos-actions-engine 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/coverage/base.css +224 -0
- package/coverage/block-navigation.js +87 -0
- package/coverage/clover.xml +3477 -0
- package/coverage/coverage-final.json +20 -0
- package/coverage/favicon.png +0 -0
- package/coverage/index.html +176 -0
- package/coverage/prettify.css +1 -0
- package/coverage/prettify.js +2 -0
- package/coverage/sort-arrow-sprite.png +0 -0
- package/coverage/sorter.js +210 -0
- package/coverage/src/context/base.ts.html +1792 -0
- package/coverage/src/context/env.ts.html +1243 -0
- package/coverage/src/context/index.html +161 -0
- package/coverage/src/context/index.ts.html +229 -0
- package/coverage/src/context/secrets.ts.html +1276 -0
- package/coverage/src/index.html +131 -0
- package/coverage/src/index.ts.html +502 -0
- package/coverage/src/parser/expression.ts.html +2854 -0
- package/coverage/src/parser/index.html +161 -0
- package/coverage/src/parser/index.ts.html +163 -0
- package/coverage/src/parser/validator.ts.html +1588 -0
- package/coverage/src/parser/workflow.ts.html +616 -0
- package/coverage/src/scheduler/dependency.ts.html +1138 -0
- package/coverage/src/scheduler/index.html +221 -0
- package/coverage/src/scheduler/index.ts.html +214 -0
- package/coverage/src/scheduler/job-context.ts.html +265 -0
- package/coverage/src/scheduler/job-policy.ts.html +559 -0
- package/coverage/src/scheduler/job.ts.html +1816 -0
- package/coverage/src/scheduler/listener-registry.ts.html +199 -0
- package/coverage/src/scheduler/step.ts.html +2206 -0
- package/coverage/src/scheduler/steps-context.ts.html +217 -0
- package/coverage/src/types.ts.html +1897 -0
- package/coverage/src/utils/index.html +116 -0
- package/coverage/src/utils/needs.ts.html +127 -0
- package/dist/__tests__/context/env.test.d.ts +2 -0
- package/dist/__tests__/context/env.test.d.ts.map +1 -0
- package/dist/__tests__/context/env.test.js +28 -0
- package/dist/__tests__/context/env.test.js.map +1 -0
- package/dist/__tests__/index.test.d.ts +2 -0
- package/dist/__tests__/index.test.d.ts.map +1 -0
- package/dist/__tests__/index.test.js +50 -0
- package/dist/__tests__/index.test.js.map +1 -0
- package/dist/__tests__/parser/expression.test.d.ts +2 -0
- package/dist/__tests__/parser/expression.test.d.ts.map +1 -0
- package/dist/__tests__/parser/expression.test.js +116 -0
- package/dist/__tests__/parser/expression.test.js.map +1 -0
- package/dist/__tests__/parser/workflow.test.d.ts +2 -0
- package/dist/__tests__/parser/workflow.test.d.ts.map +1 -0
- package/dist/__tests__/parser/workflow.test.js +134 -0
- package/dist/__tests__/parser/workflow.test.js.map +1 -0
- package/dist/__tests__/scheduler/dependency.test.d.ts +2 -0
- package/dist/__tests__/scheduler/dependency.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/dependency.test.js +41 -0
- package/dist/__tests__/scheduler/dependency.test.js.map +1 -0
- package/dist/__tests__/scheduler/job-context.test.d.ts +2 -0
- package/dist/__tests__/scheduler/job-context.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/job-context.test.js +108 -0
- package/dist/__tests__/scheduler/job-context.test.js.map +1 -0
- package/dist/__tests__/scheduler/job-policy.test.d.ts +2 -0
- package/dist/__tests__/scheduler/job-policy.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/job-policy.test.js +159 -0
- package/dist/__tests__/scheduler/job-policy.test.js.map +1 -0
- package/dist/__tests__/scheduler/job.test.d.ts +2 -0
- package/dist/__tests__/scheduler/job.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/job.test.js +826 -0
- package/dist/__tests__/scheduler/job.test.js.map +1 -0
- package/dist/__tests__/scheduler/listener-registry.test.d.ts +2 -0
- package/dist/__tests__/scheduler/listener-registry.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/listener-registry.test.js +79 -0
- package/dist/__tests__/scheduler/listener-registry.test.js.map +1 -0
- package/dist/__tests__/scheduler/step.test.d.ts +2 -0
- package/dist/__tests__/scheduler/step.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/step.test.js +209 -0
- package/dist/__tests__/scheduler/step.test.js.map +1 -0
- package/dist/__tests__/scheduler/steps-context.test.d.ts +2 -0
- package/dist/__tests__/scheduler/steps-context.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/steps-context.test.js +43 -0
- package/dist/__tests__/scheduler/steps-context.test.js.map +1 -0
- package/dist/constants.d.ts +47 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +53 -0
- package/dist/constants.js.map +1 -0
- package/dist/context.d.ts +37 -0
- package/dist/context.d.ts.map +1 -0
- package/dist/context.js +105 -0
- package/dist/context.js.map +1 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +10 -0
- package/dist/index.js.map +1 -0
- package/dist/parser/evaluator-builtins.d.ts +14 -0
- package/dist/parser/evaluator-builtins.d.ts.map +1 -0
- package/dist/parser/evaluator-builtins.js +258 -0
- package/dist/parser/evaluator-builtins.js.map +1 -0
- package/dist/parser/evaluator.d.ts +38 -0
- package/dist/parser/evaluator.d.ts.map +1 -0
- package/dist/parser/evaluator.js +257 -0
- package/dist/parser/evaluator.js.map +1 -0
- package/dist/parser/expression.d.ts +20 -0
- package/dist/parser/expression.d.ts.map +1 -0
- package/dist/parser/expression.js +128 -0
- package/dist/parser/expression.js.map +1 -0
- package/dist/parser/tokenizer.d.ts +26 -0
- package/dist/parser/tokenizer.d.ts.map +1 -0
- package/dist/parser/tokenizer.js +162 -0
- package/dist/parser/tokenizer.js.map +1 -0
- package/dist/parser/validator.d.ts +13 -0
- package/dist/parser/validator.d.ts.map +1 -0
- package/dist/parser/validator.js +383 -0
- package/dist/parser/validator.js.map +1 -0
- package/dist/parser/workflow.d.ts +30 -0
- package/dist/parser/workflow.d.ts.map +1 -0
- package/dist/parser/workflow.js +152 -0
- package/dist/parser/workflow.js.map +1 -0
- package/dist/scheduler/dependency.d.ts +37 -0
- package/dist/scheduler/dependency.d.ts.map +1 -0
- package/dist/scheduler/dependency.js +133 -0
- package/dist/scheduler/dependency.js.map +1 -0
- package/dist/scheduler/job-policy.d.ts +23 -0
- package/dist/scheduler/job-policy.d.ts.map +1 -0
- package/dist/scheduler/job-policy.js +117 -0
- package/dist/scheduler/job-policy.js.map +1 -0
- package/dist/scheduler/job.d.ts +151 -0
- package/dist/scheduler/job.d.ts.map +1 -0
- package/dist/scheduler/job.js +348 -0
- package/dist/scheduler/job.js.map +1 -0
- package/dist/scheduler/step-output-parser.d.ts +14 -0
- package/dist/scheduler/step-output-parser.d.ts.map +1 -0
- package/dist/scheduler/step-output-parser.js +70 -0
- package/dist/scheduler/step-output-parser.js.map +1 -0
- package/dist/scheduler/step.d.ts +74 -0
- package/dist/scheduler/step.d.ts.map +1 -0
- package/dist/scheduler/step.js +387 -0
- package/dist/scheduler/step.js.map +1 -0
- package/dist/types.d.ts +499 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +5 -0
- package/dist/types.js.map +1 -0
- package/dist/workflow-models.d.ts +504 -0
- package/dist/workflow-models.d.ts.map +1 -0
- package/dist/workflow-models.js +5 -0
- package/dist/workflow-models.js.map +1 -0
- package/package.json +29 -0
- package/src/__tests__/context/env.test.ts +38 -0
- package/src/__tests__/index.test.ts +55 -0
- package/src/__tests__/parser/expression.test.ts +151 -0
- package/src/__tests__/parser/workflow.test.ts +151 -0
- package/src/__tests__/scheduler/dependency.test.ts +51 -0
- package/src/__tests__/scheduler/job-context.test.ts +119 -0
- package/src/__tests__/scheduler/job-policy.test.ts +195 -0
- package/src/__tests__/scheduler/job.test.ts +1014 -0
- package/src/__tests__/scheduler/listener-registry.test.ts +95 -0
- package/src/__tests__/scheduler/step.test.ts +258 -0
- package/src/__tests__/scheduler/steps-context.test.ts +49 -0
- package/src/constants.ts +61 -0
- package/src/context.ts +153 -0
- package/src/index.ts +64 -0
- package/src/parser/evaluator-builtins.ts +315 -0
- package/src/parser/evaluator.ts +333 -0
- package/src/parser/expression.ts +154 -0
- package/src/parser/tokenizer.ts +191 -0
- package/src/parser/validator.ts +444 -0
- package/src/parser/workflow.ts +176 -0
- package/src/scheduler/dependency.ts +180 -0
- package/src/scheduler/job-policy.ts +198 -0
- package/src/scheduler/job.ts +523 -0
- package/src/scheduler/step-output-parser.ts +94 -0
- package/src/scheduler/step.ts +543 -0
- package/src/workflow-models.ts +593 -0
- package/tsconfig.json +14 -0
- package/tsconfig.tsbuildinfo +1 -0
|
@@ -0,0 +1,826 @@
|
|
|
1
|
+
import { describe, expect, it } from 'vitest';
|
|
2
|
+
import { createBaseContext } from '../../context.js';
|
|
3
|
+
import { JobScheduler } from '../../scheduler/job.js';
|
|
4
|
+
import { StepRunner } from '../../scheduler/step.js';
|
|
5
|
+
/**
 * Asserts that the event payload, the stored result captured at emit time,
 * and the final run() result are three DISTINCT object instances (defensive
 * copies, not aliases) that are nevertheless deeply equal snapshots of the
 * same job result.
 */
function expectStoredAndEventResultSnapshots(eventResult, storedResultAtEmit, runResult) {
  // Both observed snapshots must exist before identity checks make sense.
  expect(eventResult).toBeDefined();
  expect(storedResultAtEmit).toBeDefined();
  // Every pairing must be a separate instance — no shared references.
  const distinctPairs = [
    [storedResultAtEmit, eventResult],
    [storedResultAtEmit, runResult],
    [eventResult, runResult],
  ];
  for (const [left, right] of distinctPairs) {
    expect(left).not.toBe(right);
  }
  // ...yet all three carry identical data.
  expect(storedResultAtEmit).toEqual(runResult);
  expect(eventResult).toEqual(runResult);
}
|
|
14
|
+
describe('JobScheduler fail-fast behavior', () => {
|
|
15
|
+
// With fail-fast enabled, a failing job in one phase must prevent later
// phases from starting, and not-yet-run jobs must be recorded as 'cancelled'
// (still receiving a job:complete event each).
it('stops later phases and preserves cancelled results when fail-fast is enabled', async () => {
  const executedCommands = [];
  // Stub executor: records every command; only 'fail' exits non-zero.
  const shellExecutor = async (command) => {
    executedCommands.push(command);
    if (command === 'fail') {
      return {
        exitCode: 1,
        stdout: '',
        stderr: 'forced failure',
      };
    }
    return {
      exitCode: 0,
      stdout: '',
      stderr: '',
    };
  };
  // 'setup' and 'fail' share phase 0; 'next' depends on 'setup' so it
  // belongs to a later phase and should never start.
  const workflow = {
    name: 'fail-fast-workflow',
    on: 'push',
    jobs: {
      setup: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'setup' }],
      },
      fail: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'fail' }],
      },
      next: {
        'runs-on': 'ubuntu-latest',
        needs: 'setup',
        steps: [{ run: 'next' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    failFast: true,
    stepRunner: { shellExecutor },
  });
  const startedPhases = [];
  const completedJobs = [];
  scheduler.on((event) => {
    if (event.type === 'phase:start') {
      startedPhases.push(event.phase);
    }
    if (event.type === 'job:complete') {
      completedJobs.push(event.jobId);
    }
  });
  const results = await scheduler.run(createBaseContext());
  expect(executedCommands).toContain('setup');
  expect(executedCommands).toContain('fail');
  expect(executedCommands).not.toContain('next');
  // Only phase 0 ever started.
  expect(startedPhases).toEqual([0]);
  expect(results.fail.conclusion).toBe('failure');
  expect(results.next.conclusion).toBe('cancelled');
  // Even the cancelled job emits job:complete.
  expect(completedJobs.sort()).toEqual(['fail', 'next', 'setup']);
  expect(scheduler.getConclusion()).toBe('failure');
});
|
|
75
|
+
// A failed step must stop the remaining steps of its own job. This holds even
// with fail-fast disabled: fail-fast governs sibling jobs, not intra-job flow.
it('stops remaining steps after a failed step even when fail-fast is disabled', async () => {
  const seenCommands = [];
  // Stub executor: 'fail' exits non-zero, anything else succeeds silently.
  const shellExecutor = async (command) => {
    seenCommands.push(command);
    return command === 'fail'
      ? { exitCode: 1, stdout: '', stderr: 'forced failure' }
      : { exitCode: 0, stdout: '', stderr: '' };
  };
  const workflow = {
    name: 'step-failure-stops-job',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'fail' }, { run: 'after-fail' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    failFast: false,
    stepRunner: { shellExecutor },
  });
  const results = await scheduler.run(createBaseContext());
  // Only the failing step executed; 'after-fail' never ran.
  expect(seenCommands).toEqual(['fail']);
  expect(results.build.steps).toHaveLength(1);
  expect(results.build.conclusion).toBe('failure');
});
|
|
111
|
+
// With fail-fast disabled, a failed job must not stop unrelated jobs; only
// jobs that declare the failed job in `needs` get skipped.
it('continues independent jobs and skips only dependency-failed jobs when fail-fast is disabled', async () => {
  const executedCommands = [];
  // Stub executor: only the 'build' command fails.
  const shellExecutor = async (command) => {
    executedCommands.push(command);
    if (command === 'build') {
      return {
        exitCode: 1,
        stdout: '',
        stderr: 'forced build failure',
      };
    }
    return {
      exitCode: 0,
      stdout: '',
      stderr: '',
    };
  };
  // 'lint' is independent of 'build'; 'deploy' depends on it.
  const workflow = {
    name: 'fail-fast-disabled-dependency-skip-scope',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'build' }],
      },
      lint: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'lint' }],
      },
      deploy: {
        'runs-on': 'ubuntu-latest',
        needs: 'build',
        steps: [{ run: 'deploy' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    failFast: false,
    stepRunner: { shellExecutor },
  });
  const results = await scheduler.run(createBaseContext());
  expect(results.build.conclusion).toBe('failure');
  // Independent job still ran to success.
  expect(results.lint.conclusion).toBe('success');
  // Dependent job is skipped (not cancelled) under fail-fast: false.
  expect(results.deploy.conclusion).toBe('skipped');
  expect(executedCommands).toContain('build');
  expect(executedCommands).toContain('lint');
  expect(executedCommands).not.toContain('deploy');
});
|
|
159
|
+
// Step-level continue-on-error lets a job keep running past an allowed
// failure and still finish with a success conclusion.
it('preserves continue-on-error semantics when fail-fast is disabled', async () => {
  const seenCommands = [];
  // Stub executor: 'allowed-fail' exits non-zero; everything else succeeds.
  const shellExecutor = async (command) => {
    seenCommands.push(command);
    return command === 'allowed-fail'
      ? { exitCode: 1, stdout: '', stderr: 'allowed failure' }
      : { exitCode: 0, stdout: '', stderr: '' };
  };
  const workflow = {
    name: 'continue-on-error-job',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [
          { run: 'allowed-fail', 'continue-on-error': true },
          { run: 'after-continue' },
        ],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    failFast: false,
    stepRunner: { shellExecutor },
  });
  const results = await scheduler.run(createBaseContext());
  // Both steps ran despite the first one failing, and the job succeeded.
  expect(seenCommands).toEqual(['allowed-fail', 'after-continue']);
  expect(results.build.steps).toHaveLength(2);
  expect(results.build.conclusion).toBe('success');
});
|
|
198
|
+
// With maxParallel limiting the phase to chunks, a failure in one running job
// must cancel in-flight siblings between their steps AND the jobs queued in
// later chunks of the same phase.
it('propagates fail-fast cancellation within phase chunks', async () => {
  const executedCommands = [];
  const shellExecutor = async (command) => {
    executedCommands.push(command);
    // Delay 'work-1' so the parallel 'fail' job has time to finish first
    // and flip the cancellation flag before 'work-2' is attempted.
    if (command === 'work-1') {
      await new Promise((resolve) => setTimeout(resolve, 20));
    }
    if (command === 'fail') {
      return {
        exitCode: 1,
        stdout: '',
        stderr: 'forced failure',
      };
    }
    return {
      exitCode: 0,
      stdout: '',
      stderr: '',
    };
  };
  // maxParallel: 2 puts 'a-fail' + 'b-work' in the first chunk and
  // 'c-later' in a second chunk of the same phase.
  const workflow = {
    name: 'chunk-cancellation',
    on: 'push',
    jobs: {
      'a-fail': {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'fail' }],
      },
      'b-work': {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'work-1' }, { run: 'work-2' }],
      },
      'c-later': {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'later' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    failFast: true,
    maxParallel: 2,
    stepRunner: { shellExecutor },
  });
  const results = await scheduler.run(createBaseContext());
  expect(executedCommands).toContain('fail');
  expect(executedCommands).toContain('work-1');
  // 'b-work' was cancelled mid-job: its second step never ran.
  expect(executedCommands).not.toContain('work-2');
  // The second chunk never started at all.
  expect(executedCommands).not.toContain('later');
  expect(results['a-fail'].conclusion).toBe('failure');
  expect(results['b-work'].conclusion).toBe('cancelled');
  expect(results['c-later'].conclusion).toBe('cancelled');
});
|
|
250
|
+
// If a job already has a cancelled result recorded in the scheduler's
// internal results map, runJob must short-circuit (no steps execute) and
// return a COPY of that stored result, not the stored instance itself.
it('does not execute a job that is already marked as cancelled', async () => {
  // Any invocation of the executor is a test failure.
  const shellExecutor = async () => {
    throw new Error('shell executor should not be called');
  };
  const workflow = {
    name: 'cancelled-job-guard',
    on: 'push',
    jobs: {
      guarded: {
        name: 'guarded',
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'should-not-run' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    stepRunner: { shellExecutor },
  });
  const cancelledResult = {
    id: 'guarded',
    name: 'guarded',
    status: 'completed',
    conclusion: 'cancelled',
    steps: [],
    outputs: {},
  };
  // NOTE(review): `internalScheduler` is a compiled-away TS cast that exposes
  // the private `results` map and `runJob` — a white-box test by design.
  const internalScheduler = scheduler;
  internalScheduler.results.set('guarded', cancelledResult);
  const result = await internalScheduler.runJob('guarded', createBaseContext());
  // Returned result is a defensive copy, not the seeded object...
  expect(result).not.toBe(cancelledResult);
  // ...but carries identical data.
  expect(result).toEqual(cancelledResult);
});
|
|
282
|
+
// When the scheduler has already been cancelled, runJob must report
// 'cancelled' — not 'skipped' — even though the job's `if:` condition is
// false and would otherwise skip it.
it('prioritizes cancellation over condition-based skipping when scheduler is cancelled', async () => {
  // Any executor invocation is a test failure.
  const shellExecutor = async () => {
    throw new Error('shell executor should not be called');
  };
  const workflow = {
    name: 'cancel-priority-over-skip',
    on: 'push',
    jobs: {
      guarded: {
        name: 'guarded',
        if: '${{ false }}',
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'should-not-run' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, { stepRunner: { shellExecutor } });
  // Cancel up front, then drive the internal runJob path directly
  // (white-box access to a method that is private in the TS source).
  scheduler.cancel();
  const result = await scheduler.runJob('guarded', createBaseContext());
  expect(result.conclusion).toBe('cancelled');
  expect(result.status).toBe('completed');
});
|
|
307
|
+
// The scheduler must write the finalized job result into its internal store
// BEFORE emitting job:complete, and the event payload, stored snapshot, and
// run() result must all be independent copies of the same data.
it('stores a finalized result before emitting job:complete', async () => {
  const workflow = {
    name: 'complete-event-observation',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [{ id: 'compile', run: 'compile' }],
      },
    },
  };
  // Step runner stub that always succeeds and produces a known output.
  class OutputStepRunner extends StepRunner {
    async runStep(step) {
      return {
        id: step.id,
        name: step.name,
        status: 'completed',
        conclusion: 'success',
        outputs: { artifact: 'build.tar' },
        startedAt: new Date(),
        completedAt: new Date(),
      };
    }
  }
  const scheduler = new JobScheduler(workflow);
  // White-box: replace the private stepRunner (cast erased in compiled JS).
  const internalScheduler = scheduler;
  internalScheduler.stepRunner = new OutputStepRunner();
  let completeEventCount = 0;
  let emittedResult;
  let storedResultAtEmit;
  scheduler.on((event) => {
    if (event.type !== 'job:complete') {
      return;
    }
    completeEventCount += 1;
    emittedResult = event.result;
    // Read the stored result AT EMIT TIME — this is the ordering under test.
    storedResultAtEmit = scheduler.getResults()[event.jobId];
  });
  const results = await scheduler.run(createBaseContext());
  expect(completeEventCount).toBe(1);
  expectStoredAndEventResultSnapshots(emittedResult, storedResultAtEmit, results.build);
  expect(storedResultAtEmit?.status).toBe('completed');
  expect(storedResultAtEmit?.conclusion).toBe('success');
  expect(storedResultAtEmit?.completedAt).toBeInstanceOf(Date);
  expect(storedResultAtEmit?.outputs).toEqual({ artifact: 'build.tar' });
});
|
|
353
|
+
// A condition-skipped job must emit job:skip (with reason) followed by
// job:complete — never job:start — and the skip event's result must be an
// independent copy in sync with the stored result.
it('keeps job:skip emit and stored skipped result in sync', async () => {
  // Any executor invocation is a test failure — the job must be skipped.
  const shellExecutor = async () => {
    throw new Error('shell executor should not be called');
  };
  const workflow = {
    name: 'skip-event-observation',
    on: 'push',
    jobs: {
      guarded: {
        if: '${{ false }}',
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'should-not-run' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    stepRunner: { shellExecutor },
  });
  const jobEvents = [];
  let skipReason;
  let skipEventResult;
  let storedResultAtSkipEmit;
  scheduler.on((event) => {
    if (event.type === 'job:start' ||
      event.type === 'job:skip' ||
      event.type === 'job:complete') {
      jobEvents.push(event.type);
    }
    if (event.type !== 'job:skip') {
      return;
    }
    skipReason = event.reason;
    skipEventResult = event.result;
    // Capture the stored snapshot exactly when job:skip is emitted.
    storedResultAtSkipEmit = scheduler.getResults()[event.jobId];
  });
  const results = await scheduler.run(createBaseContext());
  // Exact event ordering: skip first, then complete; no start event.
  expect(jobEvents).toEqual(['job:skip', 'job:complete']);
  expect(skipReason).toBe('Condition not met');
  expectStoredAndEventResultSnapshots(skipEventResult, storedResultAtSkipEmit, results.guarded);
  expect(storedResultAtSkipEmit?.status).toBe('completed');
  expect(storedResultAtSkipEmit?.conclusion).toBe('skipped');
});
|
|
395
|
+
// A listener that mutates the job:skip payload must not be able to poison the
// subsequent job:complete payload, the run() results, or getResults().
it('isolates job:complete and stored results from job:skip event mutations', async () => {
  const shellExecutor = async () => {
    throw new Error('shell executor should not be called');
  };
  const workflow = {
    name: 'skip-event-payload-isolation',
    on: 'push',
    jobs: {
      guarded: {
        if: '${{ false }}',
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'should-not-run' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    stepRunner: { shellExecutor },
  });
  let completeEventResult;
  scheduler.on((event) => {
    if (event.type === 'job:skip') {
      // Hostile listener: tamper with the skip payload in place.
      event.result.outputs.leaked = 'mutated-by-skip-listener';
      event.result.steps.push({
        id: 'skip-fake',
        status: 'completed',
        conclusion: 'success',
        outputs: {},
      });
      return;
    }
    if (event.type === 'job:complete') {
      completeEventResult = event.result;
    }
  });
  const results = await scheduler.run(createBaseContext());
  const storedResults = scheduler.getResults();
  expect(completeEventResult).toBeDefined();
  // None of the three surfaces show the injected output or fake step.
  expect(completeEventResult?.outputs.leaked).toBeUndefined();
  expect(completeEventResult?.steps.find((step) => step.id === 'skip-fake')).toBeUndefined();
  expect(results.guarded.outputs.leaked).toBeUndefined();
  expect(results.guarded.steps.find((step) => step.id === 'skip-fake')).toBeUndefined();
  expect(storedResults.guarded.outputs.leaked).toBeUndefined();
  expect(storedResults.guarded.steps.find((step) => step.id === 'skip-fake')).toBeUndefined();
});
|
|
439
|
+
// When a job is skipped by its condition, jobs that `needs` it must also be
// skipped, with a distinct reason naming the skipped dependency.
it('skips dependent jobs when a needed job is skipped', async () => {
  const executedCommands = [];
  // Always-succeeding executor; only used to prove nothing ran.
  const shellExecutor = async (command) => {
    executedCommands.push(command);
    return {
      exitCode: 0,
      stdout: '',
      stderr: '',
    };
  };
  const workflow = {
    name: 'needs-skipped-propagation',
    on: 'push',
    jobs: {
      setup: {
        if: '${{ false }}',
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'setup' }],
      },
      build: {
        'runs-on': 'ubuntu-latest',
        needs: 'setup',
        steps: [{ run: 'build' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    stepRunner: { shellExecutor },
  });
  const skipReasons = {};
  scheduler.on((event) => {
    if (event.type === 'job:skip') {
      skipReasons[event.jobId] = event.reason;
    }
  });
  const results = await scheduler.run(createBaseContext());
  // No step of either job ever executed.
  expect(executedCommands).toEqual([]);
  expect(results.setup.conclusion).toBe('skipped');
  expect(results.build.conclusion).toBe('skipped');
  // Each skip carries its own cause.
  expect(skipReasons.setup).toBe('Condition not met');
  expect(skipReasons.build).toBe('Dependency "setup" skipped');
});
|
|
481
|
+
// Even when a cancelled scheduler short-circuits runJob (no steps, no
// job:start, no job:skip), it must still emit job:complete and store an
// independent copy of the cancelled result.
it('emits job:complete when a cancelled scheduler short-circuits runJob', async () => {
  const shellExecutor = async () => {
    throw new Error('shell executor should not be called');
  };
  const workflow = {
    name: 'cancelled-runjob-complete-event',
    on: 'push',
    jobs: {
      guarded: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'should-not-run' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    stepRunner: { shellExecutor },
  });
  const jobEvents = [];
  scheduler.on((event) => {
    if (event.type === 'job:start' ||
      event.type === 'job:skip' ||
      event.type === 'job:complete') {
      jobEvents.push(event.type);
    }
  });
  scheduler.cancel();
  // White-box: drive the internal runJob path directly (private in TS).
  const internalScheduler = scheduler;
  const result = await internalScheduler.runJob('guarded', createBaseContext());
  expect(result.conclusion).toBe('cancelled');
  expect(result.status).toBe('completed');
  // Exactly one event, and it is job:complete.
  expect(jobEvents).toEqual(['job:complete']);
  // Stored result is a copy of the returned one, not the same instance.
  expect(scheduler.getResults().guarded).not.toBe(result);
  expect(scheduler.getResults().guarded).toEqual(result);
});
|
|
515
|
+
// Listener mutations of the job:complete payload must not leak back into the
// results that run() returns.
it('isolates internal results from job:complete event mutations', async () => {
  const workflow = {
    name: 'event-result-isolation',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'build' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow);
  // Hostile listener: tampers with the emitted result object in place.
  scheduler.on((event) => {
    if (event.type === 'job:complete') {
      event.result.outputs.leaked = 'mutated-by-listener';
      event.result.steps.push({
        id: 'fake',
        status: 'completed',
        conclusion: 'success',
        outputs: {},
      });
    }
  });
  const results = await scheduler.run(createBaseContext());
  // Neither the injected output nor the fake step is visible in the results.
  expect(results.build.outputs.leaked).toBeUndefined();
  expect(results.build.steps.find((entry) => entry.id === 'fake')).toBeUndefined();
});
|
|
543
|
+
// The workflow:complete event must carry defensive copies: mutating its
// results map must affect neither run()'s return value nor getResults().
it('isolates internal results from workflow:complete event mutations', async () => {
  const workflow = {
    name: 'workflow-complete-result-isolation',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'build' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow);
  scheduler.on((event) => {
    if (event.type === 'workflow:complete') {
      // Hostile listener: tamper with the emitted results map in place.
      event.results.build.outputs.leaked = 'mutated-by-listener';
    }
  });
  const results = await scheduler.run(createBaseContext());
  expect(results.build.outputs.leaked).toBeUndefined();
  expect(scheduler.getResults().build.outputs.leaked).toBeUndefined();
});
|
|
565
|
+
// run() and getResults() must hand out snapshots: mutating any returned
// result object must never be visible in a later snapshot.
it('returns result snapshots that cannot mutate scheduler state', async () => {
  const workflow = {
    name: 'results-snapshot-isolation',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'build' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow);
  const runResults = await scheduler.run(createBaseContext());
  // Mutate the object returned by run()...
  runResults.build.outputs.leaked = 'mutated-run-result';
  const snapshotAfterRunReturnMutation = scheduler.getResults();
  // ...and verify the scheduler's stored state was untouched.
  expect(snapshotAfterRunReturnMutation.build.outputs.leaked).toBeUndefined();
  const firstSnapshot = scheduler.getResults();
  // Mutate a getResults() snapshot the same way.
  firstSnapshot.build.outputs.leaked = 'mutated-by-caller';
  firstSnapshot.build.steps.push({
    id: 'fake',
    status: 'completed',
    conclusion: 'success',
    outputs: {},
  });
  const secondSnapshot = scheduler.getResults();
  // A fresh snapshot shows none of the tampering.
  expect(secondSnapshot.build.outputs.leaked).toBeUndefined();
  expect(secondSnapshot.build.steps.find((step) => step.id === 'fake')).toBeUndefined();
});
|
|
593
|
+
// While one run() is in flight, a second run() must reject immediately; the
// first run is held open by gating the step executor on an external promise.
it('guards against concurrent run invocations while a run is in progress', async () => {
  let unblockFirstRun = () => { };
  // Promise the executor awaits; resolved manually to release the first run.
  const blockFirstRun = new Promise((resolve) => {
    unblockFirstRun = resolve;
  });
  const executedCommands = [];
  const shellExecutor = async (command) => {
    executedCommands.push(command);
    // Park here until the test explicitly unblocks the first run.
    await blockFirstRun;
    return {
      exitCode: 0,
      stdout: '',
      stderr: '',
    };
  };
  const workflow = {
    name: 'concurrent-run-guard',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'build' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    stepRunner: { shellExecutor },
  });
  // Start (but do not await) the first run, then attempt a second.
  const firstRunPromise = scheduler.run(createBaseContext());
  await expect(scheduler.run(createBaseContext())).rejects.toThrow('JobScheduler is already running');
  unblockFirstRun();
  const firstRunResults = await firstRunPromise;
  // The rejected second run caused no extra step executions.
  expect(executedCommands).toEqual(['build']);
  expect(firstRunResults.build.conclusion).toBe('success');
});
|
|
628
|
+
it('isolates dependency outputs from needs context mutations', async () => {
  const workflow = {
    name: 'needs-context-output-isolation',
    on: 'push',
    jobs: {
      setup: {
        'runs-on': 'ubuntu-latest',
        steps: [{ id: 'produce', run: 'produce' }],
      },
      deploy: {
        'runs-on': 'ubuntu-latest',
        needs: 'setup',
        steps: [{ id: 'mutate-needs', run: 'deploy' }],
      },
    },
  };

  // Step runner that deliberately scribbles on the needs context it is handed.
  class MutatingNeedsStepRunner extends StepRunner {
    async runStep(step, context) {
      if (step.id === 'mutate-needs' && context.needs.setup) {
        context.needs.setup.outputs.token = 'mutated';
      }
      return {
        id: step.id,
        name: step.name,
        status: 'completed',
        conclusion: 'success',
        outputs: step.id === 'produce' ? { token: 'abc' } : {},
        startedAt: new Date(),
        completedAt: new Date(),
      };
    }
  }

  const scheduler = new JobScheduler(workflow);
  // Swap in the hostile runner (reaches past the public surface on purpose).
  scheduler.stepRunner = new MutatingNeedsStepRunner();
  const results = await scheduler.run(createBaseContext());

  // The stored setup output must be unaffected by the downstream mutation.
  expect(results.setup.outputs.token).toBe('abc');
  expect(scheduler.getResults().setup.outputs.token).toBe('abc');
});
|
|
667
|
+
it('isolates stored step outputs from steps context mutations', async () => {
  const workflow = {
    name: 'steps-context-output-isolation',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [
          { id: 'produce', run: 'produce' },
          { id: 'mutate-steps', run: 'mutate' },
        ],
      },
    },
  };

  // Step runner whose second step scribbles on the first step's recorded outputs.
  class MutatingStepsContextStepRunner extends StepRunner {
    async runStep(step, context) {
      if (step.id === 'mutate-steps' && context.steps.produce) {
        context.steps.produce.outputs.artifact = 'mutated';
      }
      return {
        id: step.id,
        name: step.name,
        status: 'completed',
        conclusion: 'success',
        outputs: step.id === 'produce' ? { artifact: 'dist.tar' } : {},
        startedAt: new Date(),
        completedAt: new Date(),
      };
    }
  }

  const scheduler = new JobScheduler(workflow);
  // Swap in the hostile runner (reaches past the public surface on purpose).
  scheduler.stepRunner = new MutatingStepsContextStepRunner();
  const results = await scheduler.run(createBaseContext());

  // Stored step result, job outputs, and fresh snapshots all keep the real value.
  expect(results.build.steps[0].outputs.artifact).toBe('dist.tar');
  expect(results.build.outputs.artifact).toBe('dist.tar');
  expect(scheduler.getResults().build.outputs.artifact).toBe('dist.tar');
});
|
|
705
|
+
it('resets cancellation/results between runs while preserving listeners', async () => {
  const commands = [];
  // The first 'build' invocation fails; every subsequent command succeeds.
  let pendingBuildFailure = true;
  const shellExecutor = async (command) => {
    commands.push(command);
    if (command === 'build' && pendingBuildFailure) {
      pendingBuildFailure = false;
      return {
        exitCode: 1,
        stdout: '',
        stderr: 'forced first-run failure',
      };
    }
    return {
      exitCode: 0,
      stdout: '',
      stderr: '',
    };
  };
  const workflow = {
    name: 'scheduler-reset-across-runs',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'build' }],
      },
      deploy: {
        'runs-on': 'ubuntu-latest',
        needs: 'build',
        steps: [{ run: 'deploy' }],
      },
    },
  };
  const scheduler = new JobScheduler(workflow, {
    failFast: true,
    stepRunner: { shellExecutor },
  });

  // Listener registered once; it must keep firing across both runs.
  let startedCount = 0;
  let completedCount = 0;
  scheduler.on((event) => {
    if (event.type === 'workflow:start') {
      startedCount += 1;
    }
    if (event.type === 'workflow:complete') {
      completedCount += 1;
    }
  });

  // Run 1: build fails, deploy is cancelled by fail-fast.
  const firstRun = await scheduler.run(createBaseContext());
  expect(firstRun.build.conclusion).toBe('failure');
  expect(firstRun.deploy.conclusion).toBe('cancelled');
  expect(scheduler.getConclusion()).toBe('failure');

  // Run 2: state was reset, so both jobs now succeed.
  const secondRun = await scheduler.run(createBaseContext());
  expect(secondRun.build.conclusion).toBe('success');
  expect(secondRun.deploy.conclusion).toBe('success');
  expect(scheduler.getConclusion()).toBe('success');

  // First run never reached deploy; second run executed both jobs.
  expect(commands).toEqual(['build', 'build', 'deploy']);
  expect(startedCount).toBe(2);
  expect(completedCount).toBe(2);
});
|
|
765
|
+
it('marks a job as failure when step runner throws unexpectedly', async () => {
  const workflow = {
    name: 'step-runner-throws',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [{ run: 'build' }],
      },
    },
  };

  // Runner that always blows up instead of returning a step result.
  class ThrowingStepRunner extends StepRunner {
    async runStep() {
      throw new Error('unexpected step runner failure');
    }
  }

  const scheduler = new JobScheduler(workflow);
  // Swap in the throwing runner (reaches past the public surface on purpose).
  scheduler.stepRunner = new ThrowingStepRunner();
  const results = await scheduler.run(createBaseContext());

  // The thrown error becomes a job failure with no recorded step results.
  expect(results.build.conclusion).toBe('failure');
  expect(results.build.steps).toHaveLength(0);
});
|
|
788
|
+
it('passes zero-based step index metadata to the step runner', async () => {
  const workflow = {
    name: 'step-index-metadata',
    on: 'push',
    jobs: {
      build: {
        'runs-on': 'ubuntu-latest',
        steps: [
          { id: 'first', run: 'first' },
          { id: 'second', run: 'second' },
          { id: 'third', run: 'third' },
        ],
      },
    },
  };

  // Runner that records the index metadata it receives for each step.
  class RecordingStepRunner extends StepRunner {
    constructor() {
      super();
      this.indices = [];
    }
    async runStep(step, _context, metadata = {}) {
      this.indices.push(metadata.index ?? -1);
      return {
        id: step.id,
        name: step.name,
        status: 'completed',
        conclusion: 'success',
        outputs: {},
        startedAt: new Date(),
        completedAt: new Date(),
      };
    }
  }

  const scheduler = new JobScheduler(workflow);
  const runner = new RecordingStepRunner();
  // Swap in the recording runner (reaches past the public surface on purpose).
  scheduler.stepRunner = runner;
  await scheduler.run(createBaseContext());

  // Steps are dispatched in order with zero-based indices.
  expect(runner.indices).toEqual([0, 1, 2]);
});
|
|
825
|
+
});
|
|
826
|
+
//# sourceMappingURL=job.test.js.map
|