takos-actions-engine 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/coverage/base.css +224 -0
- package/coverage/block-navigation.js +87 -0
- package/coverage/clover.xml +3477 -0
- package/coverage/coverage-final.json +20 -0
- package/coverage/favicon.png +0 -0
- package/coverage/index.html +176 -0
- package/coverage/prettify.css +1 -0
- package/coverage/prettify.js +2 -0
- package/coverage/sort-arrow-sprite.png +0 -0
- package/coverage/sorter.js +210 -0
- package/coverage/src/context/base.ts.html +1792 -0
- package/coverage/src/context/env.ts.html +1243 -0
- package/coverage/src/context/index.html +161 -0
- package/coverage/src/context/index.ts.html +229 -0
- package/coverage/src/context/secrets.ts.html +1276 -0
- package/coverage/src/index.html +131 -0
- package/coverage/src/index.ts.html +502 -0
- package/coverage/src/parser/expression.ts.html +2854 -0
- package/coverage/src/parser/index.html +161 -0
- package/coverage/src/parser/index.ts.html +163 -0
- package/coverage/src/parser/validator.ts.html +1588 -0
- package/coverage/src/parser/workflow.ts.html +616 -0
- package/coverage/src/scheduler/dependency.ts.html +1138 -0
- package/coverage/src/scheduler/index.html +221 -0
- package/coverage/src/scheduler/index.ts.html +214 -0
- package/coverage/src/scheduler/job-context.ts.html +265 -0
- package/coverage/src/scheduler/job-policy.ts.html +559 -0
- package/coverage/src/scheduler/job.ts.html +1816 -0
- package/coverage/src/scheduler/listener-registry.ts.html +199 -0
- package/coverage/src/scheduler/step.ts.html +2206 -0
- package/coverage/src/scheduler/steps-context.ts.html +217 -0
- package/coverage/src/types.ts.html +1897 -0
- package/coverage/src/utils/index.html +116 -0
- package/coverage/src/utils/needs.ts.html +127 -0
- package/dist/__tests__/context/env.test.d.ts +2 -0
- package/dist/__tests__/context/env.test.d.ts.map +1 -0
- package/dist/__tests__/context/env.test.js +28 -0
- package/dist/__tests__/context/env.test.js.map +1 -0
- package/dist/__tests__/index.test.d.ts +2 -0
- package/dist/__tests__/index.test.d.ts.map +1 -0
- package/dist/__tests__/index.test.js +50 -0
- package/dist/__tests__/index.test.js.map +1 -0
- package/dist/__tests__/parser/expression.test.d.ts +2 -0
- package/dist/__tests__/parser/expression.test.d.ts.map +1 -0
- package/dist/__tests__/parser/expression.test.js +116 -0
- package/dist/__tests__/parser/expression.test.js.map +1 -0
- package/dist/__tests__/parser/workflow.test.d.ts +2 -0
- package/dist/__tests__/parser/workflow.test.d.ts.map +1 -0
- package/dist/__tests__/parser/workflow.test.js +134 -0
- package/dist/__tests__/parser/workflow.test.js.map +1 -0
- package/dist/__tests__/scheduler/dependency.test.d.ts +2 -0
- package/dist/__tests__/scheduler/dependency.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/dependency.test.js +41 -0
- package/dist/__tests__/scheduler/dependency.test.js.map +1 -0
- package/dist/__tests__/scheduler/job-context.test.d.ts +2 -0
- package/dist/__tests__/scheduler/job-context.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/job-context.test.js +108 -0
- package/dist/__tests__/scheduler/job-context.test.js.map +1 -0
- package/dist/__tests__/scheduler/job-policy.test.d.ts +2 -0
- package/dist/__tests__/scheduler/job-policy.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/job-policy.test.js +159 -0
- package/dist/__tests__/scheduler/job-policy.test.js.map +1 -0
- package/dist/__tests__/scheduler/job.test.d.ts +2 -0
- package/dist/__tests__/scheduler/job.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/job.test.js +826 -0
- package/dist/__tests__/scheduler/job.test.js.map +1 -0
- package/dist/__tests__/scheduler/listener-registry.test.d.ts +2 -0
- package/dist/__tests__/scheduler/listener-registry.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/listener-registry.test.js +79 -0
- package/dist/__tests__/scheduler/listener-registry.test.js.map +1 -0
- package/dist/__tests__/scheduler/step.test.d.ts +2 -0
- package/dist/__tests__/scheduler/step.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/step.test.js +209 -0
- package/dist/__tests__/scheduler/step.test.js.map +1 -0
- package/dist/__tests__/scheduler/steps-context.test.d.ts +2 -0
- package/dist/__tests__/scheduler/steps-context.test.d.ts.map +1 -0
- package/dist/__tests__/scheduler/steps-context.test.js +43 -0
- package/dist/__tests__/scheduler/steps-context.test.js.map +1 -0
- package/dist/constants.d.ts +47 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +53 -0
- package/dist/constants.js.map +1 -0
- package/dist/context.d.ts +37 -0
- package/dist/context.d.ts.map +1 -0
- package/dist/context.js +105 -0
- package/dist/context.js.map +1 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +10 -0
- package/dist/index.js.map +1 -0
- package/dist/parser/evaluator-builtins.d.ts +14 -0
- package/dist/parser/evaluator-builtins.d.ts.map +1 -0
- package/dist/parser/evaluator-builtins.js +258 -0
- package/dist/parser/evaluator-builtins.js.map +1 -0
- package/dist/parser/evaluator.d.ts +38 -0
- package/dist/parser/evaluator.d.ts.map +1 -0
- package/dist/parser/evaluator.js +257 -0
- package/dist/parser/evaluator.js.map +1 -0
- package/dist/parser/expression.d.ts +20 -0
- package/dist/parser/expression.d.ts.map +1 -0
- package/dist/parser/expression.js +128 -0
- package/dist/parser/expression.js.map +1 -0
- package/dist/parser/tokenizer.d.ts +26 -0
- package/dist/parser/tokenizer.d.ts.map +1 -0
- package/dist/parser/tokenizer.js +162 -0
- package/dist/parser/tokenizer.js.map +1 -0
- package/dist/parser/validator.d.ts +13 -0
- package/dist/parser/validator.d.ts.map +1 -0
- package/dist/parser/validator.js +383 -0
- package/dist/parser/validator.js.map +1 -0
- package/dist/parser/workflow.d.ts +30 -0
- package/dist/parser/workflow.d.ts.map +1 -0
- package/dist/parser/workflow.js +152 -0
- package/dist/parser/workflow.js.map +1 -0
- package/dist/scheduler/dependency.d.ts +37 -0
- package/dist/scheduler/dependency.d.ts.map +1 -0
- package/dist/scheduler/dependency.js +133 -0
- package/dist/scheduler/dependency.js.map +1 -0
- package/dist/scheduler/job-policy.d.ts +23 -0
- package/dist/scheduler/job-policy.d.ts.map +1 -0
- package/dist/scheduler/job-policy.js +117 -0
- package/dist/scheduler/job-policy.js.map +1 -0
- package/dist/scheduler/job.d.ts +151 -0
- package/dist/scheduler/job.d.ts.map +1 -0
- package/dist/scheduler/job.js +348 -0
- package/dist/scheduler/job.js.map +1 -0
- package/dist/scheduler/step-output-parser.d.ts +14 -0
- package/dist/scheduler/step-output-parser.d.ts.map +1 -0
- package/dist/scheduler/step-output-parser.js +70 -0
- package/dist/scheduler/step-output-parser.js.map +1 -0
- package/dist/scheduler/step.d.ts +74 -0
- package/dist/scheduler/step.d.ts.map +1 -0
- package/dist/scheduler/step.js +387 -0
- package/dist/scheduler/step.js.map +1 -0
- package/dist/types.d.ts +499 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +5 -0
- package/dist/types.js.map +1 -0
- package/dist/workflow-models.d.ts +504 -0
- package/dist/workflow-models.d.ts.map +1 -0
- package/dist/workflow-models.js +5 -0
- package/dist/workflow-models.js.map +1 -0
- package/package.json +29 -0
- package/src/__tests__/context/env.test.ts +38 -0
- package/src/__tests__/index.test.ts +55 -0
- package/src/__tests__/parser/expression.test.ts +151 -0
- package/src/__tests__/parser/workflow.test.ts +151 -0
- package/src/__tests__/scheduler/dependency.test.ts +51 -0
- package/src/__tests__/scheduler/job-context.test.ts +119 -0
- package/src/__tests__/scheduler/job-policy.test.ts +195 -0
- package/src/__tests__/scheduler/job.test.ts +1014 -0
- package/src/__tests__/scheduler/listener-registry.test.ts +95 -0
- package/src/__tests__/scheduler/step.test.ts +258 -0
- package/src/__tests__/scheduler/steps-context.test.ts +49 -0
- package/src/constants.ts +61 -0
- package/src/context.ts +153 -0
- package/src/index.ts +64 -0
- package/src/parser/evaluator-builtins.ts +315 -0
- package/src/parser/evaluator.ts +333 -0
- package/src/parser/expression.ts +154 -0
- package/src/parser/tokenizer.ts +191 -0
- package/src/parser/validator.ts +444 -0
- package/src/parser/workflow.ts +176 -0
- package/src/scheduler/dependency.ts +180 -0
- package/src/scheduler/job-policy.ts +198 -0
- package/src/scheduler/job.ts +523 -0
- package/src/scheduler/step-output-parser.ts +94 -0
- package/src/scheduler/step.ts +543 -0
- package/src/workflow-models.ts +593 -0
- package/tsconfig.json +14 -0
- package/tsconfig.tsbuildinfo +1 -0
|
@@ -0,0 +1,444 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Workflow schema validation using Zod
|
|
3
|
+
*/
|
|
4
|
+
import { z } from 'zod';
|
|
5
|
+
import { buildDependencyGraph, detectCycle, DependencyError } from '../scheduler/dependency.js';
|
|
6
|
+
import type { Workflow, WorkflowDiagnostic } from '../workflow-models.js';
|
|
7
|
+
import { normalizeNeedsInput } from '../scheduler/job.js';
|
|
8
|
+
|
|
9
|
+
// =============================================================================
|
|
10
|
+
// Zod Schemas
|
|
11
|
+
// =============================================================================
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* Branch filter schema
|
|
15
|
+
*/
|
|
16
|
+
const branchFilterSchema = z.object({
|
|
17
|
+
branches: z.array(z.string()).optional(),
|
|
18
|
+
'branches-ignore': z.array(z.string()).optional(),
|
|
19
|
+
tags: z.array(z.string()).optional(),
|
|
20
|
+
'tags-ignore': z.array(z.string()).optional(),
|
|
21
|
+
paths: z.array(z.string()).optional(),
|
|
22
|
+
'paths-ignore': z.array(z.string()).optional(),
|
|
23
|
+
});
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* Push trigger schema
|
|
27
|
+
*/
|
|
28
|
+
const pushTriggerSchema = branchFilterSchema.nullable();
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* Pull request trigger schema
|
|
32
|
+
*/
|
|
33
|
+
const pullRequestTriggerSchema = branchFilterSchema
|
|
34
|
+
.extend({
|
|
35
|
+
types: z.array(z.string()).optional(),
|
|
36
|
+
})
|
|
37
|
+
.nullable();
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* Workflow dispatch input schema
|
|
41
|
+
*/
|
|
42
|
+
const workflowDispatchInputSchema = z.object({
|
|
43
|
+
description: z.string().optional(),
|
|
44
|
+
required: z.boolean().optional(),
|
|
45
|
+
default: z.string().optional(),
|
|
46
|
+
type: z.enum(['string', 'boolean', 'choice', 'environment']).optional(),
|
|
47
|
+
options: z.array(z.string()).optional(),
|
|
48
|
+
});
|
|
49
|
+
|
|
50
|
+
/**
|
|
51
|
+
* Workflow dispatch trigger schema
|
|
52
|
+
*/
|
|
53
|
+
const workflowDispatchSchema = z
|
|
54
|
+
.object({
|
|
55
|
+
inputs: z.record(workflowDispatchInputSchema).optional(),
|
|
56
|
+
})
|
|
57
|
+
.nullable();
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Schedule trigger schema
|
|
61
|
+
*/
|
|
62
|
+
const scheduleTriggerSchema = z.object({
|
|
63
|
+
cron: z.string(),
|
|
64
|
+
});
|
|
65
|
+
|
|
66
|
+
/**
|
|
67
|
+
* Workflow call input schema
|
|
68
|
+
*/
|
|
69
|
+
const workflowCallInputSchema = z.object({
|
|
70
|
+
description: z.string().optional(),
|
|
71
|
+
required: z.boolean().optional(),
|
|
72
|
+
default: z.union([z.string(), z.boolean(), z.number()]).optional(),
|
|
73
|
+
type: z.enum(['string', 'boolean', 'number']),
|
|
74
|
+
});
|
|
75
|
+
|
|
76
|
+
/**
|
|
77
|
+
* Workflow call output schema
|
|
78
|
+
*/
|
|
79
|
+
const workflowCallOutputSchema = z.object({
|
|
80
|
+
description: z.string().optional(),
|
|
81
|
+
value: z.string(),
|
|
82
|
+
});
|
|
83
|
+
|
|
84
|
+
/**
|
|
85
|
+
* Workflow call secret schema
|
|
86
|
+
*/
|
|
87
|
+
const workflowCallSecretSchema = z.object({
|
|
88
|
+
description: z.string().optional(),
|
|
89
|
+
required: z.boolean().optional(),
|
|
90
|
+
});
|
|
91
|
+
|
|
92
|
+
/**
|
|
93
|
+
* Workflow call trigger schema
|
|
94
|
+
*/
|
|
95
|
+
const workflowCallSchema = z
|
|
96
|
+
.object({
|
|
97
|
+
inputs: z.record(workflowCallInputSchema).optional(),
|
|
98
|
+
outputs: z.record(workflowCallOutputSchema).optional(),
|
|
99
|
+
secrets: z.record(workflowCallSecretSchema).optional(),
|
|
100
|
+
})
|
|
101
|
+
.nullable();
|
|
102
|
+
|
|
103
|
+
/**
|
|
104
|
+
* Workflow trigger schema
|
|
105
|
+
*/
|
|
106
|
+
const workflowTriggerSchema = z.object({
|
|
107
|
+
push: pushTriggerSchema.optional(),
|
|
108
|
+
pull_request: pullRequestTriggerSchema.optional(),
|
|
109
|
+
pull_request_target: pullRequestTriggerSchema.optional(),
|
|
110
|
+
workflow_dispatch: workflowDispatchSchema.optional(),
|
|
111
|
+
workflow_call: workflowCallSchema.optional(),
|
|
112
|
+
schedule: z.array(scheduleTriggerSchema).optional(),
|
|
113
|
+
repository_dispatch: z
|
|
114
|
+
.object({
|
|
115
|
+
types: z.array(z.string()).optional(),
|
|
116
|
+
})
|
|
117
|
+
.nullable()
|
|
118
|
+
.optional(),
|
|
119
|
+
issues: z
|
|
120
|
+
.object({
|
|
121
|
+
types: z.array(z.string()).optional(),
|
|
122
|
+
})
|
|
123
|
+
.nullable()
|
|
124
|
+
.optional(),
|
|
125
|
+
issue_comment: z
|
|
126
|
+
.object({
|
|
127
|
+
types: z.array(z.string()).optional(),
|
|
128
|
+
})
|
|
129
|
+
.nullable()
|
|
130
|
+
.optional(),
|
|
131
|
+
release: z
|
|
132
|
+
.object({
|
|
133
|
+
types: z.array(z.string()).optional(),
|
|
134
|
+
})
|
|
135
|
+
.nullable()
|
|
136
|
+
.optional(),
|
|
137
|
+
create: z.null().optional(),
|
|
138
|
+
delete: z.null().optional(),
|
|
139
|
+
fork: z.null().optional(),
|
|
140
|
+
watch: z
|
|
141
|
+
.object({
|
|
142
|
+
types: z.array(z.string()).optional(),
|
|
143
|
+
})
|
|
144
|
+
.nullable()
|
|
145
|
+
.optional(),
|
|
146
|
+
});
|
|
147
|
+
|
|
148
|
+
/**
|
|
149
|
+
* Step schema
|
|
150
|
+
*/
|
|
151
|
+
const stepSchema = z
|
|
152
|
+
.object({
|
|
153
|
+
id: z.string().optional(),
|
|
154
|
+
name: z.string().optional(),
|
|
155
|
+
uses: z.string().optional(),
|
|
156
|
+
run: z.string().optional(),
|
|
157
|
+
'working-directory': z.string().optional(),
|
|
158
|
+
shell: z.enum(['bash', 'pwsh', 'python', 'sh', 'cmd', 'powershell']).optional(),
|
|
159
|
+
with: z.record(z.unknown()).optional(),
|
|
160
|
+
env: z.record(z.string()).optional(),
|
|
161
|
+
if: z.string().optional(),
|
|
162
|
+
'continue-on-error': z.boolean().optional(),
|
|
163
|
+
'timeout-minutes': z.number().positive().optional(),
|
|
164
|
+
})
|
|
165
|
+
.refine(
|
|
166
|
+
(step) => step.uses !== undefined || step.run !== undefined,
|
|
167
|
+
{
|
|
168
|
+
message: 'Step must have either "uses" or "run"',
|
|
169
|
+
}
|
|
170
|
+
)
|
|
171
|
+
.refine(
|
|
172
|
+
(step) => !(step.uses !== undefined && step.run !== undefined),
|
|
173
|
+
{
|
|
174
|
+
message: 'Step cannot have both "uses" and "run"',
|
|
175
|
+
}
|
|
176
|
+
);
|
|
177
|
+
|
|
178
|
+
/**
|
|
179
|
+
* Matrix config schema
|
|
180
|
+
*/
|
|
181
|
+
const matrixConfigSchema = z
|
|
182
|
+
.record(z.unknown())
|
|
183
|
+
.refine(
|
|
184
|
+
(obj) => {
|
|
185
|
+
// Allow 'include' and 'exclude' as special keys
|
|
186
|
+
for (const [key, value] of Object.entries(obj)) {
|
|
187
|
+
if (key === 'include' || key === 'exclude') {
|
|
188
|
+
if (!Array.isArray(value)) return false;
|
|
189
|
+
} else if (!Array.isArray(value)) {
|
|
190
|
+
return false;
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
return true;
|
|
194
|
+
},
|
|
195
|
+
{
|
|
196
|
+
message: 'Matrix values must be arrays (except include/exclude)',
|
|
197
|
+
}
|
|
198
|
+
);
|
|
199
|
+
|
|
200
|
+
/**
|
|
201
|
+
* Job strategy schema
|
|
202
|
+
*/
|
|
203
|
+
const jobStrategySchema = z.object({
|
|
204
|
+
matrix: matrixConfigSchema.optional(),
|
|
205
|
+
'fail-fast': z.boolean().optional(),
|
|
206
|
+
'max-parallel': z.number().positive().optional(),
|
|
207
|
+
});
|
|
208
|
+
|
|
209
|
+
/**
|
|
210
|
+
* Container config schema
|
|
211
|
+
*/
|
|
212
|
+
const containerConfigSchema = z.union([
|
|
213
|
+
z.string(),
|
|
214
|
+
z.object({
|
|
215
|
+
image: z.string(),
|
|
216
|
+
credentials: z
|
|
217
|
+
.object({
|
|
218
|
+
username: z.string(),
|
|
219
|
+
password: z.string(),
|
|
220
|
+
})
|
|
221
|
+
.optional(),
|
|
222
|
+
env: z.record(z.string()).optional(),
|
|
223
|
+
ports: z.array(z.union([z.number(), z.string()])).optional(),
|
|
224
|
+
volumes: z.array(z.string()).optional(),
|
|
225
|
+
options: z.string().optional(),
|
|
226
|
+
}),
|
|
227
|
+
]);
|
|
228
|
+
|
|
229
|
+
/**
|
|
230
|
+
* Permissions schema
|
|
231
|
+
*/
|
|
232
|
+
const permissionsSchema = z.union([
|
|
233
|
+
z.literal('read-all'),
|
|
234
|
+
z.literal('write-all'),
|
|
235
|
+
z.record(z.enum(['read', 'write', 'none'])),
|
|
236
|
+
]);
|
|
237
|
+
|
|
238
|
+
/**
|
|
239
|
+
* Concurrency schema
|
|
240
|
+
*/
|
|
241
|
+
const concurrencySchema = z.union([
|
|
242
|
+
z.string(),
|
|
243
|
+
z.object({
|
|
244
|
+
group: z.string(),
|
|
245
|
+
'cancel-in-progress': z.boolean().optional(),
|
|
246
|
+
}),
|
|
247
|
+
]);
|
|
248
|
+
|
|
249
|
+
/**
|
|
250
|
+
* Environment schema
|
|
251
|
+
*/
|
|
252
|
+
const environmentSchema = z.union([
|
|
253
|
+
z.string(),
|
|
254
|
+
z.object({
|
|
255
|
+
name: z.string(),
|
|
256
|
+
url: z.string().optional(),
|
|
257
|
+
}),
|
|
258
|
+
]);
|
|
259
|
+
|
|
260
|
+
/**
|
|
261
|
+
* Job defaults schema
|
|
262
|
+
*/
|
|
263
|
+
const jobDefaultsSchema = z.object({
|
|
264
|
+
run: z
|
|
265
|
+
.object({
|
|
266
|
+
shell: z.string().optional(),
|
|
267
|
+
'working-directory': z.string().optional(),
|
|
268
|
+
})
|
|
269
|
+
.optional(),
|
|
270
|
+
});
|
|
271
|
+
|
|
272
|
+
/**
|
|
273
|
+
* Job schema
|
|
274
|
+
*/
|
|
275
|
+
const jobSchema = z.object({
|
|
276
|
+
name: z.string().optional(),
|
|
277
|
+
'runs-on': z.union([z.string(), z.array(z.string())]),
|
|
278
|
+
needs: z.union([z.string(), z.array(z.string())]).optional(),
|
|
279
|
+
if: z.string().optional(),
|
|
280
|
+
env: z.record(z.string()).optional(),
|
|
281
|
+
steps: z.array(stepSchema).min(1, 'Job must have at least one step'),
|
|
282
|
+
outputs: z.record(z.string()).optional(),
|
|
283
|
+
strategy: jobStrategySchema.optional(),
|
|
284
|
+
container: containerConfigSchema.optional(),
|
|
285
|
+
services: z.record(containerConfigSchema).optional(),
|
|
286
|
+
'timeout-minutes': z.number().positive().optional(),
|
|
287
|
+
'continue-on-error': z.boolean().optional(),
|
|
288
|
+
permissions: permissionsSchema.optional(),
|
|
289
|
+
concurrency: concurrencySchema.optional(),
|
|
290
|
+
defaults: jobDefaultsSchema.optional(),
|
|
291
|
+
environment: environmentSchema.optional(),
|
|
292
|
+
});
|
|
293
|
+
|
|
294
|
+
/**
|
|
295
|
+
* Complete workflow schema
|
|
296
|
+
*/
|
|
297
|
+
const workflowSchema = z.object({
|
|
298
|
+
name: z.string().optional(),
|
|
299
|
+
on: z.union([
|
|
300
|
+
workflowTriggerSchema,
|
|
301
|
+
z.string(),
|
|
302
|
+
z.array(z.string()),
|
|
303
|
+
]),
|
|
304
|
+
env: z.record(z.string()).optional(),
|
|
305
|
+
jobs: z
|
|
306
|
+
.record(jobSchema)
|
|
307
|
+
.refine((jobs) => Object.keys(jobs).length > 0, {
|
|
308
|
+
message: 'Workflow must have at least one job',
|
|
309
|
+
}),
|
|
310
|
+
permissions: permissionsSchema.optional(),
|
|
311
|
+
concurrency: concurrencySchema.optional(),
|
|
312
|
+
defaults: jobDefaultsSchema.optional(),
|
|
313
|
+
});
|
|
314
|
+
|
|
315
|
+
// =============================================================================
|
|
316
|
+
// Validation Functions
|
|
317
|
+
// =============================================================================
|
|
318
|
+
|
|
319
|
+
/**
 * Outcome of validating a workflow.
 */
export interface ValidationResult {
  /** True when no diagnostic with severity 'error' was produced. */
  valid: boolean;
  /** All schema and semantic diagnostics collected during validation. */
  diagnostics: WorkflowDiagnostic[];
}
|
|
326
|
+
|
|
327
|
+
/**
|
|
328
|
+
* Collect Zod issues as workflow diagnostics
|
|
329
|
+
*/
|
|
330
|
+
function collectSchemaDiagnostics(
|
|
331
|
+
schema: z.ZodTypeAny,
|
|
332
|
+
input: unknown,
|
|
333
|
+
diagnostics: WorkflowDiagnostic[],
|
|
334
|
+
formatPath: (issuePath: Array<string | number>) => string
|
|
335
|
+
): void {
|
|
336
|
+
const result = schema.safeParse(input);
|
|
337
|
+
if (result.success) {
|
|
338
|
+
return;
|
|
339
|
+
}
|
|
340
|
+
|
|
341
|
+
for (const issue of result.error.issues) {
|
|
342
|
+
diagnostics.push({
|
|
343
|
+
severity: 'error',
|
|
344
|
+
message: issue.message,
|
|
345
|
+
path: formatPath(issue.path),
|
|
346
|
+
});
|
|
347
|
+
}
|
|
348
|
+
}
|
|
349
|
+
|
|
350
|
+
/**
|
|
351
|
+
* Build validation result from diagnostics
|
|
352
|
+
*/
|
|
353
|
+
function buildValidationResult(diagnostics: WorkflowDiagnostic[]): ValidationResult {
|
|
354
|
+
return {
|
|
355
|
+
valid: !diagnostics.some((d) => d.severity === 'error'),
|
|
356
|
+
diagnostics,
|
|
357
|
+
};
|
|
358
|
+
}
|
|
359
|
+
|
|
360
|
+
/**
|
|
361
|
+
* Validate workflow against schema
|
|
362
|
+
*/
|
|
363
|
+
export function validateWorkflow(workflow: Workflow): ValidationResult {
|
|
364
|
+
const diagnostics: WorkflowDiagnostic[] = [];
|
|
365
|
+
|
|
366
|
+
// Schema validation
|
|
367
|
+
collectSchemaDiagnostics(workflowSchema, workflow, diagnostics, (issuePath) => issuePath.join('.'));
|
|
368
|
+
|
|
369
|
+
// Additional semantic validation
|
|
370
|
+
const semanticDiagnostics = validateSemantics(workflow);
|
|
371
|
+
diagnostics.push(...semanticDiagnostics);
|
|
372
|
+
|
|
373
|
+
return buildValidationResult(diagnostics);
|
|
374
|
+
}
|
|
375
|
+
|
|
376
|
+
/**
|
|
377
|
+
* Perform semantic validation
|
|
378
|
+
*/
|
|
379
|
+
function validateSemantics(workflow: Workflow): WorkflowDiagnostic[] {
|
|
380
|
+
const diagnostics: WorkflowDiagnostic[] = [];
|
|
381
|
+
|
|
382
|
+
// Validate job dependencies
|
|
383
|
+
const jobNames = new Set(Object.keys(workflow.jobs));
|
|
384
|
+
|
|
385
|
+
for (const [jobId, job] of Object.entries(workflow.jobs)) {
|
|
386
|
+
const needs = normalizeNeedsInput(job.needs);
|
|
387
|
+
|
|
388
|
+
for (const need of needs) {
|
|
389
|
+
if (!jobNames.has(need)) {
|
|
390
|
+
diagnostics.push({
|
|
391
|
+
severity: 'error',
|
|
392
|
+
message: `Job "${jobId}" references unknown job "${need}" in needs`,
|
|
393
|
+
path: `jobs.${jobId}.needs`,
|
|
394
|
+
});
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
if (need === jobId) {
|
|
398
|
+
diagnostics.push({
|
|
399
|
+
severity: 'error',
|
|
400
|
+
message: `Job "${jobId}" cannot depend on itself`,
|
|
401
|
+
path: `jobs.${jobId}.needs`,
|
|
402
|
+
});
|
|
403
|
+
}
|
|
404
|
+
}
|
|
405
|
+
|
|
406
|
+
// Validate step IDs are unique
|
|
407
|
+
const stepIds = new Set<string>();
|
|
408
|
+
for (let i = 0; i < job.steps.length; i++) {
|
|
409
|
+
const step = job.steps[i];
|
|
410
|
+
if (step.id) {
|
|
411
|
+
if (stepIds.has(step.id)) {
|
|
412
|
+
diagnostics.push({
|
|
413
|
+
severity: 'error',
|
|
414
|
+
message: `Duplicate step ID "${step.id}" in job "${jobId}"`,
|
|
415
|
+
path: `jobs.${jobId}.steps[${i}].id`,
|
|
416
|
+
});
|
|
417
|
+
}
|
|
418
|
+
stepIds.add(step.id);
|
|
419
|
+
}
|
|
420
|
+
}
|
|
421
|
+
}
|
|
422
|
+
|
|
423
|
+
// Check for circular dependencies using the shared dependency graph
|
|
424
|
+
try {
|
|
425
|
+
const graph = buildDependencyGraph(workflow);
|
|
426
|
+
const cycle = detectCycle(graph);
|
|
427
|
+
if (cycle.length > 0) {
|
|
428
|
+
diagnostics.push({
|
|
429
|
+
severity: 'error',
|
|
430
|
+
message: `Circular dependency detected: ${cycle.join(' -> ')}`,
|
|
431
|
+
path: 'jobs',
|
|
432
|
+
});
|
|
433
|
+
}
|
|
434
|
+
} catch (e) {
|
|
435
|
+
// buildDependencyGraph throws DependencyError for unknown job references,
|
|
436
|
+
// which are already reported by the needs-validation above.
|
|
437
|
+
if (!(e instanceof DependencyError)) {
|
|
438
|
+
throw e;
|
|
439
|
+
}
|
|
440
|
+
}
|
|
441
|
+
|
|
442
|
+
return diagnostics;
|
|
443
|
+
}
|
|
444
|
+
|
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* YAML workflow parser
|
|
3
|
+
*/
|
|
4
|
+
import { parse as parseYaml, stringify as stringifyYaml, YAMLParseError } from 'yaml';
|
|
5
|
+
import type {
|
|
6
|
+
Workflow,
|
|
7
|
+
ParsedWorkflow,
|
|
8
|
+
WorkflowDiagnostic,
|
|
9
|
+
WorkflowTrigger,
|
|
10
|
+
} from '../workflow-models.js';
|
|
11
|
+
import { normalizeNeedsInput } from '../scheduler/job.js';
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* Error thrown when workflow parsing fails
|
|
15
|
+
*/
|
|
16
|
+
export class WorkflowParseError extends Error {
|
|
17
|
+
constructor(
|
|
18
|
+
message: string,
|
|
19
|
+
public readonly diagnostics: WorkflowDiagnostic[]
|
|
20
|
+
) {
|
|
21
|
+
super(message);
|
|
22
|
+
this.name = 'WorkflowParseError';
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* Normalize workflow trigger from various formats
|
|
28
|
+
*/
|
|
29
|
+
function normalizeTrigger(on: unknown): WorkflowTrigger {
|
|
30
|
+
// String format: on: push
|
|
31
|
+
if (typeof on === 'string') {
|
|
32
|
+
return { [on]: null } as WorkflowTrigger;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
// Array format: on: [push, pull_request]
|
|
36
|
+
if (Array.isArray(on)) {
|
|
37
|
+
const trigger: Record<string, unknown> = {};
|
|
38
|
+
for (const event of on) {
|
|
39
|
+
if (typeof event === 'string') {
|
|
40
|
+
trigger[event] = null;
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
return trigger as WorkflowTrigger;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
// Object format: on: { push: { branches: [...] } }
|
|
47
|
+
if (typeof on === 'object' && on !== null) {
|
|
48
|
+
return on as WorkflowTrigger;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
return {};
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
/**
|
|
55
|
+
* Normalize workflow structure
|
|
56
|
+
*/
|
|
57
|
+
function normalizeWorkflow(raw: unknown): Workflow {
|
|
58
|
+
if (typeof raw !== 'object' || raw === null) {
|
|
59
|
+
throw new WorkflowParseError('Workflow must be an object', [
|
|
60
|
+
{ severity: 'error', message: 'Workflow must be an object' },
|
|
61
|
+
]);
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
const obj = raw as Record<string, unknown>;
|
|
65
|
+
|
|
66
|
+
// Normalize 'on' trigger
|
|
67
|
+
const on = normalizeTrigger(obj.on);
|
|
68
|
+
|
|
69
|
+
// Normalize jobs
|
|
70
|
+
const jobs: Workflow['jobs'] = {};
|
|
71
|
+
const rawJobs = obj.jobs;
|
|
72
|
+
if (typeof rawJobs === 'object' && rawJobs !== null) {
|
|
73
|
+
for (const [jobId, job] of Object.entries(
|
|
74
|
+
rawJobs as Record<string, unknown>
|
|
75
|
+
)) {
|
|
76
|
+
if (typeof job !== 'object' || job === null) {
|
|
77
|
+
continue;
|
|
78
|
+
}
|
|
79
|
+
const jobObj = job as Record<string, unknown>;
|
|
80
|
+
const normalizedNeeds = normalizeNeedsInput(jobObj.needs);
|
|
81
|
+
jobs[jobId] = {
|
|
82
|
+
...jobObj,
|
|
83
|
+
needs: normalizedNeeds.length > 0 ? normalizedNeeds : undefined,
|
|
84
|
+
steps: Array.isArray(jobObj.steps) ? jobObj.steps : [],
|
|
85
|
+
} as Workflow['jobs'][string];
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
return {
|
|
90
|
+
name: typeof obj.name === 'string' ? obj.name : undefined,
|
|
91
|
+
on,
|
|
92
|
+
env:
|
|
93
|
+
typeof obj.env === 'object' && obj.env !== null
|
|
94
|
+
? (obj.env as Record<string, string>)
|
|
95
|
+
: undefined,
|
|
96
|
+
jobs,
|
|
97
|
+
permissions: obj.permissions as Workflow['permissions'],
|
|
98
|
+
concurrency: obj.concurrency as Workflow['concurrency'],
|
|
99
|
+
defaults: obj.defaults as Workflow['defaults'],
|
|
100
|
+
};
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
/**
|
|
104
|
+
* Parse YAML workflow content
|
|
105
|
+
*
|
|
106
|
+
* @param content - YAML content string
|
|
107
|
+
* @returns Parsed workflow with diagnostics
|
|
108
|
+
*/
|
|
109
|
+
export function parseWorkflow(content: string): ParsedWorkflow {
|
|
110
|
+
const diagnostics: WorkflowDiagnostic[] = [];
|
|
111
|
+
|
|
112
|
+
try {
|
|
113
|
+
const parsed = parseYaml(content, {
|
|
114
|
+
strict: false,
|
|
115
|
+
uniqueKeys: true,
|
|
116
|
+
});
|
|
117
|
+
|
|
118
|
+
const workflow = normalizeWorkflow(parsed);
|
|
119
|
+
|
|
120
|
+
return {
|
|
121
|
+
workflow,
|
|
122
|
+
diagnostics,
|
|
123
|
+
};
|
|
124
|
+
} catch (error) {
|
|
125
|
+
if (error instanceof YAMLParseError) {
|
|
126
|
+
diagnostics.push({
|
|
127
|
+
severity: 'error',
|
|
128
|
+
message: error.message,
|
|
129
|
+
line: error.linePos?.[0]?.line,
|
|
130
|
+
column: error.linePos?.[0]?.col,
|
|
131
|
+
});
|
|
132
|
+
} else if (error instanceof WorkflowParseError) {
|
|
133
|
+
diagnostics.push(...error.diagnostics);
|
|
134
|
+
} else if (error instanceof Error) {
|
|
135
|
+
diagnostics.push({
|
|
136
|
+
severity: 'error',
|
|
137
|
+
message: error.message,
|
|
138
|
+
});
|
|
139
|
+
} else {
|
|
140
|
+
diagnostics.push({
|
|
141
|
+
severity: 'error',
|
|
142
|
+
message: 'Unknown parse error',
|
|
143
|
+
});
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
throw new WorkflowParseError('Failed to parse workflow', diagnostics);
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
/**
|
|
151
|
+
* Parse workflow from file path (for Node.js environments)
|
|
152
|
+
*
|
|
153
|
+
* @param filePath - Path to workflow file
|
|
154
|
+
* @returns Parsed workflow
|
|
155
|
+
*/
|
|
156
|
+
export async function parseWorkflowFile(
|
|
157
|
+
filePath: string
|
|
158
|
+
): Promise<ParsedWorkflow> {
|
|
159
|
+
// Dynamic import for Node.js fs
|
|
160
|
+
const { readFile } = await import('node:fs/promises');
|
|
161
|
+
const content = await readFile(filePath, 'utf-8');
|
|
162
|
+
return parseWorkflow(content);
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
/**
|
|
166
|
+
* Stringify workflow back to YAML
|
|
167
|
+
*
|
|
168
|
+
* @param workflow - Workflow object
|
|
169
|
+
* @returns YAML string
|
|
170
|
+
*/
|
|
171
|
+
export function stringifyWorkflow(workflow: Workflow): string {
|
|
172
|
+
return stringifyYaml(workflow, {
|
|
173
|
+
indent: 2,
|
|
174
|
+
lineWidth: 0,
|
|
175
|
+
});
|
|
176
|
+
}
|