@toist/spec 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/kinds.ts ADDED
@@ -0,0 +1,166 @@
1
+ // 2121
2
+ // Kind contract — the runtime interface every NodeKind sees.
3
+ //
4
+ // Used by the runner (which executes kinds), domain projects (which declare
5
+ // custom kinds via source-mod or workspace packages), and authoring tools
6
+ // (which read manifest entries). Holds no implementation: only types.
7
+ //
8
+ // Domain authors importing only `NodeKind` pay nothing for the rest of
9
+ // platform-spec at runtime — TypeScript erases types and Bun tree-shakes
10
+ // the dependency graph. The split between this file and `validate.ts` is
11
+ // for human clarity, not for bundle isolation.
12
+
13
+ import type { Database } from "bun:sqlite"
14
+
15
/** Declares one configurable parameter of a NodeKind (see NodeKind.params).
 * Manifest entries carrying these are read by authoring tools. */
export interface ParamDef {
  type: "string" | "number" | "boolean" | "expression" | "json" | "select"
  label?: string
  description?: string
  /** Fallback value when the node omits this parameter — presumably applied by the runner; confirm. */
  default?: unknown
  required?: boolean
  options?: { value: string; label?: string }[] // for type: "select"
  placeholder?: string
}
24
+
25
/** Declares one named input/output port of a NodeKind (see NodeKind.inputs
 * and NodeKind.outputs). */
export interface PortDef {
  type: "any" | "object" | "array" | "string" | "number" | "boolean"
  label?: string
  description?: string
  required?: boolean
}
31
+
32
/** Sqlite-backed cache exposed to kinds via ctx.cache. The runner provides
 * the implementation; this is the contract kinds code against. */
export interface Cache {
  /** Derive a string cache key from the given parts. */
  key(...parts: unknown[]): string
  /** Read a cached value; undefined on miss. */
  get<T = unknown>(key: string): T | undefined
  /** Store a value, optionally with a time-to-live. */
  set(key: string, value: unknown, opts?: { ttl?: string | number }): void
  /** Remove a single entry. */
  delete(key: string): void
  /** Returns a count — presumably entries evicted; confirm against runner impl. */
  prune(): number
}
41
+
42
/** HITL suspension spec. The runner persists this on first ctx.suspend(spec)
 * call and replays the response on resume. v1: schema is descriptive, not
 * validated; timeout is stored, not enforced. */
export interface HitlSpec {
  /** Question shown to the human (or agent) asked to respond. */
  prompt: string
  assignee?: string
  /** Describes the expected response shape (descriptive only in v1). */
  schema?: unknown
  /** Stored but not enforced in v1. */
  timeout?: string
}
51
+
52
/** Per-node failure handling policy per pipeline-spec.md §11 / §16.1 and
 * decision_pipeline_recovery_model_v1. Default `"abort"`.
 * Parsed into ResolvedOnError by validate.ts's parseOnError.
 *
 * - `"abort"` — strict abort, run marked failed (v1 default)
 * - `"suspend"` — create an `error_review` task; sibling branches
 *   continue if independent. Resume picks retry/skip/abort.
 * - `{ "skip-with": <value> }` — substitute the failed node's output with
 *   the value (itself a discriminator: bare | { value } | { expr }).
 *   Run continues.
 * - `{ retry: { times, backoff? } }` — retry up to N times with the chosen
 *   backoff before applying the next-tier policy (default abort).
 */
export type OnErrorPolicy =
  | "abort"
  | "suspend"
  | { "skip-with": unknown }
  | { retry: { times: number; backoff?: "linear" | "exponential" } }
69
+
70
/** Spec for an error_review task — created when a node with onError:"suspend"
 * throws. Mirrors HitlSpec's role: persisted at suspend, surfaced to the UI,
 * cleared at resume. The resume payload is `{ action: "retry" | "skip" |
 * "abort", value? }` (vs HITL's free-form user input). */
export interface ErrorReviewSpec {
  /** Shown to the reviewer deciding retry/skip/abort. */
  prompt: string
  assignee?: string
  /** The original error message that triggered the suspend. */
  error: string
  /** Optional structured details — stack trace, kind id, etc. */
  details?: Record<string, unknown>
}
82
+
83
/** Resource Type declaration — JSON Schema 2020-12 describing a typed
 * external-system handle. Types are registered in the runner's resource
 * type registry (parallel to the kind registry). */
export interface ResourceTypeDef {
  /** Registry name for this resource type. */
  name: string
  description?: string
  schema: Record<string, unknown> // JSON Schema 2020-12; use x-sensitive:true on secret fields
}
91
+
92
/** Read-only capability over the runtime ledger (runs + node_outputs).
 * Exposed to kinds via ctx.runs so cross-pipeline reads (M3/M4 in
 * pipeline-spec.md §12) don't need a raw runtime-db handle. */
export interface RunStore {
  /** Final output of the most recent run of `pipeline` matching `status`
   * (default `"done"`). Returns null when no matching run exists. */
  lastOutput(pipeline: string, opts?: { status?: string }): unknown | null
  /** Output of `node` from the most recent run of `pipeline` matching
   * `status` (default `"done"`). Returns null when no matching run/node
   * pair exists. Backed by the same node_outputs table that powers HITL
   * resume per decision_hitl_checkpoint_durability. */
  nodeOutput(pipeline: string, node: string, opts?: { status?: string }): unknown | null
}
105
+
106
/** Outcome of a sub-pipeline invocation. Mirrors the runner's RunOutcome
 * but trimmed to what the pipeline.run kind exposes to its caller. */
export type SubRunOutcome =
  | { status: "done"; runId: number; output: unknown } // completed; final output attached
  | { status: "failed"; runId: number; error: string } // failed; error message attached
  | { status: "suspended"; runId: number; suspendedAt: string } // paused — suspendedAt semantics (node id vs timestamp) not shown here; confirm in runner
112
+
113
/** Sub-run invocation capability. Pipeline.run uses this; nothing else
 * should. Per pipeline-spec.md §12 / decision_pipeline_recovery_model_v1
 * sub-runs are the natural unit for recovery — each gets its own run-id,
 * own node_outputs, own life-cycle. */
export type SubRun = (pipelineId: string, payload: Record<string, unknown>) => Promise<SubRunOutcome>
118
+
119
/** What the runner passes into runPipeline. The full per-step ExecContext
 * exposed to kinds is built from this by adding `step` and `suspend`. */
export interface PlatformCtx {
  /** Id of the current run. */
  runId: number
  /** Leveled logger supplied by the runner. */
  log: (level: "info" | "warn" | "error", msg: string) => void
  /** Raw runtime database handle (bun:sqlite). */
  db: Database
  /** Sqlite-backed cache (see Cache). */
  cache: Cache
  /** Resolved resource instances. ctx.resource.<name>.<field> in { expr } nodes. */
  resource: Record<string, Record<string, unknown>>
  /** Read-only ledger capability — used by runs.lastOutput and runs.nodeOutput. */
  runs: RunStore
  /** Sub-pipeline invocation — used by the pipeline.run kind. Pass-through to
   * the runner's dispatcher; each sub-run gets its own run-id and lifecycle. */
  subRun: SubRun
}
134
+
135
/** Per-step context handed to NodeKind.run — PlatformCtx plus the
 * step-scoped members. */
export interface ExecContext extends PlatformCtx {
  /** Identifier of the node currently executing. ctx.suspend uses this to
   * address its own task; other kinds may also use it for logging context. */
  step: { nodeId: string }
  /** Pause the run until a human (or agent) responds with input matching the
   * given spec. On the first call, throws (caught by the executor). On
   * resume, returns the recorded response. */
  suspend: (spec: HitlSpec) => Promise<unknown>
}
144
+
145
/** Runtime contract every NodeKind implementation fulfils.
 * @typeParam P — shape of the `params` object passed to `run`.
 * @typeParam I — shape of the `input` object passed to `run`. */
export interface NodeKind<
  P extends Record<string, unknown> = Record<string, unknown>,
  I extends Record<string, unknown> = Record<string, unknown>,
> {
  id: string // "transform.filter"
  category: string // "data" | "transform" | "db" | "io" | "control"
  label: string
  description?: string
  icon?: string // lucide icon name, optional
  /** Mutates external state (DB writes, network writes, ...). Ad-hoc invokes
   * via /kinds/:id/invoke or kinds.invoke MCP tool require `confirm: true`
   * for side-effecting kinds. Pipeline runs are always allowed. */
  sideEffect?: boolean
  params: Record<string, ParamDef>
  inputs: Record<string, PortDef> // named, like Windmill OpenFlow modules
  outputs: Record<string, PortDef>
  /** Execute the node. May be sync or async; the resolved value is the node's output. */
  run: (ctx: ExecContext, params: P, input: I) => Promise<unknown> | unknown
}
163
+
164
/** Manifest entry returned by GET /manifest — same as NodeKind but without
 * `run` (functions aren't serialisable). */
export type NodeKindManifest = Omit<NodeKind, "run">
@@ -0,0 +1,522 @@
1
+ // 2121
2
+ // Pipeline-spec v1 validator. Implements pipeline-spec.md §13.
3
+ //
4
+ // Pure spec validation with no runtime dependencies. Kind-existence checks
5
+ // are delegated to a caller-supplied resolver — the runner injects its
6
+ // in-memory registry, while the CLI gen scripts and offline tooling can
7
+ // pass an accept-all stub or hit /manifest themselves.
8
+
9
+ import Ajv2020, { type ValidateFunction } from "ajv/dist/2020.js"
10
+ import { parseDiscContainer, parseDisc, type DiscNode } from "./discriminator.ts"
11
+ import type { NodeKindManifest, OnErrorPolicy } from "./kinds.ts"
12
+
13
+ // ─── Types ───────────────────────────────────────────────────────────────────
14
+
15
/** A node exactly as authored in the pipeline document (pre-parse shape).
 * Preserved on PipelineSpec.nodes for serialization round-trip. */
export interface PipelineNode {
  id: string
  kind: string
  label?: string
  params?: Record<string, unknown>
  input?: Record<string, unknown>
  dependsOn?: string[]
  /** §11 / §16.1 failure-handling policy. Defaults to "abort". */
  onError?: OnErrorPolicy
}
25
+
26
/** Discriminator-parsed form of a node, attached after validation. */
export interface ParsedNode {
  id: string
  kind: string
  label?: string
  /** Discriminator tree for the kind-namespaced params container. */
  paramsTree: DiscNode
  /** Discriminator tree for the kind-namespaced input container. */
  inputTree: DiscNode
  /** Explicit dependsOn edges (empty array when none were authored). */
  dependsOn: string[]
  /** Union of ctx.results.<id> ids referenced anywhere in params/input. */
  refs: Set<string>
  /** Resolved failure-handling policy. Always present after validation
   * (defaults to "abort" when the node omits onError). For skip-with, the
   * value is a parsed DiscNode so the dispatcher can resolve it against
   * the live ctx at substitution time — mirroring how params/input leaves work. */
  onError: ResolvedOnError
}
42
+
43
/** Normalised onError policy produced by parseOnError and attached to each
 * ParsedNode. */
export type ResolvedOnError =
  | { kind: "abort" }
  | { kind: "suspend" }
  | { kind: "skip-with"; valueTree: DiscNode }
  | { kind: "retry"; times: number; backoff: "linear" | "exponential" }
48
+
49
/** Fully validated pipeline, as returned in ValidateResult.parsed. */
export interface PipelineSpec {
  apiVersion: string
  id: string
  label?: string
  description?: string
  schedule?: string
  payloadSchema?: object
  /** Compiled payload validator (ajv). Run-time validation per pipeline-spec §10. */
  payloadValidator?: ValidateFunction
  /** Raw nodes as authored in YAML. Preserved for serialization round-trip. */
  nodes: PipelineNode[]
  /** Parsed nodes ready for execution. Populated post-validation. */
  parsedNodes: ParsedNode[]
  /** Derived execution graph. `source` distinguishes expr-derived vs explicit dependsOn. */
  edges: { from: string; to: string; source: "expr" | "dependsOn" }[]
  /** Validation warnings (non-blocking, e.g. apiVersion absent). */
  warnings: string[]
  /** Optional caller-supplied source path; preserved across loads for diagnostics. */
  _source?: string
}
69
+
70
/** Caller-supplied hooks tuning validateSpec's registry-dependent checks. */
export interface ValidateOptions {
  /** Lookup whether a kind id resolves in the runner's registry. If absent,
   * kind references are accepted (useful for offline validation, schema
   * generation, or tools that have not loaded a runner). */
  hasKind?: (id: string) => boolean
  /** Per-kind manifest for shape enforcement (§13 item 9). When supplied,
   * validateSpec checks that every node's params/input keys match the
   * kind's declared ParamDef/PortDef contract: required keys are present,
   * no unknown keys, no use of params on a kind with no params declared.
   * Type-of-value checks on literals are deferred to v1.1. */
  getKindManifest?: (id: string) => NodeKindManifest | undefined
}
82
+
83
/** Result of validateSpec. `parsed` and `edges` are only populated when
 * ok === true. */
export interface ValidateResult {
  ok: boolean
  errors: string[]
  warnings?: string[]
  parsed?: PipelineSpec
  /** Backwards-compat: edge list when ok=true. */
  edges?: { from: string; to: string }[]
}
91
+
92
// ─── Closed sets and constants ───────────────────────────────────────────────

// Pipeline/node ids: leading letter, then word characters or hyphens.
const SLUG_RE = /^[A-Za-z][\w-]*$/
const API_VERSION = "2121.fi/v1"

const ALLOWED_PIPELINE_KEYS = new Set([
  "apiVersion", "id", "label", "description", "schedule", "payloadSchema", "nodes",
])

const ALLOWED_NODE_KEYS = new Set([
  "id", "kind", "label", "params", "input", "dependsOn", "onError",
])

// Per pipeline-spec.md §16.4. Listed explicitly so users get a precise
// "reserved for future use" error rather than a generic unknown-key one.
//
// Note: pipeline-level `onError` stays reserved in v1 — the task body of
// open_task_pipeline_v1_recovery_model and decision_pipeline_recovery_model_v1
// fix the pipeline default at "abort" with no override; only per-node
// onError is functional. Forward-compat: lift this from RESERVED to ALLOWED
// when v1.1 enables pipeline-level overrides.
const RESERVED_PIPELINE_KEYS = new Set([
  "version", "kind", "tags", "forEach", "loop", "retry", "timeout", "concurrency",
  "notifications", "secrets", "imports", "inputs", "outputs", "outputSchema",
  "dependsOnPipelines", "produces", "consumes", "onError",
])

const RESERVED_NODE_KEYS = new Set([
  "if", "when", "forEach", "retry", "timeout", "cache",
  "onSuccess", "outputs", "meta",
])

// Shared ajv instance for payloadSchema compilation. allErrors collects every
// violation; strict:false keeps user-authored schemas with unknown keywords
// from hard-failing compilation (per ajv strict-mode docs).
const ajv = new Ajv2020({ allErrors: true, strict: false })
125
+
126
+ // ─── Validator ───────────────────────────────────────────────────────────────
127
+
128
/** Validate a raw (JSON/YAML-parsed) pipeline document per pipeline-spec.md §13.
 *
 * Collects problems rather than failing fast: `errors` accumulates across
 * every structural check; only the graph checks (§13.11 refs, §13.12 acyclic)
 * are gated on an error-free prefix, since they need well-formed nodes.
 *
 * @param input   Parsed document of unknown shape.
 * @param options Registry hooks (see ValidateOptions); both optional.
 * @returns ok:false with the full error list, or ok:true with the parsed
 *          PipelineSpec and the derived edge list.
 */
export function validateSpec(input: unknown, options: ValidateOptions = {}): ValidateResult {
  const errors: string[] = []
  const warnings: string[] = []
  // Absent resolver ⇒ accept every kind id (offline mode).
  const hasKind = options.hasKind ?? (() => true)
  const getKindManifest = options.getKindManifest

  if (!input || typeof input !== "object" || Array.isArray(input)) {
    return { ok: false, errors: ["spec must be a JSON object"] }
  }

  const spec = input as Record<string, unknown>

  // §13.2 — top-level closed set + reserved-key check.
  for (const key of Object.keys(spec)) {
    if (ALLOWED_PIPELINE_KEYS.has(key)) continue
    if (RESERVED_PIPELINE_KEYS.has(key)) {
      errors.push(`pipeline-level key "${key}" is reserved for future use; remove it`)
    } else {
      errors.push(`unknown pipeline-level key "${key}" (allowed: ${[...ALLOWED_PIPELINE_KEYS].join(", ")})`)
    }
  }

  // §13.4 — apiVersion. Implicit-v1 with warning per §7.
  let apiVersion: string
  if (spec.apiVersion === undefined) {
    warnings.push(`apiVersion absent; treating as "${API_VERSION}". Add apiVersion: "${API_VERSION}" explicitly.`)
    apiVersion = API_VERSION
  } else if (spec.apiVersion === API_VERSION) {
    apiVersion = API_VERSION
  } else {
    errors.push(`unsupported apiVersion: ${JSON.stringify(spec.apiVersion)} (this runner accepts "${API_VERSION}")`)
    apiVersion = API_VERSION
  }

  // §13.3 — id (slug). Optional in spec input (default applied at file load); required for storage.
  if (spec.id !== undefined) {
    if (typeof spec.id !== "string" || !SLUG_RE.test(spec.id)) {
      errors.push(`id must match ${SLUG_RE} (got ${JSON.stringify(spec.id)})`)
    }
    if (typeof spec.id === "string" && spec.id.includes(".")) {
      errors.push(`id must not contain dots (dots are reserved for kind ids like transform.map)`)
    }
  }

  // Optional scalar fields.
  if (spec.label !== undefined && typeof spec.label !== "string") {
    errors.push(`label must be a string`)
  }
  if (spec.description !== undefined && typeof spec.description !== "string") {
    errors.push(`description must be a string`)
  }

  // §13.14 — schedule (cron syntax check).
  if (spec.schedule !== undefined) {
    if (typeof spec.schedule !== "string") {
      errors.push(`schedule must be a 5-field cron string (structured form is reserved, see §9)`)
    } else if (!isValidCron(spec.schedule)) {
      errors.push(`schedule "${spec.schedule}" is not a valid 5-field cron expression`)
    } else {
      warnings.push(`schedule is parsed but the scheduler is not yet active (pipeline-spec.md §16.1)`)
    }
  }

  // §13.13 — payloadSchema, ajv-compiled at validation time.
  let payloadValidator: ValidateFunction | undefined
  if (spec.payloadSchema !== undefined) {
    if (!spec.payloadSchema || typeof spec.payloadSchema !== "object" || Array.isArray(spec.payloadSchema)) {
      errors.push(`payloadSchema must be a JSON Schema object`)
    } else {
      try {
        payloadValidator = ajv.compile(spec.payloadSchema)
      } catch (err) {
        errors.push(`payloadSchema is not a valid JSON Schema 2020-12 document: ${(err as Error).message}`)
      }
    }
  }

  // §13.5 — nodes is a non-empty array. Bail out entirely when not an array:
  // every later check needs to iterate it.
  if (!Array.isArray(spec.nodes)) {
    return { ok: false, errors: [...errors, "nodes must be an array"], warnings }
  }
  if (spec.nodes.length === 0) {
    errors.push("nodes must be a non-empty array")
  }

  const rawNodes: PipelineNode[] = []
  const parsedNodes: ParsedNode[] = []
  const seenIds = new Set<string>()

  for (let i = 0; i < spec.nodes.length; i++) {
    const n = spec.nodes[i]
    const ctx = `nodes[${i}]`
    if (!n || typeof n !== "object" || Array.isArray(n)) {
      errors.push(`${ctx}: must be a JSON object`)
      continue
    }
    const node = n as Record<string, unknown>

    // §13.6 — node closed-set + reserved-key check.
    for (const key of Object.keys(node)) {
      if (ALLOWED_NODE_KEYS.has(key)) continue
      if (RESERVED_NODE_KEYS.has(key)) {
        errors.push(`${ctx}: node key "${key}" is reserved for future use; remove it`)
      } else {
        errors.push(`${ctx}: unknown node key "${key}" (allowed: ${[...ALLOWED_NODE_KEYS].join(", ")})`)
      }
    }

    // §13.7 — id slug, unique. Without a usable id the remaining per-node
    // diagnostics can't be labelled, so skip the rest of this node.
    if (typeof node.id !== "string" || !SLUG_RE.test(node.id)) {
      errors.push(`${ctx}: id must match ${SLUG_RE} (got ${JSON.stringify(node.id)})`)
      continue
    }
    if (seenIds.has(node.id)) {
      errors.push(`${ctx}: duplicate node id "${node.id}"`)
    }
    seenIds.add(node.id)

    // §13.8 — kind resolves in registry (when a resolver is supplied).
    if (typeof node.kind !== "string") {
      errors.push(`${ctx} ("${node.id}"): kind must be a string`)
      continue
    }
    if (!hasKind(node.kind)) {
      errors.push(`${ctx} ("${node.id}"): unknown kind "${node.kind}"`)
    }

    // params / input objects (raw shape).
    if (node.params !== undefined && (typeof node.params !== "object" || Array.isArray(node.params) || node.params === null)) {
      errors.push(`${ctx} ("${node.id}"): params must be an object`)
    }
    if (node.input !== undefined && (typeof node.input !== "object" || Array.isArray(node.input) || node.input === null)) {
      errors.push(`${ctx} ("${node.id}"): input must be an object`)
    }

    // dependsOn shape.
    let dependsOn: string[] = []
    if (node.dependsOn !== undefined) {
      if (!Array.isArray(node.dependsOn) || !node.dependsOn.every((d) => typeof d === "string")) {
        errors.push(`${ctx} ("${node.id}"): dependsOn must be a string[]`)
      } else {
        dependsOn = node.dependsOn as string[]
      }
    }

    // §13.9 — discriminator parse for params + input. `params` and `input`
    // are kind-namespaced containers per §1: keys are kind-defined, values
    // follow the discriminator. parseDiscContainer ensures the container
    // itself is not mistaken for an explicit-static `{ value: ... }` form
    // when a kind happens to have a single port named `value` (e.g. sink).
    const paramsRaw = (node.params as Record<string, unknown> | undefined) ?? {}
    const inputRaw = (node.input as Record<string, unknown> | undefined) ?? {}
    const paramsParse = parseDiscContainer(paramsRaw, `${ctx}.params`)
    const inputParse = parseDiscContainer(inputRaw, `${ctx}.input`)
    for (const e of paramsParse.errors) errors.push(e)
    for (const e of inputParse.errors) errors.push(e)

    const refs = new Set<string>([...paramsParse.refs, ...inputParse.refs])

    // §11 / §16.1 — per-node failure-handling policy. parseOnError returns
    // null on malformed input (after pushing an error); fall back to abort
    // so the ParsedNode stays well-shaped.
    const onError = parseOnError(node.onError, `${ctx} ("${node.id}")`, errors) ?? { kind: "abort" }

    // §13 item 9 — per-kind manifest enforcement (params + input shape).
    // Skipped for unknown kinds (already errored above) and when no manifest
    // resolver was supplied (offline mode).
    if (getKindManifest && hasKind(node.kind as string)) {
      const manifest = getKindManifest(node.kind as string)
      if (manifest) {
        const declParams = manifest.params ?? {}
        const declInputs = manifest.inputs ?? {}

        // Required params present…
        for (const [key, def] of Object.entries(declParams)) {
          if (def.required && !(key in paramsRaw)) {
            errors.push(`${ctx} ("${node.id}"): kind "${node.kind}" requires param "${key}"`)
          }
        }
        // …and no undeclared params.
        for (const key of Object.keys(paramsRaw)) {
          if (!(key in declParams)) {
            const allowed = Object.keys(declParams).join(", ") || "(none)"
            errors.push(`${ctx} ("${node.id}"): kind "${node.kind}" has no param "${key}" (allowed: ${allowed})`)
          }
        }

        // Same two checks for the input ports.
        for (const [key, def] of Object.entries(declInputs)) {
          if (def.required && !(key in inputRaw)) {
            errors.push(`${ctx} ("${node.id}"): kind "${node.kind}" requires input "${key}"`)
          }
        }
        for (const key of Object.keys(inputRaw)) {
          if (!(key in declInputs)) {
            const allowed = Object.keys(declInputs).join(", ") || "(none)"
            errors.push(`${ctx} ("${node.id}"): kind "${node.kind}" has no input "${key}" (allowed: ${allowed})`)
          }
        }
      }
    }

    rawNodes.push({
      id: node.id,
      kind: node.kind as string,
      label: typeof node.label === "string" ? node.label : undefined,
      params: paramsRaw,
      input: inputRaw,
      dependsOn: dependsOn.length > 0 ? dependsOn : undefined,
      onError: node.onError as PipelineNode["onError"],
    })

    parsedNodes.push({
      id: node.id,
      kind: node.kind as string,
      label: typeof node.label === "string" ? node.label : undefined,
      paramsTree: paramsParse.tree,
      inputTree: inputParse.tree,
      dependsOn,
      refs,
      onError,
    })
  }

  // §13.11 — every ref resolves to an existing node.
  // §13.12 — DAG is acyclic.
  // Both gated on an error-free prefix: refs/edges only make sense over
  // structurally valid nodes.
  const edges: { from: string; to: string; source: "expr" | "dependsOn" }[] = []
  if (errors.length === 0) {
    for (const node of parsedNodes) {
      for (const ref of node.refs) {
        if (!seenIds.has(ref)) {
          errors.push(`node "${node.id}" references unknown result "${ref}" via ctx.results.${ref}`)
        } else {
          edges.push({ from: ref, to: node.id, source: "expr" })
        }
      }
      for (const dep of node.dependsOn) {
        if (!seenIds.has(dep)) {
          errors.push(`node "${node.id}" dependsOn unknown id "${dep}"`)
        } else {
          edges.push({ from: dep, to: node.id, source: "dependsOn" })
        }
      }
    }

    if (errors.length === 0) {
      const cycle = detectCycle(parsedNodes, edges)
      if (cycle) errors.push(`pipeline has a cycle: ${cycle.join(" → ")}`)
    }
  }

  if (errors.length > 0) {
    return { ok: false, errors, warnings }
  }

  const parsed: PipelineSpec = {
    apiVersion,
    id: typeof spec.id === "string" ? spec.id : "draft",
    label: typeof spec.label === "string" ? spec.label : undefined,
    description: typeof spec.description === "string" ? spec.description : undefined,
    schedule: typeof spec.schedule === "string" ? spec.schedule : undefined,
    payloadSchema: spec.payloadSchema as object | undefined,
    payloadValidator,
    nodes: rawNodes,
    parsedNodes,
    edges,
    warnings,
  }

  return {
    ok: true,
    errors: [],
    warnings,
    parsed,
    edges: edges.map(({ from, to }) => ({ from, to })),
  }
}
400
+
401
+ /** Cron 5-field syntactic check. Lenient — accepts standard `* / , -` syntax. */
402
+ function isValidCron(expr: string): boolean {
403
+ const fields = expr.trim().split(/\s+/)
404
+ if (fields.length !== 5) return false
405
+ return fields.every((f) => /^[\d*/,\-]+$/.test(f) || /^[A-Za-z]{3}([,\-][A-Za-z]{3})*$/.test(f))
406
+ }
407
+
408
+ /** DFS cycle detection. Returns the offending path on cycle, null otherwise. */
409
+ function detectCycle(
410
+ nodes: ParsedNode[],
411
+ edges: { from: string; to: string }[],
412
+ ): string[] | null {
413
+ const adj = new Map<string, string[]>()
414
+ for (const n of nodes) adj.set(n.id, [])
415
+ for (const e of edges) adj.get(e.from)!.push(e.to)
416
+
417
+ const WHITE = 0, GREY = 1, BLACK = 2
418
+ const color = new Map<string, number>()
419
+ for (const n of nodes) color.set(n.id, WHITE)
420
+ const stack: string[] = []
421
+
422
+ const visit = (id: string): string[] | null => {
423
+ color.set(id, GREY)
424
+ stack.push(id)
425
+ for (const next of adj.get(id) ?? []) {
426
+ const c = color.get(next)
427
+ if (c === GREY) {
428
+ const start = stack.indexOf(next)
429
+ return [...stack.slice(start), next]
430
+ }
431
+ if (c === WHITE) {
432
+ const cyc = visit(next)
433
+ if (cyc) return cyc
434
+ }
435
+ }
436
+ color.set(id, BLACK)
437
+ stack.pop()
438
+ return null
439
+ }
440
+
441
+ for (const n of nodes) {
442
+ if (color.get(n.id) === WHITE) {
443
+ const cyc = visit(n.id)
444
+ if (cyc) return cyc
445
+ }
446
+ }
447
+ return null
448
+ }
449
+
450
+ // ─── onError policy parser ───────────────────────────────────────────────────
451
+ // Per pipeline-spec.md §11 / §16.1 and decision_pipeline_recovery_model_v1.
452
+ // Returns a ResolvedOnError on success (or `null` plus errors[] additions).
453
+ // The skip-with value is parsed as a discriminator so the dispatcher can
454
+ // resolve it against the live ctx at substitution time.
455
+
456
+ function parseOnError(
457
+ raw: unknown,
458
+ ctxLabel: string,
459
+ errors: string[],
460
+ ): ResolvedOnError | null {
461
+ if (raw === undefined) return { kind: "abort" }
462
+
463
+ if (raw === "abort") return { kind: "abort" }
464
+ if (raw === "suspend") return { kind: "suspend" }
465
+
466
+ if (!raw || typeof raw !== "object" || Array.isArray(raw)) {
467
+ errors.push(
468
+ `${ctxLabel}: onError must be "abort" | "suspend" | { skip-with } | { retry } ` +
469
+ `(got ${JSON.stringify(raw)})`,
470
+ )
471
+ return null
472
+ }
473
+
474
+ const obj = raw as Record<string, unknown>
475
+ const keys = Object.keys(obj)
476
+ if (keys.length !== 1) {
477
+ errors.push(
478
+ `${ctxLabel}: onError object form must have exactly one key (skip-with or retry); got [${keys.join(", ")}]`,
479
+ )
480
+ return null
481
+ }
482
+
483
+ const key = keys[0]
484
+ if (key === "skip-with") {
485
+ // The value is itself a discriminator leaf — bare | { value } | { expr }.
486
+ const parsed = parseDisc(obj[key], `${ctxLabel}.skip-with`)
487
+ for (const e of parsed.errors) errors.push(e)
488
+ return { kind: "skip-with", valueTree: parsed.tree }
489
+ }
490
+
491
+ if (key === "retry") {
492
+ const r = obj[key]
493
+ if (!r || typeof r !== "object" || Array.isArray(r)) {
494
+ errors.push(`${ctxLabel}: onError.retry must be an object { times, backoff? }`)
495
+ return null
496
+ }
497
+ const ro = r as Record<string, unknown>
498
+ const times = ro.times
499
+ if (typeof times !== "number" || !Number.isFinite(times) || times < 1 || !Number.isInteger(times)) {
500
+ errors.push(`${ctxLabel}: onError.retry.times must be a positive integer (got ${JSON.stringify(times)})`)
501
+ return null
502
+ }
503
+ let backoff: "linear" | "exponential" = "linear"
504
+ if (ro.backoff !== undefined) {
505
+ if (ro.backoff !== "linear" && ro.backoff !== "exponential") {
506
+ errors.push(`${ctxLabel}: onError.retry.backoff must be "linear" or "exponential" (got ${JSON.stringify(ro.backoff)})`)
507
+ return null
508
+ }
509
+ backoff = ro.backoff
510
+ }
511
+ // Reject extra keys
512
+ const extras = Object.keys(ro).filter((k) => k !== "times" && k !== "backoff")
513
+ if (extras.length > 0) {
514
+ errors.push(`${ctxLabel}: onError.retry has unknown keys: ${extras.join(", ")}`)
515
+ return null
516
+ }
517
+ return { kind: "retry", times, backoff }
518
+ }
519
+
520
+ errors.push(`${ctxLabel}: onError object key "${key}" not recognised (allowed: skip-with, retry)`)
521
+ return null
522
+ }