@toist/spec 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +24 -0
- package/package.json +16 -0
- package/src/discriminator.ts +148 -0
- package/src/expr.ts +133 -0
- package/src/index.ts +54 -0
- package/src/jsonschema.ts +296 -0
- package/src/kinds.ts +166 -0
- package/src/validate.ts +522 -0
- package/src/yaml.ts +61 -0
package/CHANGELOG.md
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
# Changelog
|
|
2
|
+
|
|
3
|
+
All notable changes to `@toist/spec` are recorded here.
|
|
4
|
+
|
|
5
|
+
## 0.2.0 — 2026-05-05
|
|
6
|
+
|
|
7
|
+
Rebrand: `@2121/platform-spec` → `@toist/spec`. Republished to **npmjs.com**
|
|
8
|
+
(public scope) — the abandoned `@2121/*` v0.1.0 publishes lived on a private
|
|
9
|
+
Gitea registry and required per-consumer auth tokens. See
|
|
10
|
+
`session_summary_2026_05_05_phase_f_w3_publish_and_federated_design` in nalich
|
|
11
|
+
for the rationale.
|
|
12
|
+
|
|
13
|
+
No API or behaviour changes from 0.1.0; the rename is the only breaking change.
|
|
14
|
+
|
|
15
|
+
## 0.1.0 — 2026-05-05 — abandoned (@2121/platform-spec on Gitea)
|
|
16
|
+
|
|
17
|
+
Initial release. Extracted from the platform monorepo per Phase F W2.
|
|
18
|
+
|
|
19
|
+
- Type definitions for kinds, pipelines, resources, run records, and
|
|
20
|
+
HITL specs — see kind-spec, pipeline-spec, resource-spec at
|
|
21
|
+
https://toist.in.
|
|
22
|
+
- AJV-based validators for pipeline YAML and resource definitions.
|
|
23
|
+
- JSON-schema generation utilities for editor tooling.
|
|
24
|
+
- Babel-parser-based discriminator extraction for kind-id resolution.
|
package/package.json
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@toist/spec",
|
|
3
|
+
"version": "0.2.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"main": "./src/index.ts",
|
|
6
|
+
"types": "./src/index.ts",
|
|
7
|
+
"exports": {
|
|
8
|
+
".": "./src/index.ts"
|
|
9
|
+
},
|
|
10
|
+
"files": ["src/", "CHANGELOG.md"],
|
|
11
|
+
"dependencies": {
|
|
12
|
+
"@babel/parser": "^7.25.0",
|
|
13
|
+
"ajv": "^8.17.1",
|
|
14
|
+
"yaml": "^2.8.3"
|
|
15
|
+
}
|
|
16
|
+
}
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
// Discriminator parsing for pipeline-spec v1 §5.
|
|
3
|
+
//
|
|
4
|
+
// Every leaf inside a node's `params` and `input` is one of three forms:
|
|
5
|
+
//
|
|
6
|
+
// 1. Bare literal — auto-static shorthand (any non-discriminator value)
|
|
7
|
+
// 2. Explicit static — { value: <literal> }
|
|
8
|
+
// 3. TypeScript expression — { expr: "<ts-expression>" }
|
|
9
|
+
//
|
|
10
|
+
// Discriminator detection: an object with **exactly one key** equal to
|
|
11
|
+
// `value` or `expr` is the discriminator form. Anything else is a literal.
|
|
12
|
+
// To force a literal that is itself discriminator-shaped, wrap explicitly:
|
|
13
|
+
// `{ value: { expr: "x" } }`.
|
|
14
|
+
|
|
15
|
+
import { parseExpr, evalExpr, type ParsedExpr } from "./expr.ts"
|
|
16
|
+
|
|
17
|
+
export type DiscNode =
|
|
18
|
+
| { kind: "literal"; value: unknown }
|
|
19
|
+
| { kind: "expr"; expr: ParsedExpr }
|
|
20
|
+
| { kind: "object"; entries: Record<string, DiscNode> }
|
|
21
|
+
| { kind: "array"; items: DiscNode[] }
|
|
22
|
+
|
|
23
|
+
export interface ParseDiscResult {
|
|
24
|
+
tree: DiscNode
|
|
25
|
+
errors: string[]
|
|
26
|
+
/** Union of all `ctx.results.<id>` ids referenced anywhere in this subtree. */
|
|
27
|
+
refs: Set<string>
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* Walk a raw value and produce a parsed discriminator tree. Surfaces parse
|
|
32
|
+
* errors per leaf (caller decides whether to short-circuit). Validates §13
|
|
33
|
+
* item 9 (discriminator shape) and item 10 (expression syntactic
|
|
34
|
+
* well-formedness).
|
|
35
|
+
*/
|
|
36
|
+
export function parseDisc(value: unknown, path = ""): ParseDiscResult {
|
|
37
|
+
const errors: string[] = []
|
|
38
|
+
const refs = new Set<string>()
|
|
39
|
+
const tree = walk(value, path, errors, refs)
|
|
40
|
+
return { tree, errors, refs }
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
/**
|
|
44
|
+
* Parse a `params` or `input` container per pipeline-spec §1 (three layers
|
|
45
|
+
* of vocabulary): keys are kind-defined, values follow the discriminator.
|
|
46
|
+
* The container itself is **not** discriminator-detectable — a kind whose
|
|
47
|
+
* input has a single port named `value` would otherwise be misinterpreted
|
|
48
|
+
* as `{ value: <literal> }` explicit-static.
|
|
49
|
+
*/
|
|
50
|
+
export function parseDiscContainer(
|
|
51
|
+
obj: Record<string, unknown>,
|
|
52
|
+
path = "",
|
|
53
|
+
): ParseDiscResult {
|
|
54
|
+
const errors: string[] = []
|
|
55
|
+
const refs = new Set<string>()
|
|
56
|
+
const entries: Record<string, DiscNode> = {}
|
|
57
|
+
for (const [k, v] of Object.entries(obj)) {
|
|
58
|
+
entries[k] = walk(v, path ? `${path}.${k}` : k, errors, refs)
|
|
59
|
+
}
|
|
60
|
+
return { tree: { kind: "object", entries }, errors, refs }
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
function walk(value: unknown, path: string, errors: string[], refs: Set<string>): DiscNode {
|
|
64
|
+
// Primitives — bare literal.
|
|
65
|
+
if (value === null || typeof value !== "object") {
|
|
66
|
+
return { kind: "literal", value }
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
// Arrays — recurse into elements.
|
|
70
|
+
if (Array.isArray(value)) {
|
|
71
|
+
return {
|
|
72
|
+
kind: "array",
|
|
73
|
+
items: value.map((v, i) => walk(v, `${path}[${i}]`, errors, refs)),
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
// Objects — discriminator detection.
|
|
78
|
+
const obj = value as Record<string, unknown>
|
|
79
|
+
const keys = Object.keys(obj)
|
|
80
|
+
|
|
81
|
+
if (keys.length === 1) {
|
|
82
|
+
if (keys[0] === "value") {
|
|
83
|
+
// Explicit static. The wrapped value is a literal — do NOT recurse
|
|
84
|
+
// into it for further discriminator parsing (per §5: { value } extracts
|
|
85
|
+
// the contained literal as-is, even if it is itself discriminator-shaped).
|
|
86
|
+
return { kind: "literal", value: obj.value }
|
|
87
|
+
}
|
|
88
|
+
if (keys[0] === "expr") {
|
|
89
|
+
const src = obj.expr
|
|
90
|
+
if (typeof src !== "string") {
|
|
91
|
+
errors.push(`${pathLabel(path)}: { expr: ... } must contain a string, got ${describeType(src)}`)
|
|
92
|
+
return { kind: "literal", value: null }
|
|
93
|
+
}
|
|
94
|
+
try {
|
|
95
|
+
const parsed = parseExpr(src)
|
|
96
|
+
for (const r of parsed.refs) refs.add(r)
|
|
97
|
+
return { kind: "expr", expr: parsed }
|
|
98
|
+
} catch (err) {
|
|
99
|
+
errors.push(`${pathLabel(path)}: ${(err as Error).message}`)
|
|
100
|
+
return { kind: "literal", value: null }
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
// §13 item 9: reject mixed discriminators like { value, expr }.
|
|
106
|
+
if (keys.length === 2 && keys.includes("value") && keys.includes("expr")) {
|
|
107
|
+
errors.push(`${pathLabel(path)}: object has both 'value' and 'expr' — discriminator must be exactly one of them`)
|
|
108
|
+
return { kind: "literal", value: null }
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
// Generic object — recurse into values.
|
|
112
|
+
const entries: Record<string, DiscNode> = {}
|
|
113
|
+
for (const [k, v] of Object.entries(obj)) {
|
|
114
|
+
entries[k] = walk(v, path ? `${path}.${k}` : k, errors, refs)
|
|
115
|
+
}
|
|
116
|
+
return { kind: "object", entries }
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
function pathLabel(path: string): string {
|
|
120
|
+
return path === "" ? "<root>" : path
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
function describeType(v: unknown): string {
|
|
124
|
+
if (v === null) return "null"
|
|
125
|
+
if (Array.isArray(v)) return "array"
|
|
126
|
+
return typeof v
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
/**
|
|
130
|
+
* Resolve a parsed discriminator tree against a runtime ctx. Bare literals
|
|
131
|
+
* pass through; `{ value }` already extracted; `{ expr }` evaluates lazily
|
|
132
|
+
* with `ctx` in scope.
|
|
133
|
+
*/
|
|
134
|
+
export function resolveDisc(tree: DiscNode, ctx: unknown): unknown {
|
|
135
|
+
switch (tree.kind) {
|
|
136
|
+
case "literal":
|
|
137
|
+
return tree.value
|
|
138
|
+
case "expr":
|
|
139
|
+
return evalExpr(tree.expr.source, ctx)
|
|
140
|
+
case "array":
|
|
141
|
+
return tree.items.map((n) => resolveDisc(n, ctx))
|
|
142
|
+
case "object": {
|
|
143
|
+
const out: Record<string, unknown> = {}
|
|
144
|
+
for (const [k, v] of Object.entries(tree.entries)) out[k] = resolveDisc(v, ctx)
|
|
145
|
+
return out
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
}
|
package/src/expr.ts
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
// TypeScript expression parsing and evaluation for pipeline-spec v1 §5.
|
|
3
|
+
//
|
|
4
|
+
// `{ expr: "<ts-expression>" }` discriminator values are parsed once at
|
|
5
|
+
// spec-load (validation + ref extraction) and evaluated lazily per-node
|
|
6
|
+
// at run time. The single bound identifier is `ctx`; see pipeline-spec.md
|
|
7
|
+
// §5 for the contract.
|
|
8
|
+
//
|
|
9
|
+
// Static analysis extracts `ctx.results.<id>` references for DAG derivation.
|
|
10
|
+
// Computed access (`ctx.results[someVar]`) is rejected — node references
|
|
11
|
+
// must be statically determinable per §13 item 11.
|
|
12
|
+
|
|
13
|
+
import { parse as babelParse } from "@babel/parser"
|
|
14
|
+
import type { Node, MemberExpression } from "@babel/types"
|
|
15
|
+
|
|
16
|
+
export interface ParsedExpr {
|
|
17
|
+
/** Original source string, kept for evaluation and diagnostics. */
|
|
18
|
+
source: string
|
|
19
|
+
/** Set of node ids referenced via ctx.results.<id>. */
|
|
20
|
+
refs: Set<string>
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
/**
|
|
24
|
+
* Parse a TypeScript expression string. Validates syntactic well-formedness
|
|
25
|
+
* and extracts the set of `ctx.results.<id>` references for DAG derivation.
|
|
26
|
+
* Throws on parse error or computed `ctx.results[...]` access.
|
|
27
|
+
*/
|
|
28
|
+
export function parseExpr(source: string): ParsedExpr {
|
|
29
|
+
let file
|
|
30
|
+
try {
|
|
31
|
+
// Wrap in parens to force expression-position parsing — without this,
|
|
32
|
+
// a leading `{` would be parsed as a block statement, and `function () {}`
|
|
33
|
+
// would be a function declaration rather than an expression.
|
|
34
|
+
file = babelParse(`(${source})`, {
|
|
35
|
+
sourceType: "script",
|
|
36
|
+
plugins: ["typescript"],
|
|
37
|
+
errorRecovery: false,
|
|
38
|
+
})
|
|
39
|
+
} catch (err) {
|
|
40
|
+
throw new Error(`expression parse error: ${(err as Error).message}`)
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
const stmt = file.program.body[0]
|
|
44
|
+
if (!stmt || stmt.type !== "ExpressionStatement") {
|
|
45
|
+
throw new Error("not a TypeScript expression")
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
const refs = new Set<string>()
|
|
49
|
+
collectCtxRefs(stmt.expression as Node, refs)
|
|
50
|
+
return { source, refs }
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
/**
|
|
54
|
+
* Recursively walk an AST collecting `ctx.results.<id>` member-access roots.
|
|
55
|
+
* Throws on computed access at the `ctx.results[...]` level.
|
|
56
|
+
*/
|
|
57
|
+
function collectCtxRefs(node: Node | null | undefined, refs: Set<string>): void {
|
|
58
|
+
if (!node || typeof node !== "object" || typeof (node as Node).type !== "string") return
|
|
59
|
+
|
|
60
|
+
if (node.type === "MemberExpression") {
|
|
61
|
+
const ref = matchCtxResultsRef(node)
|
|
62
|
+
if (ref) refs.add(ref)
|
|
63
|
+
// Fall through: still walk children to catch nested ctx.results.X inside
|
|
64
|
+
// member expressions (e.g. ctx.results.a[ctx.results.b ? 0 : 1] would be
|
|
65
|
+
// rejected at the inner level, but we still want to surface ctx.results.a).
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
for (const key of Object.keys(node)) {
|
|
69
|
+
if (key === "loc" || key === "range" || key === "leadingComments" || key === "trailingComments") continue
|
|
70
|
+
const value = (node as unknown as Record<string, unknown>)[key]
|
|
71
|
+
if (Array.isArray(value)) {
|
|
72
|
+
for (const item of value) collectCtxRefs(item as Node, refs)
|
|
73
|
+
} else if (value && typeof value === "object") {
|
|
74
|
+
collectCtxRefs(value as Node, refs)
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
/**
|
|
80
|
+
* If `node` is `ctx.results.<id>` (non-computed identifier access), return the
|
|
81
|
+
* id. If it is `ctx.results[<expr>]` (computed access), throw — the spec
|
|
82
|
+
* requires statically determinable references.
|
|
83
|
+
*/
|
|
84
|
+
function matchCtxResultsRef(node: MemberExpression): string | null {
|
|
85
|
+
if (!isCtxResults(node.object)) return null
|
|
86
|
+
|
|
87
|
+
if (node.computed) {
|
|
88
|
+
// Allow numeric/string-literal computed access (e.g. ctx.results["foo"])
|
|
89
|
+
// since those are statically determinable. Reject everything else.
|
|
90
|
+
if (node.property.type === "StringLiteral") return node.property.value
|
|
91
|
+
if (node.property.type === "NumericLiteral") return String(node.property.value)
|
|
92
|
+
throw new Error(
|
|
93
|
+
"computed access on ctx.results (e.g. ctx.results[someVar]) is not allowed; " +
|
|
94
|
+
"node references must be statically determinable",
|
|
95
|
+
)
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
if (node.property.type !== "Identifier") return null
|
|
99
|
+
return node.property.name
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
/** Returns true iff `node` is the member access `ctx.results`. */
|
|
103
|
+
function isCtxResults(node: Node): boolean {
|
|
104
|
+
if (node.type !== "MemberExpression") return false
|
|
105
|
+
if (node.computed) return false
|
|
106
|
+
if (node.property.type !== "Identifier" || node.property.name !== "results") return false
|
|
107
|
+
if (node.object.type !== "Identifier" || node.object.name !== "ctx") return false
|
|
108
|
+
return true
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
// ─── Evaluation ──────────────────────────────────────────────────────────────
|
|
112
|
+
|
|
113
|
+
const compileCache = new Map<string, (ctx: unknown) => unknown>()
|
|
114
|
+
|
|
115
|
+
/**
|
|
116
|
+
* Evaluate a TS expression against `ctx`. Compiled functions are cached by
|
|
117
|
+
* source string so repeated runs of the same pipeline reuse the same closure.
|
|
118
|
+
*
|
|
119
|
+
* v1 evaluates with full Bun JS privileges (no sandbox); the trust boundary
|
|
120
|
+
* is pipeline write authority. See pipeline-spec.md §5 and §16.2.
|
|
121
|
+
*/
|
|
122
|
+
export function evalExpr(source: string, ctx: unknown): unknown {
|
|
123
|
+
let fn = compileCache.get(source)
|
|
124
|
+
if (!fn) {
|
|
125
|
+
try {
|
|
126
|
+
fn = new Function("ctx", `return (${source})`) as (ctx: unknown) => unknown
|
|
127
|
+
} catch (err) {
|
|
128
|
+
throw new Error(`expression compile error: ${(err as Error).message}`)
|
|
129
|
+
}
|
|
130
|
+
compileCache.set(source, fn)
|
|
131
|
+
}
|
|
132
|
+
return fn(ctx)
|
|
133
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
// Platform-spec — the contract shared between the runner, the CLI, and any
|
|
3
|
+
// future agents that need to reason about pipelines and kinds.
|
|
4
|
+
//
|
|
5
|
+
// Two concerns live together because they evolve together:
|
|
6
|
+
//
|
|
7
|
+
// - **Pipeline format** (`pipeline-spec.md`) — yaml strict loader,
|
|
8
|
+
// `{ value | expr }` discriminator, TS expression parser/evaluator,
|
|
9
|
+
// and the v1 validator with closed-set + payloadSchema enforcement.
|
|
10
|
+
//
|
|
11
|
+
// - **Kind contract** (`kind-spec.md`, forthcoming) — the runtime
|
|
12
|
+
// interface every NodeKind sees: ParamDef/PortDef shapes, the
|
|
13
|
+
// ExecContext exposed to `run`, and Cache/HitlSpec collateral types.
|
|
14
|
+
//
|
|
15
|
+
// Validation is offline-friendly: kind existence is pluggable via
|
|
16
|
+
// `ValidateOptions.hasKind`, so the CLI can validate a pipeline without
|
|
17
|
+
// booting a runner. Keep this package free of HTTP, DB, and registry
|
|
18
|
+
// implementation — types and pure functions only.
|
|
19
|
+
|
|
20
|
+
export { parse as parseYaml, YamlError } from "./yaml.ts"
|
|
21
|
+
export { parseExpr, evalExpr, type ParsedExpr } from "./expr.ts"
|
|
22
|
+
export { parseDisc, parseDiscContainer, resolveDisc, type DiscNode, type ParseDiscResult } from "./discriminator.ts"
|
|
23
|
+
export {
|
|
24
|
+
validateSpec,
|
|
25
|
+
type ValidateOptions,
|
|
26
|
+
type ValidateResult,
|
|
27
|
+
type PipelineSpec,
|
|
28
|
+
type PipelineNode,
|
|
29
|
+
type ParsedNode,
|
|
30
|
+
} from "./validate.ts"
|
|
31
|
+
export {
|
|
32
|
+
type ResourceTypeDef,
|
|
33
|
+
type RunStore,
|
|
34
|
+
type SubRun,
|
|
35
|
+
type SubRunOutcome,
|
|
36
|
+
type OnErrorPolicy,
|
|
37
|
+
type ErrorReviewSpec,
|
|
38
|
+
type ParamDef,
|
|
39
|
+
type PortDef,
|
|
40
|
+
type Cache,
|
|
41
|
+
type HitlSpec,
|
|
42
|
+
type PlatformCtx,
|
|
43
|
+
type ExecContext,
|
|
44
|
+
type NodeKind,
|
|
45
|
+
type NodeKindManifest,
|
|
46
|
+
} from "./kinds.ts"
|
|
47
|
+
export {
|
|
48
|
+
paramDefToSchema,
|
|
49
|
+
portDefToSchema,
|
|
50
|
+
kindToNodeSchema,
|
|
51
|
+
pipelineSchema,
|
|
52
|
+
ctxDts,
|
|
53
|
+
type JsonSchema,
|
|
54
|
+
} from "./jsonschema.ts"
|
|
@@ -0,0 +1,296 @@
|
|
|
1
|
+
// 2121
|
|
2
|
+
// JSON Schema generation for the v1 pipeline format. Per pipeline-spec.md §15,
|
|
3
|
+
// the generated schema covers §3 (pipeline-level) + §4 (node-level) with
|
|
4
|
+
// per-kind params/input shapes inlined from the kind manifest.
|
|
5
|
+
//
|
|
6
|
+
// Pure spec logic — no fs, no fetch. The CLI's gen-pipeline-schema.ts script
|
|
7
|
+
// fetches the live manifest and resource types, then calls these helpers to
|
|
8
|
+
// produce the actual artifacts.
|
|
9
|
+
|
|
10
|
+
import type { ParamDef, PortDef, NodeKindManifest, ResourceTypeDef } from "./kinds.ts"
|
|
11
|
+
|
|
12
|
+
const API_VERSION = "2121.fi/v1"
|
|
13
|
+
const SLUG_PATTERN = "^[A-Za-z][\\w-]*$"
|
|
14
|
+
|
|
15
|
+
export type JsonSchema = Record<string, unknown>
|
|
16
|
+
|
|
17
|
+
// ─── Discriminator wrapping ───────────────────────────────────────────────────
|
|
18
|
+
// Per pipeline-spec.md §5: any value can be authored as a bare literal,
|
|
19
|
+
// `{ value: <literal> }`, or `{ expr: "..." }`. The discriminator union below
|
|
20
|
+
// is reused for every param and port leaf.
|
|
21
|
+
|
|
22
|
+
function discriminatorUnion(literal: JsonSchema): JsonSchema {
|
|
23
|
+
// anyOf, not oneOf — for permissive types like `any`/`object`/`json` the
|
|
24
|
+
// literal branch overlaps with `{ value: ... }` and `{ expr: ... }` (an
|
|
25
|
+
// object literal IS an object). Authors don't care which branch matches;
|
|
26
|
+
// the runtime discriminator parser disambiguates exact shapes regardless.
|
|
27
|
+
return {
|
|
28
|
+
anyOf: [
|
|
29
|
+
literal,
|
|
30
|
+
{
|
|
31
|
+
type: "object",
|
|
32
|
+
required: ["value"],
|
|
33
|
+
properties: { value: literal },
|
|
34
|
+
additionalProperties: false,
|
|
35
|
+
},
|
|
36
|
+
{
|
|
37
|
+
type: "object",
|
|
38
|
+
required: ["expr"],
|
|
39
|
+
properties: { expr: { type: "string" } },
|
|
40
|
+
additionalProperties: false,
|
|
41
|
+
},
|
|
42
|
+
],
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
// ─── ParamDef / PortDef → JSON Schema ────────────────────────────────────────
|
|
47
|
+
|
|
48
|
+
/** Translate a ParamDef to its JSON Schema fragment, wrapped in the
|
|
49
|
+
* discriminator union (bare | { value } | { expr }) per §5. */
|
|
50
|
+
export function paramDefToSchema(def: ParamDef): JsonSchema {
|
|
51
|
+
let literal: JsonSchema
|
|
52
|
+
switch (def.type) {
|
|
53
|
+
case "string":
|
|
54
|
+
literal = { type: "string" }
|
|
55
|
+
break
|
|
56
|
+
case "number":
|
|
57
|
+
literal = { type: "number" }
|
|
58
|
+
break
|
|
59
|
+
case "boolean":
|
|
60
|
+
literal = { type: "boolean" }
|
|
61
|
+
break
|
|
62
|
+
case "select":
|
|
63
|
+
literal = { type: "string", enum: (def.options ?? []).map((o) => o.value) }
|
|
64
|
+
break
|
|
65
|
+
case "expression":
|
|
66
|
+
// An expression param expects a function value at runtime. The author
|
|
67
|
+
// writes { expr: "(item) => ..." } in YAML — bare-literal and { value }
|
|
68
|
+
// forms don't make sense here. Restrict to the expr branch.
|
|
69
|
+
return {
|
|
70
|
+
type: "object",
|
|
71
|
+
required: ["expr"],
|
|
72
|
+
properties: { expr: { type: "string" } },
|
|
73
|
+
additionalProperties: false,
|
|
74
|
+
}
|
|
75
|
+
case "json":
|
|
76
|
+
default:
|
|
77
|
+
literal = {} // any JSON value
|
|
78
|
+
break
|
|
79
|
+
}
|
|
80
|
+
if (def.description) literal.description = def.description
|
|
81
|
+
return discriminatorUnion(literal)
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
/** Translate a PortDef to its JSON Schema fragment, wrapped in the
|
|
85
|
+
* discriminator union. Unlike params, ports never carry a hardcoded
|
|
86
|
+
* expression-only restriction — any port can be authored as a literal,
|
|
87
|
+
* { value: ... }, or { expr: ... }. */
|
|
88
|
+
export function portDefToSchema(def: PortDef): JsonSchema {
|
|
89
|
+
let literal: JsonSchema
|
|
90
|
+
switch (def.type) {
|
|
91
|
+
case "string": literal = { type: "string" }; break
|
|
92
|
+
case "number": literal = { type: "number" }; break
|
|
93
|
+
case "boolean": literal = { type: "boolean" }; break
|
|
94
|
+
case "object": literal = { type: "object" }; break
|
|
95
|
+
case "array": literal = { type: "array" }; break
|
|
96
|
+
case "any":
|
|
97
|
+
default:
|
|
98
|
+
literal = {}
|
|
99
|
+
break
|
|
100
|
+
}
|
|
101
|
+
if (def.description) literal.description = def.description
|
|
102
|
+
return discriminatorUnion(literal)
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
// ─── Per-kind node schema ────────────────────────────────────────────────────
|
|
106
|
+
|
|
107
|
+
function paramsObjectSchema(decl: Record<string, ParamDef>): JsonSchema {
|
|
108
|
+
const props: Record<string, JsonSchema> = {}
|
|
109
|
+
const required: string[] = []
|
|
110
|
+
for (const [key, def] of Object.entries(decl)) {
|
|
111
|
+
props[key] = paramDefToSchema(def)
|
|
112
|
+
if (def.required) required.push(key)
|
|
113
|
+
}
|
|
114
|
+
const schema: JsonSchema = {
|
|
115
|
+
type: "object",
|
|
116
|
+
properties: props,
|
|
117
|
+
additionalProperties: false,
|
|
118
|
+
}
|
|
119
|
+
if (required.length > 0) schema.required = required
|
|
120
|
+
return schema
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
function inputsObjectSchema(decl: Record<string, PortDef>): JsonSchema {
|
|
124
|
+
const props: Record<string, JsonSchema> = {}
|
|
125
|
+
const required: string[] = []
|
|
126
|
+
for (const [key, def] of Object.entries(decl)) {
|
|
127
|
+
props[key] = portDefToSchema(def)
|
|
128
|
+
if (def.required) required.push(key)
|
|
129
|
+
}
|
|
130
|
+
const schema: JsonSchema = {
|
|
131
|
+
type: "object",
|
|
132
|
+
properties: props,
|
|
133
|
+
additionalProperties: false,
|
|
134
|
+
}
|
|
135
|
+
if (required.length > 0) schema.required = required
|
|
136
|
+
return schema
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
/** A discriminator branch — the schema for a node when `kind` matches a
|
|
140
|
+
* specific id. The base node fields (id/label/dependsOn) are added by the
|
|
141
|
+
* caller via `allOf` composition. params and input are themselves marked
|
|
142
|
+
* required when the kind declares any required fields inside them, so
|
|
143
|
+
* e.g. transform.sort (which requires params.by) rejects a node that
|
|
144
|
+
* omits `params:` entirely. */
|
|
145
|
+
export function kindToNodeSchema(kind: NodeKindManifest): JsonSchema {
|
|
146
|
+
const required: string[] = ["kind"]
|
|
147
|
+
if (Object.values(kind.params).some((d) => d.required)) required.push("params")
|
|
148
|
+
if (Object.values(kind.inputs).some((d) => d.required)) required.push("input")
|
|
149
|
+
return {
|
|
150
|
+
type: "object",
|
|
151
|
+
properties: {
|
|
152
|
+
kind: { const: kind.id },
|
|
153
|
+
params: paramsObjectSchema(kind.params),
|
|
154
|
+
input: inputsObjectSchema(kind.inputs),
|
|
155
|
+
},
|
|
156
|
+
required,
|
|
157
|
+
}
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
// ─── Top-level pipeline schema ───────────────────────────────────────────────
|
|
161
|
+
|
|
162
|
+
const NODE_BASE: JsonSchema = {
|
|
163
|
+
type: "object",
|
|
164
|
+
properties: {
|
|
165
|
+
id: { type: "string", pattern: SLUG_PATTERN },
|
|
166
|
+
kind: { type: "string" },
|
|
167
|
+
label: { type: "string" },
|
|
168
|
+
params: { type: "object" },
|
|
169
|
+
input: { type: "object" },
|
|
170
|
+
dependsOn: { type: "array", items: { type: "string" } },
|
|
171
|
+
},
|
|
172
|
+
required: ["id", "kind"],
|
|
173
|
+
additionalProperties: false,
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
/** Build the v1 pipeline JSON Schema from a snapshot of the kind manifest.
|
|
177
|
+
* Editors (Monaco, VS Code, JetBrains) consume this for auto-complete
|
|
178
|
+
* and per-kind shape validation on YAML files. */
|
|
179
|
+
export function pipelineSchema(kinds: NodeKindManifest[]): JsonSchema {
|
|
180
|
+
const kindUnion: JsonSchema[] = kinds.map(kindToNodeSchema)
|
|
181
|
+
|
|
182
|
+
const nodeSchema: JsonSchema = kindUnion.length > 0
|
|
183
|
+
? { allOf: [NODE_BASE, { oneOf: kindUnion }] }
|
|
184
|
+
: NODE_BASE
|
|
185
|
+
|
|
186
|
+
return {
|
|
187
|
+
$schema: "https://json-schema.org/draft/2020-12/schema",
|
|
188
|
+
$id: `https://2121.fi/pipeline-spec/v1/pipeline.schema.json`,
|
|
189
|
+
title: "Pipeline (2121.fi/v1)",
|
|
190
|
+
description: "Generated from the live kind manifest. Regenerate when kinds are added or changed.",
|
|
191
|
+
type: "object",
|
|
192
|
+
properties: {
|
|
193
|
+
apiVersion: { const: API_VERSION },
|
|
194
|
+
id: { type: "string", pattern: SLUG_PATTERN },
|
|
195
|
+
label: { type: "string" },
|
|
196
|
+
description: { type: "string" },
|
|
197
|
+
schedule: { type: "string", description: "5-field cron expression (planned for §16.1; parsed but not active in v1)" },
|
|
198
|
+
payloadSchema: { type: "object", description: "JSON Schema 2020-12 for the pipeline's run payload" },
|
|
199
|
+
nodes: { type: "array", minItems: 1, items: nodeSchema },
|
|
200
|
+
},
|
|
201
|
+
required: ["nodes"],
|
|
202
|
+
additionalProperties: false,
|
|
203
|
+
}
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
// ─── ctx.d.ts (TypeScript declarations) ───────────────────────────────────────
|
|
207
|
+
// Per pipeline-spec.md §15, ctx.d.ts is generated from payloadSchema, the
|
|
208
|
+
// kind manifest's output ports, and the resource registry. v1 ships a coarse
|
|
209
|
+
// but correct version: per-Resource-Type interfaces, untyped ctx.results /
|
|
210
|
+
// ctx.params (those depend on the specific pipeline being authored).
|
|
211
|
+
|
|
212
|
+
/** Sanitize a Resource Type name into a valid TypeScript identifier suffix. */
|
|
213
|
+
function tsIdent(name: string): string {
|
|
214
|
+
return name.replace(/[^A-Za-z0-9_]/g, "")
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
/** JSON Schema → TypeScript type expression. Coarse but correct for the
|
|
218
|
+
* shapes Resource Type schemas use (object with primitive/optional fields).
|
|
219
|
+
* Falls back to `unknown` for anything beyond that. */
|
|
220
|
+
function jsonSchemaToTs(schema: unknown, indent = 0): string {
|
|
221
|
+
if (!schema || typeof schema !== "object") return "unknown"
|
|
222
|
+
const s = schema as Record<string, unknown>
|
|
223
|
+
|
|
224
|
+
if (s.enum && Array.isArray(s.enum)) {
|
|
225
|
+
return s.enum.map((v) => JSON.stringify(v)).join(" | ")
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
switch (s.type) {
|
|
229
|
+
case "string": return "string"
|
|
230
|
+
case "number": return "number"
|
|
231
|
+
case "integer": return "number"
|
|
232
|
+
case "boolean": return "boolean"
|
|
233
|
+
case "null": return "null"
|
|
234
|
+
case "array": {
|
|
235
|
+
const items = jsonSchemaToTs(s.items, indent)
|
|
236
|
+
return `${items}[]`
|
|
237
|
+
}
|
|
238
|
+
case "object": {
|
|
239
|
+
const props = (s.properties as Record<string, unknown>) ?? {}
|
|
240
|
+
const required = new Set((s.required as string[]) ?? [])
|
|
241
|
+
const keys = Object.keys(props)
|
|
242
|
+
if (keys.length === 0) return "Record<string, unknown>"
|
|
243
|
+
const pad = " ".repeat(indent + 1)
|
|
244
|
+
const closePad = " ".repeat(indent)
|
|
245
|
+
const lines = keys.map((k) => {
|
|
246
|
+
const optional = required.has(k) ? "" : "?"
|
|
247
|
+
const valTs = jsonSchemaToTs(props[k], indent + 1)
|
|
248
|
+
return `${pad}${k}${optional}: ${valTs};`
|
|
249
|
+
})
|
|
250
|
+
return `{\n${lines.join("\n")}\n${closePad}}`
|
|
251
|
+
}
|
|
252
|
+
}
|
|
253
|
+
return "unknown"
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
/** Generate `ctx.d.ts` source from the resource type registry. The output is
 * a self-contained module that authors can reference for autocomplete on
 * `ctx.resource.<name>.<field>` once they have a concrete Resource Type. */
export function ctxDts(resourceTypes: ResourceTypeDef[]): string {
  // One exported interface per Resource Type; a type's description (when
  // present) is emitted as a TSDoc comment above its interface.
  const interfaces = resourceTypes.map((t) => {
    const tsType = jsonSchemaToTs(t.schema)
    const desc = t.description ? `/** ${t.description} */\n` : ""
    return `${desc}export interface ${tsIdent(t.name)} ${tsType}`
  }).join("\n\n")

  // AnyResource is the union of every registered type; with an empty
  // registry it degrades to Record<string, unknown> so Ctx still compiles.
  const typeUnion = resourceTypes.length > 0
    ? resourceTypes.map((t) => tsIdent(t.name)).join(" | ")
    : "Record<string, unknown>"

  // NOTE(review): the template below is the generated file's literal content
  // (backticks escaped); its exact whitespace is part of the emitted artifact.
  return `// AUTO-GENERATED by packages/cli/scripts/gen-pipeline-schema.ts
// Source: kind manifest + resource type registry. Regenerate after Resource
// Type registration changes. Do not edit by hand.
//
// Resource Type interfaces give pipeline authors per-field type-checking on
// \`ctx.resource.<name>\` once they assert which Type they're addressing:
//   const api = ctx.resource.anthropic as AnthropicApi
//   api.apiKey  // typed string
//
// v1 leaves \`ctx.params\` and \`ctx.results\` untyped because both depend on
// the specific pipeline being authored. Per-pipeline narrowing is §16.2.

${interfaces}

export type AnyResource = ${typeUnion}

export interface Ctx {
  /** Run payload (validated against pipeline.payloadSchema if declared). */
  params: Record<string, unknown>
  /** Outputs of completed upstream nodes, keyed by node id. */
  results: Record<string, unknown>
  /** Configured resources (\`ctx.resource.<name>.<field>\`). Cast to a
   * specific Resource Type for field-level type-checking. */
  resource: Record<string, AnyResource>
}
`
}
|