@duckflux/runner 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.js +55191 -0
- package/package.json +34 -0
- package/src/lint.ts +179 -0
- package/src/main.ts +58 -0
- package/src/run.ts +164 -0
- package/src/validate.ts +95 -0
package/package.json
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@duckflux/runner",
|
|
3
|
+
"version": "0.6.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"bin": {
|
|
6
|
+
"duckflux": "dist/main.js"
|
|
7
|
+
},
|
|
8
|
+
"exports": {
|
|
9
|
+
".": "./dist/index.js",
|
|
10
|
+
"./dist/main.js": "./dist/main.js"
|
|
11
|
+
},
|
|
12
|
+
"files": [
|
|
13
|
+
"dist",
|
|
14
|
+
"src"
|
|
15
|
+
],
|
|
16
|
+
"scripts": {
|
|
17
|
+
"build": "bun build src/main.ts --outdir dist --target node --format esm --banner '#!/usr/bin/env node' && tsc --project tsconfig.build.json",
|
|
18
|
+
"postbuild": "chmod +x dist/main.js",
|
|
19
|
+
"prepublishOnly": "bun run build",
|
|
20
|
+
"compile:linux-x64": "bun build --compile --target=bun-linux-x64 src/main.ts --outfile bin/duckflux-linux-x64",
|
|
21
|
+
"compile:linux-arm64": "bun build --compile --target=bun-linux-arm64 src/main.ts --outfile bin/duckflux-linux-arm64",
|
|
22
|
+
"compile:darwin-x64": "bun build --compile --target=bun-darwin-x64 src/main.ts --outfile bin/duckflux-darwin-x64",
|
|
23
|
+
"compile:darwin-arm64": "bun build --compile --target=bun-darwin-arm64 src/main.ts --outfile bin/duckflux-darwin-arm64",
|
|
24
|
+
"compile:windows-x64": "bun build --compile --target=bun-windows-x64 src/main.ts --outfile bin/duckflux-windows-x64.exe",
|
|
25
|
+
"compile": "bun run compile:linux-x64 && bun run compile:linux-arm64 && bun run compile:darwin-x64 && bun run compile:darwin-arm64 && bun run compile:windows-x64"
|
|
26
|
+
},
|
|
27
|
+
"dependencies": {
|
|
28
|
+
"@duckflux/core": "workspace:*"
|
|
29
|
+
},
|
|
30
|
+
"optionalDependencies": {
|
|
31
|
+
"@duckflux/hub-nats": "workspace:*",
|
|
32
|
+
"@duckflux/hub-redis": "workspace:*"
|
|
33
|
+
}
|
|
34
|
+
}
|
package/src/lint.ts
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
import { dirname } from "node:path";
|
|
3
|
+
import { parseWorkflowFile, validateSchema, validateSemantic } from "@duckflux/core";
|
|
4
|
+
import type { Workflow } from "@duckflux/core";
|
|
5
|
+
|
|
6
|
+
// A single non-blocking lint finding.
// `path` is a dotted/indexed location inside the workflow document
// (e.g. "flow[2].loop"); `message` explains the suspected problem.
interface LintWarning {
  path: string;
  message: string;
}
|
|
10
|
+
|
|
11
|
+
function collectLintWarnings(workflow: Workflow): LintWarning[] {
|
|
12
|
+
const warnings: LintWarning[] = [];
|
|
13
|
+
const participants = workflow.participants ?? {};
|
|
14
|
+
|
|
15
|
+
collectFlowWarnings(workflow.flow ?? [], participants, warnings);
|
|
16
|
+
|
|
17
|
+
return warnings;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
function collectFlowWarnings(
|
|
21
|
+
flow: unknown[],
|
|
22
|
+
participants: Record<string, { type: string; as?: string; [key: string]: unknown }>,
|
|
23
|
+
warnings: LintWarning[],
|
|
24
|
+
basePath = "flow",
|
|
25
|
+
): void {
|
|
26
|
+
for (const [index, step] of flow.entries()) {
|
|
27
|
+
const stepPath = `${basePath}[${index}]`;
|
|
28
|
+
|
|
29
|
+
if (!step || typeof step !== "object") continue;
|
|
30
|
+
|
|
31
|
+
const obj = step as Record<string, unknown>;
|
|
32
|
+
|
|
33
|
+
// Warn: loop with no `until` and no `max`
|
|
34
|
+
if (obj.loop && Object.keys(obj).length === 1) {
|
|
35
|
+
const loopDef = obj.loop as Record<string, unknown>;
|
|
36
|
+
if (loopDef.until == null && loopDef.max == null) {
|
|
37
|
+
warnings.push({
|
|
38
|
+
path: `${stepPath}.loop`,
|
|
39
|
+
message: "loop has no 'until' and no 'max' — this will be rejected at runtime",
|
|
40
|
+
});
|
|
41
|
+
}
|
|
42
|
+
collectFlowWarnings(
|
|
43
|
+
(loopDef.steps ?? []) as unknown[],
|
|
44
|
+
participants,
|
|
45
|
+
warnings,
|
|
46
|
+
`${stepPath}.loop.steps`,
|
|
47
|
+
);
|
|
48
|
+
continue;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
// Warn: parallel branches referencing the same mutable state via `set`
|
|
52
|
+
if (obj.parallel && Object.keys(obj).length === 1) {
|
|
53
|
+
const parallelSteps = obj.parallel as unknown[];
|
|
54
|
+
const branchSetKeys: Map<string, number[]> = new Map();
|
|
55
|
+
for (const [branchIdx, branch] of parallelSteps.entries()) {
|
|
56
|
+
const setKeys = collectSetKeys(branch);
|
|
57
|
+
for (const key of setKeys) {
|
|
58
|
+
const branches = branchSetKeys.get(key) ?? [];
|
|
59
|
+
branches.push(branchIdx);
|
|
60
|
+
branchSetKeys.set(key, branches);
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
for (const [key, branches] of branchSetKeys) {
|
|
64
|
+
if (branches.length > 1) {
|
|
65
|
+
warnings.push({
|
|
66
|
+
path: `${stepPath}.parallel`,
|
|
67
|
+
message: `branches [${branches.join(", ")}] both write to '${key}' via set — race condition risk`,
|
|
68
|
+
});
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
collectFlowWarnings(parallelSteps, participants, warnings, `${stepPath}.parallel`);
|
|
72
|
+
continue;
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
// Warn: anonymous participant output referenced by name (unreachable)
|
|
76
|
+
if ("type" in obj && !obj.as) {
|
|
77
|
+
warnings.push({
|
|
78
|
+
path: stepPath,
|
|
79
|
+
message: "inline participant without 'as' — its output cannot be referenced by name",
|
|
80
|
+
});
|
|
81
|
+
continue;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
if (obj.if && Object.keys(obj).length === 1) {
|
|
85
|
+
const ifDef = obj.if as Record<string, unknown>;
|
|
86
|
+
collectFlowWarnings(
|
|
87
|
+
(ifDef.then ?? []) as unknown[],
|
|
88
|
+
participants,
|
|
89
|
+
warnings,
|
|
90
|
+
`${stepPath}.if.then`,
|
|
91
|
+
);
|
|
92
|
+
if (ifDef.else) {
|
|
93
|
+
collectFlowWarnings(
|
|
94
|
+
ifDef.else as unknown[],
|
|
95
|
+
participants,
|
|
96
|
+
warnings,
|
|
97
|
+
`${stepPath}.if.else`,
|
|
98
|
+
);
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
function collectSetKeys(step: unknown): string[] {
|
|
105
|
+
if (!step || typeof step !== "object") return [];
|
|
106
|
+
const obj = step as Record<string, unknown>;
|
|
107
|
+
|
|
108
|
+
if ("set" in obj && Object.keys(obj).length === 1) {
|
|
109
|
+
return Object.keys(obj.set as Record<string, unknown>);
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
// Recurse into nested flow structures
|
|
113
|
+
const keys: string[] = [];
|
|
114
|
+
if (obj.loop) {
|
|
115
|
+
const loopDef = obj.loop as Record<string, unknown>;
|
|
116
|
+
for (const s of (loopDef.steps ?? []) as unknown[]) {
|
|
117
|
+
keys.push(...collectSetKeys(s));
|
|
118
|
+
}
|
|
119
|
+
}
|
|
120
|
+
if (obj.if) {
|
|
121
|
+
const ifDef = obj.if as Record<string, unknown>;
|
|
122
|
+
for (const s of (ifDef.then ?? []) as unknown[]) {
|
|
123
|
+
keys.push(...collectSetKeys(s));
|
|
124
|
+
}
|
|
125
|
+
if (ifDef.else) {
|
|
126
|
+
for (const s of ifDef.else as unknown[]) {
|
|
127
|
+
keys.push(...collectSetKeys(s));
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
return keys;
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
export default async function lintCommand(filePath?: string): Promise<number> {
|
|
135
|
+
if (!filePath) {
|
|
136
|
+
console.error("Usage: duckflux lint <workflow.yaml>");
|
|
137
|
+
return 1;
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
try {
|
|
141
|
+
const workflow = await parseWorkflowFile(filePath);
|
|
142
|
+
|
|
143
|
+
const schemaRes = validateSchema(workflow);
|
|
144
|
+
if (!schemaRes.valid) {
|
|
145
|
+
console.error("Schema validation failed:");
|
|
146
|
+
for (const e of schemaRes.errors) {
|
|
147
|
+
console.error(` - ${e.path}: ${e.message}`);
|
|
148
|
+
}
|
|
149
|
+
return 1;
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
const basePath = dirname(filePath);
|
|
153
|
+
const semanticRes = await validateSemantic(workflow, basePath);
|
|
154
|
+
if (!semanticRes.valid) {
|
|
155
|
+
console.error("Semantic validation failed:");
|
|
156
|
+
for (const e of semanticRes.errors) {
|
|
157
|
+
console.error(` - ${e.path}: ${e.message}`);
|
|
158
|
+
}
|
|
159
|
+
return 1;
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
// Lint warnings (non-blocking)
|
|
163
|
+
const warnings = collectLintWarnings(workflow);
|
|
164
|
+
if (warnings.length > 0) {
|
|
165
|
+
console.warn("Warnings:");
|
|
166
|
+
for (const w of warnings) {
|
|
167
|
+
console.warn(` - ${w.path}: ${w.message}`);
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
console.log("valid");
|
|
172
|
+
return 0;
|
|
173
|
+
} catch (err: any) {
|
|
174
|
+
console.error("Error during lint:", err && err.message ? err.message : err);
|
|
175
|
+
return 1;
|
|
176
|
+
}
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
export { lintCommand };
|
package/src/main.ts
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
import { readFileSync } from "node:fs";
import { dirname, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { parseArgs } from "node:util";
import runCommand from "./run";
import lintCommand from "./lint";
import validateCommand from "./validate";
|
|
8
|
+
|
|
9
|
+
function getVersion(): string {
|
|
10
|
+
try {
|
|
11
|
+
const pkg = JSON.parse(readFileSync(resolve(dirname(new URL(import.meta.url).pathname), "../package.json"), "utf-8"));
|
|
12
|
+
return pkg.version ?? "0.0.0";
|
|
13
|
+
} catch {
|
|
14
|
+
return "0.0.0";
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
if (import.meta.main) {
|
|
19
|
+
const argv = Bun.argv.slice(2);
|
|
20
|
+
const { values, positionals } = parseArgs({
|
|
21
|
+
args: argv,
|
|
22
|
+
options: {
|
|
23
|
+
input: { type: "string", multiple: true, short: "i" },
|
|
24
|
+
"input-file": { type: "string" },
|
|
25
|
+
verbose: { type: "boolean", short: "v" },
|
|
26
|
+
quiet: { type: "boolean", default: false },
|
|
27
|
+
cwd: { type: "string" },
|
|
28
|
+
"event-backend": { type: "string", default: "memory" },
|
|
29
|
+
"nats-url": { type: "string" },
|
|
30
|
+
"nats-stream": { type: "string", default: "duckflux-events" },
|
|
31
|
+
"redis-addr": { type: "string", default: "localhost:6379" },
|
|
32
|
+
"redis-db": { type: "string", default: "0" },
|
|
33
|
+
},
|
|
34
|
+
allowPositionals: true,
|
|
35
|
+
});
|
|
36
|
+
|
|
37
|
+
const cmd = positionals[0] ?? "run";
|
|
38
|
+
|
|
39
|
+
if (cmd === "version") {
|
|
40
|
+
console.log(getVersion());
|
|
41
|
+
} else if (cmd === "run") {
|
|
42
|
+
const file = positionals[1];
|
|
43
|
+
const exitCode = await runCommand(file, values);
|
|
44
|
+
if (typeof exitCode === "number" && exitCode !== 0) process.exit(exitCode);
|
|
45
|
+
} else if (cmd === "lint") {
|
|
46
|
+
const file = positionals[1];
|
|
47
|
+
const exitCode = await lintCommand(file);
|
|
48
|
+
if (typeof exitCode === "number" && exitCode !== 0) process.exit(exitCode);
|
|
49
|
+
} else if (cmd === "validate") {
|
|
50
|
+
const file = positionals[1];
|
|
51
|
+
const exitCode = await validateCommand(file, values);
|
|
52
|
+
if (typeof exitCode === "number" && exitCode !== 0) process.exit(exitCode);
|
|
53
|
+
} else {
|
|
54
|
+
console.error("Unknown command:", cmd);
|
|
55
|
+
console.error("Available commands: run, lint, validate, version");
|
|
56
|
+
process.exit(1);
|
|
57
|
+
}
|
|
58
|
+
}
|
package/src/run.ts
ADDED
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
import { readFile } from "node:fs/promises";
|
|
3
|
+
import { runWorkflowFromFile } from "@duckflux/core/engine";
|
|
4
|
+
import type { ExecuteOptions, EventHub } from "@duckflux/core/engine";
|
|
5
|
+
|
|
6
|
+
type CLIValues = Record<string, unknown> | undefined;
|
|
7
|
+
|
|
8
|
+
async function createHubFromFlags(values: CLIValues): Promise<EventHub | undefined> {
|
|
9
|
+
const backend = (values?.["event-backend"] as string) ?? "memory";
|
|
10
|
+
|
|
11
|
+
if (backend === "memory") {
|
|
12
|
+
const { MemoryHub } = await import("@duckflux/core/eventhub");
|
|
13
|
+
return new MemoryHub();
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
if (backend === "nats") {
|
|
17
|
+
const url = values?.["nats-url"] as string | undefined;
|
|
18
|
+
if (!url) {
|
|
19
|
+
console.error("Error: --nats-url is required when using the NATS backend");
|
|
20
|
+
throw new Error("missing --nats-url");
|
|
21
|
+
}
|
|
22
|
+
const stream = (values?.["nats-stream"] as string) ?? "duckflux-events";
|
|
23
|
+
try {
|
|
24
|
+
const { NatsHub } = await import("@duckflux/hub-nats");
|
|
25
|
+
return await NatsHub.create({ url, stream });
|
|
26
|
+
} catch (err: unknown) {
|
|
27
|
+
if (err instanceof Error && (err.message.includes("Cannot find module") || err.message.includes("Cannot find package"))) {
|
|
28
|
+
console.error("Error: install @duckflux/hub-nats to use the NATS backend");
|
|
29
|
+
throw new Error("@duckflux/hub-nats not installed");
|
|
30
|
+
}
|
|
31
|
+
throw err;
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
if (backend === "redis") {
|
|
36
|
+
const addr = (values?.["redis-addr"] as string) ?? "localhost:6379";
|
|
37
|
+
const db = Number((values?.["redis-db"] as string) ?? "0");
|
|
38
|
+
try {
|
|
39
|
+
const { RedisHub } = await import("@duckflux/hub-redis");
|
|
40
|
+
return await RedisHub.create({ addr, db });
|
|
41
|
+
} catch (err: unknown) {
|
|
42
|
+
if (err instanceof Error && (err.message.includes("Cannot find module") || err.message.includes("Cannot find package"))) {
|
|
43
|
+
console.error("Error: install @duckflux/hub-redis to use the Redis backend");
|
|
44
|
+
throw new Error("@duckflux/hub-redis not installed");
|
|
45
|
+
}
|
|
46
|
+
throw err;
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
console.error(`Error: unknown event backend "${backend}". Supported: memory, nats, redis`);
|
|
51
|
+
throw new Error(`unknown event backend: ${backend}`);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
function parseInputFlags(arr: string[] | undefined): Record<string, unknown> {
|
|
55
|
+
const out: Record<string, unknown> = {};
|
|
56
|
+
if (!arr) return out;
|
|
57
|
+
for (const item of arr) {
|
|
58
|
+
const idx = item.indexOf("=");
|
|
59
|
+
if (idx === -1) {
|
|
60
|
+
out[item] = true;
|
|
61
|
+
} else {
|
|
62
|
+
const k = item.slice(0, idx);
|
|
63
|
+
const v = item.slice(idx + 1);
|
|
64
|
+
try {
|
|
65
|
+
out[k] = JSON.parse(v);
|
|
66
|
+
} catch {
|
|
67
|
+
out[k] = v;
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
return out;
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
/**
 * `duckflux run <workflow.yaml>` — gathers inputs, builds the event hub from
 * CLI flags, executes the workflow, and prints its resolved output.
 *
 * @param filePath path to the workflow file; usage is printed when omitted.
 * @param cliValues parsed CLI flags (see main.ts parseArgs options).
 * @returns process exit code: 0 on success, 2 when the workflow itself
 *          reports failure, 1 on setup/runtime errors.
 */
export default async function runCommand(filePath?: string, cliValues?: CLIValues): Promise<number> {
  if (!filePath) {
    console.error("Usage: duckflux run <workflow.yaml> [--input k=v] [--input-file file.json] [--cwd dir]");
    return 1;
  }

  // Input precedence: --input > --input-file > stdin
  let inputs: Record<string, unknown> = {};

  // 1. Try stdin first (lowest priority)
  // NOTE(review): stdin is drained whenever it is not a TTY — if stdin is a
  // pipe that is never closed, this awaits indefinitely; confirm intended.
  try {
    if (process.stdin && !process.stdin.isTTY) {
      let stdin = "";
      for await (const chunk of process.stdin) {
        stdin += chunk;
      }
      stdin = stdin.trim();
      if (stdin.length > 0) {
        try {
          const parsed = JSON.parse(stdin);
          // Only merge JSON objects; scalars/arrays on stdin are discarded.
          if (typeof parsed === "object" && parsed !== null) inputs = { ...inputs, ...parsed };
        } catch {
          // ignore non-json stdin
        }
      }
    }
  } catch {
    // ignore
  }

  if (cliValues) {
    // 2. --input-file (overrides stdin)
    if (cliValues["input-file"]) {
      try {
        const content = await readFile(String(cliValues["input-file"]), "utf-8");
        const parsed = JSON.parse(content);
        if (typeof parsed === "object" && parsed !== null) inputs = { ...inputs, ...parsed };
      } catch (err) {
        // A requested input file that cannot be read/parsed is a hard error.
        console.error("Failed to read input file:", err);
        return 1;
      }
    }

    // 3. --input flags (highest priority)
    if (cliValues.input) {
      // parseArgs with multiple:true yields an array, but normalize anyway.
      const parsed = Array.isArray(cliValues.input) ? cliValues.input : [cliValues.input];
      inputs = { ...inputs, ...parseInputFlags(parsed as string[]) };
    }
  }

  // Hub creation prints its own user-facing error before throwing, so a
  // bare exit code is enough here.
  let hub: EventHub | undefined;
  try {
    hub = await createHubFromFlags(cliValues);
  } catch {
    return 1;
  }

  const options: ExecuteOptions = {
    hub,
    cwd: cliValues?.cwd as string | undefined,
    verbose: cliValues?.verbose as boolean | undefined,
    quiet: cliValues?.quiet as boolean | undefined,
  };

  try {
    const res = await runWorkflowFromFile(filePath, inputs, options);

    // Print resolved output, not full WorkflowResult
    const output = res.output;
    if (output === undefined || output === null) {
      // No output
    } else if (typeof output === "string") {
      // Raw write (no trailing newline) so string output can be piped as-is.
      process.stdout.write(output);
    } else {
      console.log(JSON.stringify(output, null, 2));
    }

    // Exit 2 distinguishes "workflow ran but failed" from setup errors (1).
    return res.success ? 0 : 2;
  } catch (err: unknown) {
    const msg = err instanceof Error ? err.message : String(err);
    console.error("Error:", msg);
    if (cliValues?.verbose && err instanceof Error && err.stack) {
      console.error(err.stack);
    }
    return 1;
  } finally {
    // Always release hub resources, on success, failure, or throw.
    await hub?.close();
  }
}
|
|
163
|
+
|
|
164
|
+
export { createHubFromFlags, parseInputFlags };
|
package/src/validate.ts
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
import { readFile } from "node:fs/promises";
|
|
3
|
+
import { dirname } from "node:path";
|
|
4
|
+
import { parseInputFlags } from "./run";
|
|
5
|
+
import { parseWorkflowFile, validateSchema, validateSemantic, validateInputs } from "@duckflux/core";
|
|
6
|
+
|
|
7
|
+
type CLIValues = Record<string, any> | undefined;
|
|
8
|
+
|
|
9
|
+
export default async function validateCommand(filePath?: string, cliValues?: CLIValues): Promise<number> {
|
|
10
|
+
if (!filePath) {
|
|
11
|
+
console.error("Usage: duckflux validate <workflow.yaml> [--input k=v] [--input-file file.json]");
|
|
12
|
+
return 1;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
let inputs: Record<string, unknown> = {};
|
|
16
|
+
|
|
17
|
+
if (cliValues) {
|
|
18
|
+
if (cliValues.input) {
|
|
19
|
+
const parsed = Array.isArray(cliValues.input) ? cliValues.input : [cliValues.input];
|
|
20
|
+
inputs = { ...inputs, ...parseInputFlags(parsed) };
|
|
21
|
+
}
|
|
22
|
+
if (cliValues["input-file"]) {
|
|
23
|
+
try {
|
|
24
|
+
const content = await readFile(String(cliValues["input-file"]), "utf-8");
|
|
25
|
+
const parsed = JSON.parse(content);
|
|
26
|
+
if (typeof parsed === "object" && parsed !== null) inputs = { ...inputs, ...parsed };
|
|
27
|
+
} catch (err) {
|
|
28
|
+
console.error("Failed to read input file:", err);
|
|
29
|
+
return 1;
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
// Try stdin if piped and no inputs provided
|
|
35
|
+
try {
|
|
36
|
+
if (process.stdin && !process.stdin.isTTY && Object.keys(inputs).length === 0) {
|
|
37
|
+
let stdin = "";
|
|
38
|
+
for await (const chunk of process.stdin) {
|
|
39
|
+
stdin += chunk;
|
|
40
|
+
}
|
|
41
|
+
stdin = stdin.trim();
|
|
42
|
+
if (stdin.length > 0) {
|
|
43
|
+
try {
|
|
44
|
+
const parsed = JSON.parse(stdin);
|
|
45
|
+
if (typeof parsed === "object" && parsed !== null) inputs = { ...inputs, ...parsed };
|
|
46
|
+
} catch {
|
|
47
|
+
// ignore non-json stdin
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
} catch {
|
|
52
|
+
// ignore
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
try {
|
|
56
|
+
const workflow = await parseWorkflowFile(filePath);
|
|
57
|
+
|
|
58
|
+
const schemaRes = validateSchema(workflow);
|
|
59
|
+
if (!schemaRes.valid) {
|
|
60
|
+
console.error("Schema validation failed:");
|
|
61
|
+
for (const e of schemaRes.errors) {
|
|
62
|
+
console.error(`- ${e.path}: ${e.message}`);
|
|
63
|
+
}
|
|
64
|
+
return 1;
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
const basePath = dirname(filePath);
|
|
68
|
+
const semanticRes = await validateSemantic(workflow, basePath);
|
|
69
|
+
if (!semanticRes.valid) {
|
|
70
|
+
console.error("Semantic validation failed:");
|
|
71
|
+
for (const e of semanticRes.errors) {
|
|
72
|
+
console.error(`- ${e.path}: ${e.message}`);
|
|
73
|
+
}
|
|
74
|
+
return 1;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
// Validate inputs against declared schema
|
|
78
|
+
const { result: inputsResult, resolved } = validateInputs(workflow.inputs as any, inputs);
|
|
79
|
+
if (!inputsResult.valid) {
|
|
80
|
+
console.error("Input validation failed:");
|
|
81
|
+
for (const e of inputsResult.errors) {
|
|
82
|
+
console.error(`- ${e.path}: ${e.message}`);
|
|
83
|
+
}
|
|
84
|
+
return 1;
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
console.log("valid");
|
|
88
|
+
return 0;
|
|
89
|
+
} catch (err: any) {
|
|
90
|
+
console.error("Error during validate:", err && err.message ? err.message : err);
|
|
91
|
+
return 1;
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
export { validateCommand };
|