pipeline-sdk 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +146 -0
  3. package/package.json +63 -0
  4. package/schemas/pipeline.schema.json +158 -0
  5. package/src/adapters/claude-code.ts +112 -0
  6. package/src/adapters/detector.ts +26 -0
  7. package/src/adapters/generic.ts +30 -0
  8. package/src/adapters/interface.ts +7 -0
  9. package/src/cli/advance.ts +27 -0
  10. package/src/cli/cleanup.ts +55 -0
  11. package/src/cli/helpers.ts +52 -0
  12. package/src/cli/index.ts +92 -0
  13. package/src/cli/init.ts +248 -0
  14. package/src/cli/resume.ts +45 -0
  15. package/src/cli/signal.ts +21 -0
  16. package/src/cli/start.ts +33 -0
  17. package/src/cli/status.ts +24 -0
  18. package/src/cli/template.ts +28 -0
  19. package/src/cli/validate.ts +21 -0
  20. package/src/cli/verify.ts +33 -0
  21. package/src/cli/visualize.ts +36 -0
  22. package/src/core/cleanup.ts +75 -0
  23. package/src/core/evidence.ts +144 -0
  24. package/src/core/gate-runner.ts +109 -0
  25. package/src/core/loader.ts +125 -0
  26. package/src/core/state-machine.ts +119 -0
  27. package/src/daemon/ipc.ts +56 -0
  28. package/src/daemon/server.ts +144 -0
  29. package/src/daemon/state-file.ts +65 -0
  30. package/src/gates/async.ts +60 -0
  31. package/src/gates/builtin.ts +40 -0
  32. package/src/gates/custom.ts +71 -0
  33. package/src/index.ts +20 -0
  34. package/src/mcp/prompts.ts +40 -0
  35. package/src/mcp/resources.ts +71 -0
  36. package/src/mcp/server.ts +211 -0
  37. package/src/mcp/tools.ts +52 -0
  38. package/src/templates/infra-gitops.yaml +37 -0
  39. package/src/templates/sdlc-full.yaml +69 -0
  40. package/src/templates/static-site.yaml +45 -0
  41. package/src/templates/zship.yaml +224 -0
  42. package/src/types.ts +210 -0
@@ -0,0 +1,52 @@
1
+ interface DaemonInfo {
2
+ port: number;
3
+ pid: number;
4
+ }
5
+
6
+ export async function readDaemonPid(dir = ".pipeline"): Promise<DaemonInfo> {
7
+ const pidPath = `${dir}/daemon.pid`;
8
+ const pidFile = Bun.file(pidPath);
9
+
10
+ if (!(await pidFile.exists())) {
11
+ console.error("Error: No running daemon found.");
12
+ console.error("");
13
+ console.error(" Start one with: pipeline start");
14
+ process.exit(1);
15
+ }
16
+
17
+ let info: DaemonInfo;
18
+ try {
19
+ info = JSON.parse(await pidFile.text()) as DaemonInfo;
20
+ } catch {
21
+ console.error("Error: Corrupt PID file at .pipeline/daemon.pid");
22
+ console.error("");
23
+ console.error(" Remove it and restart: rm .pipeline/daemon.pid && pipeline start");
24
+ process.exit(1);
25
+ }
26
+
27
+ if (!info.port || !info.pid) {
28
+ console.error("Error: Invalid PID file — missing port or pid.");
29
+ console.error("");
30
+ console.error(" Remove it and restart: rm .pipeline/daemon.pid && pipeline start");
31
+ process.exit(1);
32
+ }
33
+
34
+ return info;
35
+ }
36
+
37
+ export async function fetchDaemon(
38
+ port: number,
39
+ path: string,
40
+ options?: RequestInit,
41
+ ): Promise<Response> {
42
+ try {
43
+ return await fetch(`http://127.0.0.1:${port}${path}`, options);
44
+ } catch {
45
+ console.error(`Error: Daemon is not responding on port ${port}.`);
46
+ console.error("");
47
+ console.error(" The daemon may have crashed. Try:");
48
+ console.error(" rm .pipeline/daemon.pid");
49
+ console.error(" pipeline start");
50
+ process.exit(1);
51
+ }
52
+ }
@@ -0,0 +1,92 @@
1
#!/usr/bin/env bun
// CLI entry point for pipeline-sdk: wires every `pipeline <subcommand>`
// to its handler module via commander.
import { Command } from "commander";
import { VERSION } from "../index";
import { advanceCommand } from "./advance";
import { cleanupCommand } from "./cleanup";
import { initCommand } from "./init";
import { resumeCommand } from "./resume";
import { signalCommand } from "./signal";
import { startCommand } from "./start";
import { statusCommand } from "./status";
import { templateCommand } from "./template";
import { validateCommand } from "./validate";
import { verifyCommand } from "./verify";
import { visualizeCommand } from "./visualize";

const program = new Command();

program
  .name("pipeline")
  .description("Pipeline SDK — multi-stage AI agent orchestration")
  .version(VERSION);

// Project scaffolding.
program
  .command("init")
  .description("Initialize a pipeline in the current project")
  .option("-t, --template <name>", "Pipeline template to use")
  .action((opts: { template?: string }) => initCommand(opts));

// Daemon lifecycle.
program
  .command("start")
  .description("Start pipeline execution")
  .option("-f, --file <path>", "Path to pipeline.yaml", "pipeline.yaml")
  .action((opts) => startCommand(opts));

program
  .command("status")
  .description("Show current pipeline status")
  .option("-f, --file <path>", "Path to pipeline.yaml", "pipeline.yaml")
  .action((opts) => statusCommand(opts));

// Stage transitions. The positional argument is merged into the options
// object so each handler receives a single parameter.
program
  .command("advance")
  .description("Advance to the next stage by emitting an event")
  .argument("<event>", "Event name to emit")
  .option("-f, --file <path>", "Path to pipeline.yaml", "pipeline.yaml")
  .action((event, opts) => advanceCommand({ ...opts, event }));

program
  .command("signal")
  .description("Send a signal to a waiting async gate")
  .argument("<gate>", "Gate ID to signal")
  .option("-f, --file <path>", "Path to pipeline.yaml", "pipeline.yaml")
  .action((gate, opts) => signalCommand({ ...opts, gate }));

// Inspection commands (operate on the pipeline definition / evidence).
program
  .command("validate")
  .description("Validate a pipeline definition file")
  .option("-f, --file <path>", "Path to pipeline.yaml", "pipeline.yaml")
  .action((opts: { file: string }) => validateCommand(opts));

program
  .command("verify")
  .description("Verify the evidence chain for a completed pipeline")
  .option("-f, --file <path>", "Path to pipeline.yaml", "pipeline.yaml")
  .action((opts) => verifyCommand(opts));

program
  .command("visualize")
  .description("Generate a visual diagram of the pipeline")
  .option("-f, --file <path>", "Path to pipeline.yaml", "pipeline.yaml")
  .option("--format <fmt>", "Output format (mermaid)", "mermaid")
  .action((opts: { file: string; format: string }) => visualizeCommand(opts));

program
  .command("resume")
  .description("Resume a paused pipeline from its last saved state")
  .option("-f, --file <path>", "Path to pipeline.yaml", "pipeline.yaml")
  .action((opts) => resumeCommand(opts));

program
  .command("cleanup")
  .description("Run cleanup handlers for a terminated pipeline")
  .option("-f, --file <path>", "Path to pipeline.yaml", "pipeline.yaml")
  .action((opts) => cleanupCommand(opts));

program
  .command("template")
  .description("List or export built-in pipeline templates")
  .argument("[name]", "Template name to export")
  .action((name, opts) => templateCommand({ ...opts, name }));

program.parse();
@@ -0,0 +1,248 @@
1
+ import { access, mkdir, readFile, writeFile } from "node:fs/promises";
2
+ import { dirname, join } from "node:path";
3
+ import { detectProviders } from "../adapters/detector";
4
+
5
// Fallback pipeline definition used when the requested template cannot be
// read from disk: a single "work" stage that transitions to a terminal
// "done" stage on STAGE_COMPLETE.
// NOTE(review): leading indentation inside this YAML literal was lost in the
// diff rendering and has been reconstructed conventionally — verify against
// schemas/pipeline.schema.json.
const MINIMAL_PIPELINE_YAML = `pipeline:
  id: my-pipeline
  version: 1
  initial: work
  stages:
    work:
      description: Do the work
      on:
        STAGE_COMPLETE:
          target: done
    done:
      description: Pipeline complete
      type: terminal
`;
19
+
20
// Markdown section appended to agent instruction files (CLAUDE.md, GEMINI.md,
// AGENTS.md, .cursorrules) so coding agents know the project is
// pipeline-enforced. appendInstructionFile() uses the "Pipeline Enforcement"
// heading as its idempotency marker.
const PIPELINE_ENFORCEMENT_SECTION = `
## Pipeline Enforcement

This project uses pipeline-sdk for multi-stage AI agent orchestration.

- A \`pipeline.yaml\` at the repo root defines the allowed stage sequence.
- The pipeline daemon enforces gate checks before stage transitions.
- Do not skip gates or jump to later stages without completing prior ones.
- Use \`pipeline status\` to check the current stage at any time.
- Use \`pipeline advance <EVENT>\` to emit transition events.
- Evidence is recorded in \`.pipeline/evidence/\` for each completed stage.
`;
32
+
33
+ async function fileExists(path: string): Promise<boolean> {
34
+ try {
35
+ await access(path);
36
+ return true;
37
+ } catch {
38
+ return false;
39
+ }
40
+ }
41
+
42
/** Create `path` (and any missing parents); succeeds if it already exists. */
async function ensureDir(path: string): Promise<void> {
  await mkdir(path, { recursive: true });
}
45
+
46
+ async function readTemplate(templateName: string): Promise<string> {
47
+ const templatePath = join(
48
+ dirname(import.meta.url.replace("file://", "")),
49
+ "../templates",
50
+ `${templateName}.yaml`,
51
+ );
52
+ try {
53
+ return await readFile(templatePath, "utf-8");
54
+ } catch {
55
+ // Fall back to inline minimal template
56
+ return MINIMAL_PIPELINE_YAML;
57
+ }
58
+ }
59
+
60
+ async function registerMcpInClaude(cwd: string): Promise<void> {
61
+ const settingsPath = join(cwd, ".claude", "settings.json");
62
+ let settings: Record<string, unknown> = {};
63
+ try {
64
+ const raw = await readFile(settingsPath, "utf-8");
65
+ settings = JSON.parse(raw) as Record<string, unknown>;
66
+ } catch {
67
+ // Start fresh
68
+ }
69
+
70
+ const mcpServers = (settings.mcpServers as Record<string, unknown> | undefined) ?? {};
71
+ if (!mcpServers["pipeline-sdk"]) {
72
+ await writeFile(
73
+ settingsPath,
74
+ JSON.stringify(
75
+ {
76
+ ...settings,
77
+ mcpServers: {
78
+ ...mcpServers,
79
+ "pipeline-sdk": {
80
+ command: "bun",
81
+ args: ["run", "node_modules/pipeline-sdk/src/mcp/server.ts"],
82
+ },
83
+ },
84
+ },
85
+ null,
86
+ 2,
87
+ ),
88
+ "utf-8",
89
+ );
90
+ }
91
+ }
92
+
93
+ async function registerMcpInCursor(cwd: string): Promise<void> {
94
+ const mcpPath = join(cwd, ".cursor", "mcp.json");
95
+ let config: Record<string, unknown> = {};
96
+ try {
97
+ const raw = await readFile(mcpPath, "utf-8");
98
+ config = JSON.parse(raw) as Record<string, unknown>;
99
+ } catch {
100
+ // Start fresh
101
+ }
102
+
103
+ const servers = (config.mcpServers as Record<string, unknown> | undefined) ?? {};
104
+ if (!servers["pipeline-sdk"]) {
105
+ await writeFile(
106
+ mcpPath,
107
+ JSON.stringify(
108
+ {
109
+ ...config,
110
+ mcpServers: {
111
+ ...servers,
112
+ "pipeline-sdk": {
113
+ command: "bun",
114
+ args: ["run", "node_modules/pipeline-sdk/src/mcp/server.ts"],
115
+ },
116
+ },
117
+ },
118
+ null,
119
+ 2,
120
+ ),
121
+ "utf-8",
122
+ );
123
+ }
124
+ }
125
+
126
+ async function registerMcpInGemini(cwd: string): Promise<void> {
127
+ const settingsPath = join(cwd, ".gemini", "settings.json");
128
+ let settings: Record<string, unknown> = {};
129
+ try {
130
+ const raw = await readFile(settingsPath, "utf-8");
131
+ settings = JSON.parse(raw) as Record<string, unknown>;
132
+ } catch {
133
+ // Start fresh
134
+ }
135
+
136
+ const tools = (settings.tools as Record<string, unknown> | undefined) ?? {};
137
+ const mcpServers = (tools.mcpServers as Record<string, unknown> | undefined) ?? {};
138
+ if (!mcpServers["pipeline-sdk"]) {
139
+ await writeFile(
140
+ settingsPath,
141
+ JSON.stringify(
142
+ {
143
+ ...settings,
144
+ tools: {
145
+ ...tools,
146
+ mcpServers: {
147
+ ...mcpServers,
148
+ "pipeline-sdk": {
149
+ command: "bun",
150
+ args: ["run", "node_modules/pipeline-sdk/src/mcp/server.ts"],
151
+ },
152
+ },
153
+ },
154
+ },
155
+ null,
156
+ 2,
157
+ ),
158
+ "utf-8",
159
+ );
160
+ }
161
+ }
162
+
163
+ async function appendInstructionFile(filePath: string): Promise<void> {
164
+ let existing = "";
165
+ try {
166
+ existing = await readFile(filePath, "utf-8");
167
+ } catch {
168
+ // File doesn't exist yet — will create it
169
+ }
170
+
171
+ if (existing.includes("Pipeline Enforcement")) {
172
+ return; // Already present
173
+ }
174
+
175
+ await writeFile(filePath, existing + PIPELINE_ENFORCEMENT_SECTION, "utf-8");
176
+ }
177
+
178
+ export async function initCommand(opts: { template?: string }): Promise<void> {
179
+ const cwd = process.cwd();
180
+ const templateName = opts.template ?? "default";
181
+
182
+ console.log(`Initializing pipeline in ${cwd}...`);
183
+
184
+ // 1. Detect providers
185
+ const providers = await detectProviders(cwd);
186
+ console.log(`Detected providers: ${providers.length > 0 ? providers.join(", ") : "(none)"}`);
187
+
188
+ // 2. Write pipeline.yaml
189
+ const pipelinePath = join(cwd, "pipeline.yaml");
190
+ if (await fileExists(pipelinePath)) {
191
+ console.log("pipeline.yaml already exists — skipping.");
192
+ } else {
193
+ const yaml = await readTemplate(templateName);
194
+ await writeFile(pipelinePath, yaml, "utf-8");
195
+ console.log(`Created pipeline.yaml (template: ${templateName})`);
196
+ }
197
+
198
+ // 3. Create .pipeline/evidence/ directory
199
+ await ensureDir(join(cwd, ".pipeline", "evidence"));
200
+ console.log("Created .pipeline/evidence/");
201
+
202
+ // 4. Register MCP server in each detected provider's config
203
+ for (const provider of providers) {
204
+ try {
205
+ if (provider === "claude-code") {
206
+ await registerMcpInClaude(cwd);
207
+ console.log("Registered MCP server in .claude/settings.json");
208
+ } else if (provider === "cursor") {
209
+ await registerMcpInCursor(cwd);
210
+ console.log("Registered MCP server in .cursor/mcp.json");
211
+ } else if (provider === "gemini-cli") {
212
+ await registerMcpInGemini(cwd);
213
+ console.log("Registered MCP server in .gemini/settings.json");
214
+ }
215
+ } catch (err: unknown) {
216
+ console.warn(
217
+ `Warning: could not register MCP for ${provider}: ${err instanceof Error ? err.message : String(err)}`,
218
+ );
219
+ }
220
+ }
221
+
222
+ // 5. Generate instruction file additions
223
+ const instructionFiles: Array<{ file: string; provider: string }> = [
224
+ { file: "CLAUDE.md", provider: "claude-code" },
225
+ { file: "GEMINI.md", provider: "gemini-cli" },
226
+ { file: "AGENTS.md", provider: "generic" },
227
+ { file: ".cursorrules", provider: "cursor" },
228
+ ];
229
+
230
+ for (const { file, provider } of instructionFiles) {
231
+ // Write for all detected providers, or AGENTS.md always
232
+ const shouldWrite =
233
+ provider === "generic" || providers.includes(provider) || providers.length === 0;
234
+ if (shouldWrite) {
235
+ try {
236
+ await appendInstructionFile(join(cwd, file));
237
+ console.log(`Updated ${file} with Pipeline Enforcement section`);
238
+ } catch (err: unknown) {
239
+ console.warn(
240
+ `Warning: could not update ${file}: ${err instanceof Error ? err.message : String(err)}`,
241
+ );
242
+ }
243
+ }
244
+ }
245
+
246
+ console.log("\nPipeline initialized successfully.");
247
+ console.log("Run `pipeline validate` to check your pipeline definition.");
248
+ }
@@ -0,0 +1,45 @@
1
+ import { loadPipeline } from "../core/loader";
2
+ import { PipelineDaemon } from "../daemon/server";
3
+ import { StateFile } from "../daemon/state-file";
4
+ import type { PipelineDefinition } from "../types";
5
+
6
+ export async function resumeCommand(opts: { file: string }): Promise<void> {
7
+ let pipeline: PipelineDefinition;
8
+ try {
9
+ pipeline = await loadPipeline(opts.file);
10
+ } catch (err: unknown) {
11
+ console.error(`Error: Failed to load pipeline from ${opts.file}`);
12
+ console.error("");
13
+ if ((err as NodeJS.ErrnoException).code === "ENOENT") {
14
+ console.error(" File not found. Are you in the right directory?");
15
+ } else {
16
+ console.error(` ${err instanceof Error ? err.message : String(err)}`);
17
+ }
18
+ process.exit(1);
19
+ }
20
+
21
+ const dir = ".pipeline";
22
+ const stateFile = new StateFile(`${dir}/state.json`);
23
+ const state = await stateFile.read();
24
+
25
+ if (!state) {
26
+ console.error("Error: No saved state found at .pipeline/state.json");
27
+ console.error("");
28
+ console.error(" A pipeline must be started and advanced at least once to create state.");
29
+ console.error(" Start fresh with: pipeline start");
30
+ process.exit(1);
31
+ }
32
+
33
+ try {
34
+ const daemon = new PipelineDaemon(pipeline, dir, state);
35
+ const { port, pid } = await daemon.start();
36
+ console.log(`Daemon resumed on port ${port} (PID ${pid})`);
37
+ console.log(`Pipeline: ${state.pipeline_id} | Stage: ${state.current_stage}`);
38
+ await new Promise(() => {});
39
+ } catch (err: unknown) {
40
+ console.error("Error: Failed to resume daemon.");
41
+ console.error("");
42
+ console.error(` ${err instanceof Error ? err.message : String(err)}`);
43
+ process.exit(1);
44
+ }
45
+ }
@@ -0,0 +1,21 @@
1
+ import { fetchDaemon, readDaemonPid } from "./helpers";
2
+
3
+ export async function signalCommand(opts: { file: string; gate: string }): Promise<void> {
4
+ const { port } = await readDaemonPid();
5
+ const res = await fetchDaemon(port, "/api/signal", {
6
+ method: "POST",
7
+ headers: { "Content-Type": "application/json" },
8
+ body: JSON.stringify({ gate_id: opts.gate, signal: "LGTM" }),
9
+ });
10
+ const result = (await res.json()) as { gate_cleared: boolean; gate_id?: string };
11
+
12
+ if (result.gate_cleared) {
13
+ console.log(`Signal sent to gate: ${result.gate_id}`);
14
+ process.exit(0);
15
+ }
16
+
17
+ console.error(`Error: Failed to signal gate "${opts.gate}".`);
18
+ console.error("");
19
+ console.error(" Check available gates with: pipeline status");
20
+ process.exit(1);
21
+ }
@@ -0,0 +1,33 @@
1
+ import { loadPipeline } from "../core/loader";
2
+ import { PipelineDaemon } from "../daemon/server";
3
+ import type { PipelineDefinition } from "../types";
4
+
5
+ export async function startCommand(opts: { file: string }): Promise<void> {
6
+ let pipeline: PipelineDefinition;
7
+ try {
8
+ pipeline = await loadPipeline(opts.file);
9
+ } catch (err: unknown) {
10
+ console.error(`Error: Failed to load pipeline from ${opts.file}`);
11
+ console.error("");
12
+ if ((err as NodeJS.ErrnoException).code === "ENOENT") {
13
+ console.error(" File not found. Create one with: pipeline init");
14
+ } else {
15
+ console.error(` ${err instanceof Error ? err.message : String(err)}`);
16
+ }
17
+ process.exit(1);
18
+ }
19
+
20
+ try {
21
+ const dir = ".pipeline";
22
+ const daemon = new PipelineDaemon(pipeline, dir);
23
+ const { port, pid } = await daemon.start();
24
+ console.log(`Daemon started on port ${port} (PID ${pid})`);
25
+ console.log(`Pipeline: ${pipeline.id} | Stage: ${pipeline.initial}`);
26
+ await new Promise(() => {});
27
+ } catch (err: unknown) {
28
+ console.error("Error: Failed to start daemon.");
29
+ console.error("");
30
+ console.error(` ${err instanceof Error ? err.message : String(err)}`);
31
+ process.exit(1);
32
+ }
33
+ }
@@ -0,0 +1,24 @@
1
+ import { fetchDaemon, readDaemonPid } from "./helpers";
2
+
3
+ export async function statusCommand(_opts: { file: string }): Promise<void> {
4
+ const { port } = await readDaemonPid();
5
+ const res = await fetchDaemon(port, "/api/status");
6
+ const status = (await res.json()) as Record<string, unknown>;
7
+
8
+ console.log(`Stage: ${status.stage}`);
9
+ if (status.started_at) console.log(`Started: ${status.started_at}`);
10
+
11
+ const completed = status.completed_stages as string[] | undefined;
12
+ if (completed?.length) console.log(`Completed: ${completed.join(" → ")}`);
13
+
14
+ const events = status.allowed_events as string[] | undefined;
15
+ if (events?.length) console.log(`Events: ${events.join(", ")}`);
16
+
17
+ const tools = status.allowed_tools as string[] | undefined;
18
+ if (tools?.length) console.log(`Tools: ${tools.join(", ")}`);
19
+
20
+ const gates = status.gates_remaining as string[] | undefined;
21
+ if (gates?.length) console.log(`Gates: ${gates.join(", ")}`);
22
+
23
+ process.exit(0);
24
+ }
@@ -0,0 +1,28 @@
1
+ import { readdir, readFile, writeFile } from "node:fs/promises";
2
+ import { join } from "node:path";
3
+
4
+ const TEMPLATES_DIR = join(import.meta.dir, "../templates");
5
+
6
+ export async function templateCommand(opts: Record<string, unknown>): Promise<void> {
7
+ const name = opts.name as string | undefined;
8
+
9
+ if (!name) {
10
+ // List mode
11
+ const files = await readdir(TEMPLATES_DIR);
12
+ const templates = files.filter((f) => f.endsWith(".yaml")).map((f) => f.replace(".yaml", ""));
13
+ console.log("Available templates:");
14
+ for (const t of templates) console.log(` - ${t}`);
15
+ return;
16
+ }
17
+
18
+ // Use mode
19
+ const src = join(TEMPLATES_DIR, `${name}.yaml`);
20
+ try {
21
+ const content = await readFile(src, "utf-8");
22
+ await writeFile(join(process.cwd(), "pipeline.yaml"), content);
23
+ console.log(`Applied template "${name}" to pipeline.yaml`);
24
+ } catch {
25
+ console.error(`Template "${name}" not found.`);
26
+ process.exit(1);
27
+ }
28
+ }
@@ -0,0 +1,21 @@
1
+ import { loadPipeline } from "../core/loader";
2
+
3
+ export async function validateCommand(opts: { file: string }): Promise<void> {
4
+ try {
5
+ const pipeline = await loadPipeline(opts.file);
6
+
7
+ const terminalStages = Object.entries(pipeline.stages)
8
+ .filter(([, stage]) => stage.type === "terminal")
9
+ .map(([id]) => id);
10
+
11
+ console.log(`Pipeline: ${pipeline.id}`);
12
+ console.log(` Stages : ${Object.keys(pipeline.stages).length}`);
13
+ console.log(` Initial : ${pipeline.initial}`);
14
+ console.log(` Terminal : ${terminalStages.length > 0 ? terminalStages.join(", ") : "(none)"}`);
15
+ console.log("Validation passed.");
16
+ process.exit(0);
17
+ } catch (err: unknown) {
18
+ console.error(`Validation failed: ${err instanceof Error ? err.message : String(err)}`);
19
+ process.exit(1);
20
+ }
21
+ }
@@ -0,0 +1,33 @@
1
+ import { EvidenceStore } from "../core/evidence";
2
+ import { loadPipeline } from "../core/loader";
3
+
4
+ export async function verifyCommand(opts: { file: string }): Promise<void> {
5
+ try {
6
+ await loadPipeline(opts.file);
7
+ } catch (err: unknown) {
8
+ console.error(`Error: Failed to load pipeline from ${opts.file}`);
9
+ console.error("");
10
+ if ((err as NodeJS.ErrnoException).code === "ENOENT") {
11
+ console.error(" File not found. Are you in the right directory?");
12
+ } else {
13
+ console.error(` ${err instanceof Error ? err.message : String(err)}`);
14
+ }
15
+ process.exit(1);
16
+ }
17
+
18
+ const evidenceDir = ".pipeline/evidence";
19
+ const store = new EvidenceStore(evidenceDir);
20
+ const result = await store.verify();
21
+
22
+ if (result.valid) {
23
+ console.log(`Evidence chain valid (${result.recordCount} records)`);
24
+ process.exit(0);
25
+ }
26
+
27
+ console.error("Error: Evidence chain verification failed.");
28
+ console.error("");
29
+ for (const err of result.errors) {
30
+ console.error(` - ${err}`);
31
+ }
32
+ process.exit(1);
33
+ }
@@ -0,0 +1,36 @@
1
+ import { loadPipeline } from "../core/loader";
2
+
3
+ export async function visualizeCommand(opts: { format: string; file: string }): Promise<void> {
4
+ try {
5
+ const pipeline = await loadPipeline(opts.file);
6
+
7
+ if (opts.format !== "mermaid") {
8
+ console.error(`Unknown format "${opts.format}". Supported: mermaid`);
9
+ process.exit(1);
10
+ }
11
+
12
+ const lines: string[] = ["stateDiagram-v2"];
13
+
14
+ // Initial arrow
15
+ lines.push(` [*] --> ${pipeline.initial}`);
16
+
17
+ // Transitions
18
+ for (const [stageId, stage] of Object.entries(pipeline.stages)) {
19
+ if (stage.on) {
20
+ for (const [event, transition] of Object.entries(stage.on)) {
21
+ lines.push(` ${stageId} --> ${transition.target} : ${event}`);
22
+ }
23
+ }
24
+ // Terminal stages point to [*]
25
+ if (stage.type === "terminal") {
26
+ lines.push(` ${stageId} --> [*]`);
27
+ }
28
+ }
29
+
30
+ console.log(lines.join("\n"));
31
+ process.exit(0);
32
+ } catch (err: unknown) {
33
+ console.error(`Visualization failed: ${err instanceof Error ? err.message : String(err)}`);
34
+ process.exit(1);
35
+ }
36
+ }