pipeline-sdk 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +146 -0
  3. package/package.json +63 -0
  4. package/schemas/pipeline.schema.json +158 -0
  5. package/src/adapters/claude-code.ts +112 -0
  6. package/src/adapters/detector.ts +26 -0
  7. package/src/adapters/generic.ts +30 -0
  8. package/src/adapters/interface.ts +7 -0
  9. package/src/cli/advance.ts +27 -0
  10. package/src/cli/cleanup.ts +55 -0
  11. package/src/cli/helpers.ts +52 -0
  12. package/src/cli/index.ts +92 -0
  13. package/src/cli/init.ts +248 -0
  14. package/src/cli/resume.ts +45 -0
  15. package/src/cli/signal.ts +21 -0
  16. package/src/cli/start.ts +33 -0
  17. package/src/cli/status.ts +24 -0
  18. package/src/cli/template.ts +28 -0
  19. package/src/cli/validate.ts +21 -0
  20. package/src/cli/verify.ts +33 -0
  21. package/src/cli/visualize.ts +36 -0
  22. package/src/core/cleanup.ts +75 -0
  23. package/src/core/evidence.ts +144 -0
  24. package/src/core/gate-runner.ts +109 -0
  25. package/src/core/loader.ts +125 -0
  26. package/src/core/state-machine.ts +119 -0
  27. package/src/daemon/ipc.ts +56 -0
  28. package/src/daemon/server.ts +144 -0
  29. package/src/daemon/state-file.ts +65 -0
  30. package/src/gates/async.ts +60 -0
  31. package/src/gates/builtin.ts +40 -0
  32. package/src/gates/custom.ts +71 -0
  33. package/src/index.ts +20 -0
  34. package/src/mcp/prompts.ts +40 -0
  35. package/src/mcp/resources.ts +71 -0
  36. package/src/mcp/server.ts +211 -0
  37. package/src/mcp/tools.ts +52 -0
  38. package/src/templates/infra-gitops.yaml +37 -0
  39. package/src/templates/sdlc-full.yaml +69 -0
  40. package/src/templates/static-site.yaml +45 -0
  41. package/src/templates/zship.yaml +224 -0
  42. package/src/types.ts +210 -0
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025-present Anshul
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,146 @@
1
+ # pipeline-sdk
2
+
3
+ YAML-driven pipeline orchestration for AI agents.
4
+
5
+ [![npm version](https://img.shields.io/npm/v/pipeline-sdk)](https://www.npmjs.com/package/pipeline-sdk)
6
+ [![CI](https://github.com/anshul-homelab/pipeline-sdk/actions/workflows/ci.yml/badge.svg)](https://github.com/anshul-homelab/pipeline-sdk/actions/workflows/ci.yml)
7
+ [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE)
8
+
9
+ ## Why?
10
+
11
+ AI coding agents (Claude Code, Cursor, Codex) execute multi-step workflows -- write code, run tests, deploy -- but lack a structured way to enforce stage gates, track evidence, or coordinate handoffs. pipeline-sdk gives your agent a state machine: define stages in YAML, gate transitions on passing tests, and keep a cryptographic audit trail of what happened.
12
+
13
+ ## What is this?
14
+
15
+ Define multi-stage pipelines in YAML with gates, evidence chains, and cleanup handlers. The SDK provides a state machine engine, HTTP daemon, CLI, and MCP server to orchestrate AI agent workflows. Pipelines are declarative, auditable, and resumable.
16
+
17
+ ## Quick Start
18
+
19
+ ```bash
20
+ # Install
21
+ bun add pipeline-sdk
22
+
23
+ # Initialize a pipeline from a template
24
+ bunx pipeline init
25
+
26
+ # Start the daemon
27
+ bunx pipeline start
28
+
29
+ # In another terminal -- check status
30
+ bunx pipeline status
31
+
32
+ # Advance to the next stage
33
+ bunx pipeline advance STAGE_COMPLETE
34
+ ```
35
+
36
+ ## Architecture
37
+
38
+ ```
39
+ pipeline.yaml --> StateMachine --> PipelineDaemon (HTTP localhost:<port>)
40
+ |
41
+ MCP Server <--> AI Agent
42
+ |
43
+ Evidence Chain
44
+ ```
45
+
46
+ The YAML file defines stages and transitions. The state machine enforces the rules. The daemon exposes an HTTP API. The MCP server lets AI agents participate natively. Every transition is recorded in a tamper-evident evidence chain.
47
+
48
+ ## Key Concepts
49
+
50
+ **Stages** -- Named phases of your pipeline (e.g., `dev`, `review`, `deploy`). Each stage can define entry/exit actions, gates, and cleanup handlers.
51
+
52
+ **Events and Transitions** -- Events like `DEV_DONE` trigger transitions between stages. Transitions can be conditional, requiring gates to pass before proceeding.
53
+
54
+ **Gates** -- Preconditions that must be satisfied before a transition completes:
55
+ - *Builtin* -- automated checks (lint, test, typecheck)
56
+ - *Custom* -- shell commands or scripts
57
+ - *Async* -- external signals (human approval, CI completion)
58
+
59
+ **Evidence Chain** -- A cryptographically linked log of every transition, gate result, and signal. Provides a verifiable audit trail for the entire pipeline run.
60
+
61
+ **Adapters** -- Pluggable system for integrating with external tools (CI systems, deployment platforms, notification services).
62
+
63
+ ## CLI Reference
64
+
65
+ | Command | Description |
66
+ |---------|-------------|
67
+ | `pipeline init` | Initialize a new pipeline from a template |
68
+ | `pipeline start` | Start the pipeline daemon |
69
+ | `pipeline status` | Show current stage and pipeline state |
70
+ | `pipeline advance <event>` | Emit an event to trigger a transition |
71
+ | `pipeline signal <gate>` | Signal an async gate as satisfied |
72
+ | `pipeline verify` | Verify evidence chain integrity |
73
+ | `pipeline resume` | Resume pipeline from saved state |
74
+ | `pipeline cleanup` | Run cleanup handlers and stop the daemon |
75
+ | `pipeline validate` | Validate pipeline.yaml against the schema |
76
+ | `pipeline visualize` | Generate a Mermaid state diagram |
77
+ | `pipeline template [name]` | List or apply a built-in template |
78
+
79
+ ## Built-in Templates
80
+
81
+ | Template | Description |
82
+ |----------|-------------|
83
+ | `sdlc-full` | Complete software development lifecycle with review and deploy gates |
84
+ | `static-site` | Web portfolio pipeline with accessibility gates |
85
+ | `zship` | Dual-model development workflow (Claude + Codex) |
86
+ | `infra-gitops` | Infrastructure deployment with approval gates and rollback |
87
+
88
+ ## MCP Integration
89
+
90
+ The SDK exposes an MCP server so AI agents can participate in pipelines natively:
91
+
92
+ - **4 tools** -- `check`, `advance`, `signal`, `status`
93
+ - **3 resources** -- current state, stage instructions, gate status
94
+ - **1 prompt** -- stage context for the active phase
95
+
96
+ The MCP server is created programmatically via `createMcpServer()` and communicates over stdio. See [MCP Integration](docs/mcp-integration.md) for setup details. AI agents like Claude Code can then query pipeline state, advance stages, and signal gates through the standard MCP protocol.
97
+
98
+ ## API Usage
99
+
100
+ ```typescript
101
+ import { loadPipeline, StateMachine } from "pipeline-sdk";
102
+
103
+ const pipeline = await loadPipeline("pipeline.yaml");
104
+ const sm = new StateMachine(pipeline);
105
+
106
+ console.log(sm.currentStage); // "dev"
107
+
108
+ const result = sm.transition("DEV_DONE");
109
+ console.log(result.newStage); // "review"
110
+ ```
111
+
112
+ ### Evidence Store
113
+
114
+ ```typescript
115
+ import { EvidenceStore } from "pipeline-sdk";
116
+
117
+ const store = new EvidenceStore(".pipeline/evidence");
118
+ const result = await store.verify();
119
+ console.log(result.valid, result.recordCount);
120
+ ```
121
+
122
+ ## Documentation
123
+
124
+ - [Getting Started](docs/getting-started.md)
125
+ - [CLI Reference](docs/cli-reference.md)
126
+ - [Pipeline YAML Reference](docs/pipeline-yaml-reference.md)
127
+ - [Architecture](docs/architecture.md)
128
+ - [MCP Integration](docs/mcp-integration.md)
129
+
130
+ ## Development
131
+
132
+ ```bash
133
+ bun install
134
+ bun test # 284 tests
135
+ bun run lint # Biome
136
+ bun run typecheck # TypeScript
137
+ bun run build # Compile to binary
138
+ ```
139
+
140
+ ## Contributing
141
+
142
+ See [CONTRIBUTING.md](CONTRIBUTING.md) for development setup, commit conventions, and PR guidelines.
143
+
144
+ ## License
145
+
146
+ [MIT](LICENSE)
package/package.json ADDED
@@ -0,0 +1,63 @@
1
+ {
2
+ "name": "pipeline-sdk",
3
+ "version": "0.1.0",
4
+ "description": "YAML-driven pipeline orchestration SDK with CLI, daemon, MCP server, and adapter system",
5
+ "module": "src/index.ts",
6
+ "type": "module",
7
+ "author": "Anshul",
8
+ "license": "MIT",
9
+ "repository": {
10
+ "type": "git",
11
+ "url": "https://github.com/anshul-homelab/pipeline-sdk.git"
12
+ },
13
+ "homepage": "https://github.com/anshul-homelab/pipeline-sdk",
14
+ "bugs": {
15
+ "url": "https://github.com/anshul-homelab/pipeline-sdk/issues"
16
+ },
17
+ "keywords": [
18
+ "pipeline",
19
+ "orchestration",
20
+ "yaml",
21
+ "state-machine",
22
+ "mcp",
23
+ "ai-agent",
24
+ "cli",
25
+ "daemon",
26
+ "gates",
27
+ "evidence-chain"
28
+ ],
29
+ "files": [
30
+ "src/",
31
+ "schemas/",
32
+ "templates/",
33
+ "README.md",
34
+ "LICENSE"
35
+ ],
36
+ "bin": {
37
+ "pipeline": "./src/cli/index.ts"
38
+ },
39
+ "scripts": {
40
+ "dev": "bun run src/cli/index.ts",
41
+ "test": "bun test",
42
+ "lint": "bunx biome check src/",
43
+ "format": "bunx biome format --write src/",
44
+ "typecheck": "bunx tsc --noEmit",
45
+ "build": "bun build --compile src/cli/index.ts --outfile dist/pipeline"
46
+ },
47
+ "devDependencies": {
48
+ "@biomejs/biome": "^2.4.10",
49
+ "@commitlint/cli": "^20.5.0",
50
+ "@commitlint/config-conventional": "^20.5.0",
51
+ "@types/bun": "latest",
52
+ "lefthook": "^2.1.4"
53
+ },
54
+ "peerDependencies": {
55
+ "typescript": "^5"
56
+ },
57
+ "dependencies": {
58
+ "@modelcontextprotocol/sdk": "^1.29.0",
59
+ "ajv": "^8.18.0",
60
+ "commander": "^14.0.3",
61
+ "yaml": "^2.8.3"
62
+ }
63
+ }
package/schemas/pipeline.schema.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "$schema": "http://json-schema.org/draft-07/schema#",
3
+ "title": "Pipeline Definition",
4
+ "type": "object",
5
+ "required": ["pipeline"],
6
+ "additionalProperties": false,
7
+ "properties": {
8
+ "pipeline": {
9
+ "type": "object",
10
+ "required": ["id", "version", "initial", "stages"],
11
+ "additionalProperties": false,
12
+ "properties": {
13
+ "id": { "type": "string", "minLength": 1 },
14
+ "version": { "type": "number" },
15
+ "description": { "type": "string" },
16
+ "initial": { "type": "string", "minLength": 1 },
17
+ "execution": {
18
+ "type": "object",
19
+ "additionalProperties": false,
20
+ "properties": {
21
+ "provider": { "type": "string" },
22
+ "mode": { "type": "string", "enum": ["daemon", "binary"] },
23
+ "timeout": { "type": "string" },
24
+ "evidence_dir": { "type": "string" },
25
+ "webhook_port": { "type": "number" },
26
+ "webhook_secret": { "type": "string" }
27
+ }
28
+ },
29
+ "stages": {
30
+ "type": "object",
31
+ "minProperties": 1,
32
+ "additionalProperties": {
33
+ "$ref": "#/definitions/stage"
34
+ }
35
+ },
36
+ "gates": {
37
+ "type": "object",
38
+ "additionalProperties": {
39
+ "$ref": "#/definitions/gate"
40
+ }
41
+ },
42
+ "conditions": {
43
+ "type": "object",
44
+ "additionalProperties": {
45
+ "$ref": "#/definitions/condition"
46
+ }
47
+ }
48
+ }
49
+ }
50
+ },
51
+ "definitions": {
52
+ "stage": {
53
+ "type": "object",
54
+ "additionalProperties": false,
55
+ "properties": {
56
+ "description": { "type": "string" },
57
+ "type": { "type": "string", "enum": ["terminal", "parallel-child"] },
58
+ "agent": {
59
+ "type": "object",
60
+ "additionalProperties": false,
61
+ "properties": {
62
+ "instructions": { "type": "string" },
63
+ "tools": { "type": "array", "items": { "type": "string" } },
64
+ "model": { "type": "string" },
65
+ "mode": { "type": "string" }
66
+ }
67
+ },
68
+ "gates": {
69
+ "type": "object",
70
+ "additionalProperties": false,
71
+ "properties": {
72
+ "entry": { "type": "array", "items": { "type": "string" } },
73
+ "exit": { "type": "array", "items": { "type": "string" } }
74
+ }
75
+ },
76
+ "on": {
77
+ "type": "object",
78
+ "additionalProperties": {
79
+ "$ref": "#/definitions/transition"
80
+ }
81
+ },
82
+ "parallel": { "type": "array", "items": { "type": "string" } },
83
+ "when": { "type": "string" },
84
+ "max_retries": { "type": "number", "minimum": 0 },
85
+ "cleanup": {
86
+ "type": "array",
87
+ "items": {
88
+ "type": "object",
89
+ "required": ["script"],
90
+ "additionalProperties": false,
91
+ "properties": {
92
+ "script": { "type": "string" },
93
+ "inputs": { "type": "object" }
94
+ }
95
+ }
96
+ },
97
+ "on_enter": {
98
+ "type": "array",
99
+ "items": {
100
+ "type": "object",
101
+ "required": ["script"],
102
+ "additionalProperties": false,
103
+ "properties": {
104
+ "script": { "type": "string" },
105
+ "inputs": { "type": "object" }
106
+ }
107
+ }
108
+ },
109
+ "trigger": {
110
+ "type": "object",
111
+ "required": ["type", "cron"],
112
+ "additionalProperties": false,
113
+ "properties": {
114
+ "type": { "type": "string", "enum": ["schedule"] },
115
+ "cron": { "type": "string" }
116
+ }
117
+ }
118
+ }
119
+ },
120
+ "transition": {
121
+ "type": "object",
122
+ "required": ["target"],
123
+ "additionalProperties": false,
124
+ "properties": {
125
+ "target": { "type": "string", "minLength": 1 },
126
+ "guard": { "type": "string" }
127
+ }
128
+ },
129
+ "gate": {
130
+ "type": "object",
131
+ "required": ["type"],
132
+ "additionalProperties": false,
133
+ "properties": {
134
+ "type": { "type": "string", "enum": ["builtin", "custom", "async"] },
135
+ "command": { "type": "string" },
136
+ "script": { "type": "string" },
137
+ "protocol": { "type": "string" },
138
+ "signal": { "type": "string" },
139
+ "timeout": { "type": "string" },
140
+ "escalate_after": { "type": "string" },
141
+ "expect": { "type": "object" },
142
+ "inputs": { "type": "object" },
143
+ "when": { "type": "string" }
144
+ }
145
+ },
146
+ "condition": {
147
+ "type": "object",
148
+ "required": ["check"],
149
+ "additionalProperties": false,
150
+ "properties": {
151
+ "check": { "type": "string" },
152
+ "pattern": { "type": "string" },
153
+ "path": { "type": "string" },
154
+ "not": { "type": "string" }
155
+ }
156
+ }
157
+ }
158
+ }
package/src/adapters/claude-code.ts ADDED
@@ -0,0 +1,112 @@
1
+ import { execFileSync } from "node:child_process";
+ import { mkdir, readFile, writeFile } from "node:fs/promises";
+ import { dirname, join } from "node:path";
+ import type { ProviderCapabilities, StageOutput, StageRequest } from "../types";
+ import type { ProviderAdapter } from "./interface";
6
+
7
+ export class ClaudeCodeAdapter implements ProviderAdapter {
8
+ getCapabilities(): ProviderCapabilities {
9
+ return {
10
+ name: "claude-code",
11
+ hooks: true,
12
+ parallel: true,
13
+ session: true,
14
+ headless: true,
15
+ mcp: true,
16
+ };
17
+ }
18
+
19
+ async invoke(request: StageRequest): Promise<StageOutput> {
20
+ const start = Date.now();
21
+ const prompt = [
22
+ `Stage: ${request.stage_id}`,
23
+ `Instructions: ${request.instructions}`,
24
+ `Tools: ${request.tools.join(", ")}`,
25
+ `Context: ${JSON.stringify(request.context)}`,
26
+ ].join("\n");
27
+
28
+ let stdout = "";
29
+ let exit_code = 0;
30
+
31
+ try {
32
+ stdout = execSync(`claude --print ${JSON.stringify(prompt)}`, {
33
+ encoding: "utf-8",
34
+ timeout: request.timeout ?? 300_000,
35
+ });
36
+ } catch (err: unknown) {
37
+ exit_code = (err as NodeJS.ErrnoException & { status?: number }).status ?? 1;
38
+ stdout = (err as NodeJS.ErrnoException & { stdout?: string }).stdout ?? "";
39
+ }
40
+
41
+ return {
42
+ artifacts: [],
43
+ stdout,
44
+ exit_code,
45
+ duration_ms: Date.now() - start,
46
+ };
47
+ }
48
+
49
+ async installHook(config: { socketPath?: string; httpPort?: number }): Promise<() => void> {
50
+ const settingsPath = join(process.cwd(), ".claude", "settings.json");
51
+
52
+ let settings: Record<string, unknown> = {};
53
+ try {
54
+ const raw = await readFile(settingsPath, "utf-8");
55
+ settings = JSON.parse(raw) as Record<string, unknown>;
56
+ } catch {
57
+ // File may not exist yet — start fresh
58
+ }
59
+
60
+ const hookEntry: Record<string, unknown> = {
61
+ type: "pipeline-sdk",
62
+ ...(config.socketPath ? { socketPath: config.socketPath } : {}),
63
+ ...(config.httpPort !== undefined ? { httpPort: config.httpPort } : {}),
64
+ };
65
+
66
+ const existing = (settings.hooks as Record<string, unknown> | undefined) ?? {};
67
+ const preToolUse = (existing.PreToolUse as unknown[]) ?? [];
68
+
69
+ const alreadyInstalled = preToolUse.some(
70
+ (h) => (h as Record<string, unknown>).type === "pipeline-sdk",
71
+ );
72
+
73
+ if (!alreadyInstalled) {
74
+ const updated = {
75
+ ...settings,
76
+ hooks: {
77
+ ...existing,
78
+ PreToolUse: [...preToolUse, hookEntry],
79
+ },
80
+ };
81
+ await mkdir(dirname(settingsPath), { recursive: true });
82
+ await writeFile(settingsPath, JSON.stringify(updated, null, 2), "utf-8");
83
+ }
84
+
85
+ return async () => {
86
+ let current: Record<string, unknown> = {};
87
+ try {
88
+ const raw = await readFile(settingsPath, "utf-8");
89
+ current = JSON.parse(raw) as Record<string, unknown>;
90
+ } catch {
91
+ return;
92
+ }
93
+
94
+ const hooks = (current.hooks as Record<string, unknown> | undefined) ?? {};
95
+ const list = (hooks.PreToolUse as unknown[]) ?? [];
96
+ const filtered = list.filter((h) => (h as Record<string, unknown>).type !== "pipeline-sdk");
97
+
98
+ await writeFile(
99
+ settingsPath,
100
+ JSON.stringify(
101
+ {
102
+ ...current,
103
+ hooks: { ...hooks, PreToolUse: filtered },
104
+ },
105
+ null,
106
+ 2,
107
+ ),
108
+ "utf-8",
109
+ );
110
+ };
111
+ }
112
+ }
package/src/adapters/detector.ts ADDED
@@ -0,0 +1,26 @@
1
+ import { access } from "node:fs/promises";
2
+ import { join } from "node:path";
3
+
4
+ const PROVIDER_DIRS: Array<{ dir: string; name: string }> = [
5
+ { dir: ".claude", name: "claude-code" },
6
+ { dir: ".cursor", name: "cursor" },
7
+ { dir: ".gemini", name: "gemini-cli" },
8
+ { dir: ".codex", name: "codex-cli" },
9
+ { dir: ".windsurf", name: "windsurf" },
10
+ { dir: ".vscode", name: "vscode" },
11
+ ];
12
+
13
+ export async function detectProviders(cwd: string): Promise<string[]> {
14
+ const detected: string[] = [];
15
+
16
+ for (const { dir, name } of PROVIDER_DIRS) {
17
+ try {
18
+ await access(join(cwd, dir));
19
+ detected.push(name);
20
+ } catch {
21
+ // directory not present — skip
22
+ }
23
+ }
24
+
25
+ return detected;
26
+ }
package/src/adapters/generic.ts ADDED
@@ -0,0 +1,30 @@
1
+ import type { ProviderCapabilities, StageOutput, StageRequest } from "../types";
2
+ import type { ProviderAdapter } from "./interface";
3
+
4
+ export class GenericAdapter implements ProviderAdapter {
5
+ getCapabilities(): ProviderCapabilities {
6
+ return {
7
+ name: "generic",
8
+ hooks: false,
9
+ parallel: false,
10
+ session: false,
11
+ headless: true,
12
+ mcp: true,
13
+ };
14
+ }
15
+
16
+ async invoke(request: StageRequest): Promise<StageOutput> {
17
+ const start = Date.now();
18
+ return {
19
+ artifacts: [],
20
+ stdout: `Generic adapter: stage "${request.stage_id}" invoked. No provider-specific execution available.`,
21
+ exit_code: 0,
22
+ duration_ms: Date.now() - start,
23
+ };
24
+ }
25
+
26
+ async installHook(_config: { socketPath?: string; httpPort?: number }): Promise<() => void> {
27
+ // Generic adapter has no hook mechanism — no-op
28
+ return async () => {};
29
+ }
30
+ }
package/src/adapters/interface.ts ADDED
@@ -0,0 +1,7 @@
1
+ import type { ProviderCapabilities, StageOutput, StageRequest } from "../types";
2
+
3
/**
 * Contract implemented by provider integrations (e.g. ClaudeCodeAdapter,
 * GenericAdapter) so the pipeline engine can drive any agent tool uniformly.
 */
export interface ProviderAdapter {
  /** Reports the feature set this provider supports (hooks, parallelism, sessions, MCP, …). */
  getCapabilities(): ProviderCapabilities;
  /** Executes a single pipeline stage and resolves with its captured output and exit status. */
  invoke(request: StageRequest): Promise<StageOutput>;
  /**
   * Installs the provider-side hook that delivers events to the given endpoint
   * (unix socket path and/or HTTP port). Resolves to an uninstall function
   * that reverses the installation.
   */
  installHook(config: { socketPath?: string; httpPort?: number }): Promise<() => void>;
}
package/src/cli/advance.ts ADDED
@@ -0,0 +1,27 @@
1
+ import { fetchDaemon, readDaemonPid } from "./helpers";
2
+
3
+ export async function advanceCommand(opts: { file: string; event: string }): Promise<void> {
4
+ const { port } = await readDaemonPid();
5
+ const res = await fetchDaemon(port, "/api/advance", {
6
+ method: "POST",
7
+ headers: { "Content-Type": "application/json" },
8
+ body: JSON.stringify({ event: opts.event }),
9
+ });
10
+ const result = (await res.json()) as {
11
+ transitioned: boolean;
12
+ new_stage?: string;
13
+ error?: string;
14
+ };
15
+
16
+ if (result.transitioned) {
17
+ console.log(`Advanced to stage: ${result.new_stage}`);
18
+ process.exit(0);
19
+ }
20
+
21
+ console.error(`Error: Transition failed for event "${opts.event}".`);
22
+ console.error("");
23
+ if (result.error) console.error(` ${result.error}`);
24
+ console.error("");
25
+ console.error(" Check allowed events with: pipeline status");
26
+ process.exit(1);
27
+ }
package/src/cli/cleanup.ts ADDED
@@ -0,0 +1,55 @@
1
+ import { CleanupManager } from "../core/cleanup";
2
+ import { loadPipeline } from "../core/loader";
3
+ import { StateFile } from "../daemon/state-file";
4
+ import type { PipelineDefinition } from "../types";
5
+
6
+ export async function cleanupCommand(opts: { file: string }): Promise<void> {
7
+ let pipeline: PipelineDefinition;
8
+ try {
9
+ pipeline = await loadPipeline(opts.file);
10
+ } catch (err: unknown) {
11
+ console.error(`Error: Failed to load pipeline from ${opts.file}`);
12
+ console.error("");
13
+ if ((err as NodeJS.ErrnoException).code === "ENOENT") {
14
+ console.error(" File not found. Are you in the right directory?");
15
+ } else {
16
+ console.error(` ${err instanceof Error ? err.message : String(err)}`);
17
+ }
18
+ process.exit(1);
19
+ }
20
+
21
+ const dir = ".pipeline";
22
+ const stateFile = new StateFile(`${dir}/state.json`);
23
+ const state = await stateFile.read();
24
+
25
+ const manager = new CleanupManager();
26
+ const completedStages = state?.stages_completed ?? [];
27
+ for (const stageId of completedStages) {
28
+ const stageDef = pipeline.stages[stageId];
29
+ if (stageDef?.cleanup) {
30
+ manager.register(stageId, stageDef.cleanup);
31
+ }
32
+ }
33
+
34
+ const results = await manager.runAll(process.cwd());
35
+ for (const r of results) {
36
+ const icon = r.success ? "OK" : "FAIL";
37
+ console.log(` [${icon}] ${r.stage}: ${r.script}${r.error ? ` — ${r.error}` : ""}`);
38
+ }
39
+
40
+ // Kill daemon if running
41
+ const pidFile = Bun.file(`${dir}/daemon.pid`);
42
+ if (await pidFile.exists()) {
43
+ try {
44
+ const { pid } = JSON.parse(await pidFile.text()) as { port: number; pid: number };
45
+ process.kill(pid);
46
+ } catch {
47
+ // daemon already gone
48
+ }
49
+ const { unlink } = await import("node:fs/promises");
50
+ await unlink(`${dir}/daemon.pid`).catch(() => {});
51
+ }
52
+
53
+ console.log("Cleanup complete");
54
+ process.exit(0);
55
+ }