builderman 1.0.9 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -18,29 +18,34 @@ import { task, pipeline } from "builderman"
18
18
  const task1 = task({
19
19
  name: "lib:build",
20
20
  commands: {
21
- dev: "tsc --watch",
22
21
  build: "tsc",
22
+ dev: {
23
+ run: "tsc --watch",
24
+ readyWhen: (stdout) => {
25
+ // mark this task as ready when the process is watching for file changes
26
+ return stdout.includes("Watching for file changes.")
27
+ },
28
+ },
23
29
  },
24
30
  cwd: "packages/lib",
25
- isReady: (stdout) => {
26
- // mark this this task as ready when the process is watching for file changes
27
- return stdout.includes("Watching for file changes.")
28
- },
29
31
  })
30
32
 
31
33
  const task2 = task({
32
34
  name: "consumer:dev",
33
35
  commands: {
34
- dev: "npm run dev",
35
36
  build: "npm run build",
37
+ dev: "npm run dev",
38
+ deploy: "npm run deploy",
36
39
  },
37
40
  cwd: "packages/consumer",
38
41
  dependencies: [task1],
39
42
  })
40
43
 
41
44
  await pipeline([task1, task2]).run({
42
- onTaskError: (taskName, error) => {
43
- console.error(`[${taskName}] Error: ${error.message}`)
45
+ // default command is "build" if process.env.NODE_ENV is "production", otherwise "dev".
46
+ command: "deploy",
47
+ onTaskBegin: (taskName) => {
48
+ console.log(`[${taskName}] Starting...`)
44
49
  },
45
50
  onTaskComplete: (taskName) => {
46
51
  console.log(`[${taskName}] Complete!`)
@@ -53,3 +58,304 @@ await pipeline([task1, task2]).run({
53
58
  },
54
59
  })
55
60
  ```
61
+
62
+ ## Error Handling
63
+
64
+ Pipeline errors are provided as `PipelineError` instances with error codes for easier handling:
65
+
66
+ ```ts
67
+ import { pipeline, PipelineError } from "builderman"
68
+
69
+ await pipeline([task1, task2]).run({
70
+ onPipelineError: (error) => {
71
+ switch (error.code) {
72
+ case PipelineError.Aborted:
73
+ console.error("Pipeline was cancelled")
74
+ break
75
+ case PipelineError.TaskFailed:
76
+ console.error(`Task failed: ${error.message}`)
77
+ break
78
+ case PipelineError.ProcessTerminated:
79
+ console.error("Process was terminated")
80
+ break
81
+ case PipelineError.InvalidTask:
82
+ console.error(`Invalid task configuration: ${error.message}`)
83
+ break
84
+ case PipelineError.InvalidSignal:
85
+ console.error("Invalid abort signal")
86
+ break
87
+ }
88
+ },
89
+ })
90
+ ```
91
+
92
+ ## Cancellation
93
+
94
+ You can cancel a running pipeline by providing an `AbortSignal`:
95
+
96
+ ```ts
97
+ import { pipeline, PipelineError } from "builderman"
98
+
99
+ const abortController = new AbortController()
100
+
101
+ const runPromise = pipeline([task1, task2]).run({
102
+ signal: abortController.signal,
103
+ onPipelineError: (error) => {
104
+ if (error.code === PipelineError.Aborted) {
105
+ console.error("Pipeline was cancelled")
106
+ }
107
+ },
108
+ })
109
+
110
+ // Cancel the pipeline after 5 seconds
111
+ setTimeout(() => {
112
+ abortController.abort()
113
+ }, 5000)
114
+
115
+ try {
116
+ await runPromise
117
+ } catch (error) {
118
+ if (error instanceof PipelineError && error.code === PipelineError.Aborted) {
119
+ // Pipeline was cancelled
120
+ }
121
+ }
122
+ ```
123
+
124
+ ## Teardown
125
+
126
+ Tasks can specify teardown commands that run automatically when the task completes or fails. Teardowns are executed in reverse dependency order (dependents before dependencies) to ensure proper cleanup.
127
+
128
+ ### Basic Teardown
129
+
130
+ ```ts
131
+ const dbTask = task({
132
+ name: "database",
133
+ commands: {
134
+ dev: {
135
+ run: "docker-compose up",
136
+ teardown: "docker-compose down",
137
+ },
138
+ build: "echo build",
139
+ },
140
+ cwd: ".",
141
+ })
142
+ ```
143
+
144
+ ### Teardown Callbacks
145
+
146
+ You can monitor teardown execution with callbacks. Note that teardown failures do not cause the pipeline to fail - they are fire-and-forget cleanup operations:
147
+
148
+ ```ts
149
+ await pipeline([dbTask]).run({
150
+ onTaskTeardown: (taskName) => {
151
+ console.log(`[${taskName}] Starting teardown...`)
152
+ },
153
+ onTaskTeardownError: (taskName, error) => {
154
+ console.error(`[${taskName}] Teardown failed: ${error.message}`)
155
+ // error is a regular Error instance (not a PipelineError)
156
+ // Teardown failures do not affect pipeline success/failure
157
+ },
158
+ })
159
+ ```
160
+
161
+ ### Teardown Execution Rules
162
+
163
+ Teardowns run when:
164
+
165
+ - ✅ The command entered the running state (regardless of success or failure)
166
+ - ✅ The pipeline completes successfully
167
+ - ✅ The pipeline fails after tasks have started
168
+
169
+ Teardowns do **not** run when:
170
+
171
+ - ❌ The task was skipped (no command for the current mode)
172
+ - ❌ The task failed before starting (spawn error)
173
+ - ❌ The pipeline never began execution
174
+
175
+ ### Reverse Dependency Order
176
+
177
+ Teardowns execute in reverse dependency order to ensure dependents are cleaned up before their dependencies:
178
+
179
+ ```ts
180
+ const db = task({
181
+ name: "db",
182
+ commands: {
183
+ dev: { run: "docker-compose up", teardown: "docker-compose down" },
184
+ build: "echo build",
185
+ },
186
+ cwd: ".",
187
+ })
188
+
189
+ const api = task({
190
+ name: "api",
191
+ commands: {
192
+ dev: { run: "npm run dev", teardown: "echo stopping api" },
193
+ build: "echo build",
194
+ },
195
+ cwd: ".",
196
+ dependencies: [db], // api depends on db
197
+ })
198
+
199
+ // Teardown order: api first, then db
200
+ await pipeline([db, api]).run()
201
+ ```
202
+
203
+ ## Skipping Tasks
204
+
205
+ Tasks can be automatically skipped when they don't have a command for the current mode. This is useful for multi-mode pipelines where some tasks are only relevant in certain contexts.
206
+
207
+ ### Default Behavior
208
+
209
+ If a task has no command for the current mode, it is **skipped**:
210
+
211
+ - ✅ The task participates in the dependency graph
212
+ - ✅ The task resolves immediately (satisfies dependencies)
213
+ - ✅ Dependents are unblocked
214
+ - ❌ No command is executed
215
+ - ❌ No teardown is registered
216
+ - ❌ No readiness is waited for
217
+
218
+ ```ts
219
+ const dbTask = task({
220
+ name: "database",
221
+ commands: {
222
+ dev: "docker-compose up",
223
+ // No build command - will be skipped in build mode
224
+ },
225
+ cwd: ".",
226
+ })
227
+
228
+ const apiTask = task({
229
+ name: "api",
230
+ commands: {
231
+ dev: "npm run dev",
232
+ build: "npm run build",
233
+ },
234
+ cwd: ".",
235
+ dependencies: [dbTask], // dbTask will be skipped, but apiTask will still run
236
+ })
237
+
238
+ await pipeline([dbTask, apiTask]).run({
239
+ command: "build",
240
+ onTaskSkipped: (taskName, mode) => {
241
+ console.log(`[${taskName}] skipped (no command for mode "${mode}")`)
242
+ },
243
+ })
244
+ ```
245
+
246
+ ### Strict Mode
247
+
248
+ In strict mode, missing commands cause the pipeline to fail. Use this for CI/release pipelines where every task is expected to participate:
249
+
250
+ ```ts
251
+ await pipeline([dbTask, apiTask]).run({
252
+ command: "build",
253
+ strict: true, // Missing commands will cause pipeline to fail
254
+ })
255
+ ```
256
+
257
+ ### Task-Level Override
258
+
259
+ Even with global strict mode, you can explicitly allow a task to be skipped:
260
+
261
+ ```ts
262
+ const dbTask = task({
263
+ name: "database",
264
+ commands: {
265
+ dev: "docker-compose up",
266
+ // No build command, but explicitly allowed to skip
267
+ },
268
+ cwd: ".",
269
+ allowSkip: true, // Explicitly allow skipping even in strict mode
270
+ })
271
+
272
+ await pipeline([dbTask]).run({
273
+ command: "build",
274
+ strict: true, // Global strict mode
275
+ // dbTask will still be skipped because allowSkip: true
276
+ })
277
+ ```
278
+
279
+ ### Nested Pipeline Behavior
280
+
281
+ When a pipeline is converted to a task, skip behavior is preserved:
282
+
283
+ - If **all** inner tasks are skipped → outer task is skipped
284
+ - If **some** inner tasks run and some are skipped → the outer task completes
285
+ - If **any** fail → outer task fails
286
+
287
+ ```ts
288
+ const innerPipeline = pipeline([
289
+ task({ name: "inner1", commands: { dev: "..." }, cwd: "." }),
290
+ task({ name: "inner2", commands: { dev: "..." }, cwd: "." }),
291
+ ])
292
+
293
+ const outerTask = innerPipeline.toTask({ name: "outer" })
294
+
295
+ // If all inner tasks are skipped in build mode, outer task is also skipped
296
+ await pipeline([outerTask]).run({ command: "build" })
297
+ ```
298
+
299
+ ## Pipeline Composition
300
+
301
+ Build complex workflows by composing tasks and pipelines together.
302
+
303
+ ### Task Chaining
304
+
305
+ Chain tasks together using `andThen()` to create a pipeline that will run the tasks in order, automatically adding the previous task as a dependency:
306
+
307
+ ```ts
308
+ import { task } from "builderman"
309
+
310
+ const build = task({
311
+ name: "compile",
312
+ commands: {
313
+ build: "tsc",
314
+ dev: {
315
+ run: "tsc --watch",
316
+ readyWhen: (output) => output.includes("Watching for file changes."),
317
+ },
318
+ },
319
+ cwd: "packages/lib",
320
+ }).andThen({
321
+ name: "bundle",
322
+ commands: {
323
+ build: "rollup",
324
+ dev: {
325
+ run: "rollup --watch",
326
+ readyWhen: (output) => output.includes("Watching for file changes."),
327
+ },
328
+ },
329
+ cwd: "packages/lib",
330
+ })
331
+
332
+ await build.run()
333
+ ```
334
+
335
+ ### Composing Pipelines as Tasks
336
+
337
+ Convert pipelines to tasks and compose them with explicit dependencies:
338
+
339
+ ```ts
340
+ const build = pipeline([
341
+ /* ... */
342
+ ])
343
+ const test = pipeline([
344
+ /* ... */
345
+ ])
346
+ const deploy = pipeline([
347
+ /* ... */
348
+ ])
349
+
350
+ // Convert to tasks first
351
+ const buildTask = build.toTask({ name: "build" })
352
+ const testTask = test.toTask({ name: "test", dependencies: [buildTask] })
353
+ const deployTask = deploy.toTask({ name: "deploy", dependencies: [testTask] })
354
+
355
+ // Compose into final pipeline
356
+ const ci = pipeline([buildTask, testTask, deployTask])
357
+
358
+ await ci.run()
359
+ ```
360
+
361
+ **Note:** When a pipeline is converted to a task, it becomes a single unit in the dependency graph. The nested pipeline will execute completely before any dependent tasks can start.
@@ -0,0 +1,2 @@
1
+ import type { TaskGraph, Task } from "./types.js";
2
+ export declare function createTaskGraph(tasks: Task[]): TaskGraph;
package/dist/graph.js ADDED
@@ -0,0 +1,97 @@
1
+ import { $TASK_INTERNAL } from "./constants.js";
2
+ import { validateTasks } from "./util.js";
3
+ export function createTaskGraph(tasks) {
4
+ const nodes = new Map();
5
+ validateTasks(tasks);
6
+ // Create nodes for all tasks
7
+ for (const task of tasks) {
8
+ const { id: taskId } = task[$TASK_INTERNAL];
9
+ nodes.set(taskId, {
10
+ task,
11
+ dependencies: new Set(),
12
+ dependents: new Set(),
13
+ });
14
+ }
15
+ // Build dependency relationships
16
+ for (const task of tasks) {
17
+ const { id: taskId, name: taskName, dependencies } = task[$TASK_INTERNAL];
18
+ const node = nodes.get(taskId);
19
+ for (const dep of dependencies) {
20
+ const { id: depId, name: depName } = dep[$TASK_INTERNAL];
21
+ if (!nodes.has(depId)) {
22
+ throw new Error(`Task "${taskName}" depends on "${depName}" which is not in the pipeline`);
23
+ }
24
+ node.dependencies.add(depId);
25
+ nodes.get(depId).dependents.add(taskId);
26
+ }
27
+ }
28
+ return {
29
+ nodes,
30
+ validate() {
31
+ // Use DFS to detect cycles
32
+ const visited = new Set();
33
+ const recursionStack = new Set();
34
+ const visit = (nodeId, path) => {
35
+ if (recursionStack.has(nodeId)) {
36
+ // Found a cycle - build the cycle path
37
+ const cycleStart = path.indexOf(nodeId);
38
+ const cycle = [...path.slice(cycleStart), nodeId];
39
+ throw new Error(`Circular dependency detected: ${cycle.join(" -> ")}`);
40
+ }
41
+ if (visited.has(nodeId)) {
42
+ return;
43
+ }
44
+ visited.add(nodeId);
45
+ recursionStack.add(nodeId);
46
+ const node = nodes.get(nodeId);
47
+ for (const depId of node.dependencies) {
48
+ visit(depId, [...path, nodeId]);
49
+ }
50
+ recursionStack.delete(nodeId);
51
+ };
52
+ for (const nodeId of nodes.keys()) {
53
+ if (!visited.has(nodeId)) {
54
+ visit(nodeId, []);
55
+ }
56
+ }
57
+ },
58
+ simplify() {
59
+ // Remove transitive dependencies using Floyd-Warshall approach
60
+ // For each node, if there's a path through another node, remove the direct edge
61
+ const reachable = new Map();
62
+ // Initialize reachable sets with direct dependencies
63
+ for (const [id, node] of nodes) {
64
+ reachable.set(id, new Set(node.dependencies));
65
+ }
66
+ // Compute transitive closure
67
+ for (const k of nodes.keys()) {
68
+ for (const i of nodes.keys()) {
69
+ if (reachable.get(i).has(k)) {
70
+ for (const j of nodes.keys()) {
71
+ if (reachable.get(k).has(j)) {
72
+ reachable.get(i).add(j);
73
+ }
74
+ }
75
+ }
76
+ }
77
+ }
78
+ // Remove transitive edges
79
+ for (const [nodeId, node] of nodes) {
80
+ const toRemove = new Set();
81
+ for (const depId of node.dependencies) {
82
+ // Check if there's a path from this node to dep through another dependency
83
+ for (const otherDep of node.dependencies) {
84
+ if (otherDep !== depId && reachable.get(otherDep).has(depId)) {
85
+ // dep is reachable through otherDep, so it's transitive
86
+ toRemove.add(depId);
87
+ }
88
+ }
89
+ }
90
+ for (const depId of toRemove) {
91
+ node.dependencies.delete(depId);
92
+ nodes.get(depId).dependents.delete(nodeId);
93
+ }
94
+ }
95
+ },
96
+ };
97
+ }
package/dist/index.d.ts CHANGED
@@ -1,3 +1,3 @@
1
- export * from "./task.js";
2
- export * from "./pipeline.js";
3
- export type { Task, Pipeline, TaskConfig } from "./types.js";
1
+ export { task } from "./task.js";
2
+ export { pipeline, PipelineError } from "./pipeline.js";
3
+ export type { Task, Pipeline, TaskConfig, Command, CommandConfig, Commands, PipelineRunConfig, PipelineTaskConfig, } from "./types.js";
package/dist/index.js CHANGED
@@ -1,2 +1,2 @@
1
- export * from "./task.js";
2
- export * from "./pipeline.js";
1
+ export { task } from "./task.js";
2
+ export { pipeline, PipelineError } from "./pipeline.js";
@@ -1,5 +1,23 @@
1
1
  import type { Pipeline, Task } from "./types.js";
2
2
  /**
3
3
  * Creates a pipeline that manages task execution with dependency-based coordination.
4
+ * @param tasks - The tasks to include in the pipeline.
5
+ * @returns A pipeline that can be used to execute the tasks.
6
+ * @example
7
+ * const task1 = task({ name: "task1", commands: { dev: "echo task1" }, cwd: "." })
8
+ * const task2 = task({ name: "task2", commands: { dev: "echo task2" }, cwd: ".", dependencies: [task1] })
9
+ * await pipeline([task1, task2]).run()
4
10
  */
5
11
  export declare function pipeline(tasks: Task[]): Pipeline;
12
+ type PipelineErrorCode = typeof PipelineError.Aborted | typeof PipelineError.ProcessTerminated | typeof PipelineError.TaskFailed | typeof PipelineError.InvalidSignal | typeof PipelineError.InvalidTask;
13
+ export declare class PipelineError extends Error {
14
+ readonly code: PipelineErrorCode;
15
+ readonly taskName?: string;
16
+ constructor(message: string, code: PipelineErrorCode, taskName?: string);
17
+ static Aborted: 0;
18
+ static ProcessTerminated: 1;
19
+ static TaskFailed: 2;
20
+ static InvalidSignal: 3;
21
+ static InvalidTask: 4;
22
+ }
23
+ export {};