builderman 1.0.8 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +103 -1
- package/dist/graph.d.ts +2 -0
- package/dist/graph.js +97 -0
- package/dist/index.d.ts +2 -2
- package/dist/index.js +2 -2
- package/dist/pipeline.js +191 -131
- package/dist/pipeline.test.d.ts +1 -0
- package/dist/scheduler.d.ts +17 -0
- package/dist/scheduler.js +48 -0
- package/dist/task.js +18 -28
- package/dist/types.d.ts +43 -11
- package/dist/types.js +1 -0
- package/dist/util.d.ts +2 -0
- package/dist/util.js +6 -0
- package/package.json +2 -2
package/README.md
CHANGED
@@ -1,5 +1,107 @@
 # **builderman**

-####
+#### _A simple task runner for building and developing projects._

 <br />
+
+## Installation
+
+```bash
+npm install builderman
+```
+
+## Usage
+
+```ts
+import { task, pipeline } from "builderman"
+
+const task1 = task({
+  name: "lib:build",
+  commands: {
+    dev: "tsc --watch",
+    build: "tsc",
+  },
+  cwd: "packages/lib",
+  isReady: (stdout) => {
+    // mark this task as ready when the process is watching for file changes
+    return stdout.includes("Watching for file changes.")
+  },
+})
+
+const task2 = task({
+  name: "consumer:dev",
+  commands: {
+    dev: "npm run dev",
+    build: "npm run build",
+  },
+  cwd: "packages/consumer",
+  dependencies: [task1],
+})
+
+await pipeline([task1, task2]).run({
+  onTaskError: (taskName, error) => {
+    console.error(`[${taskName}] Error: ${error.message}`)
+  },
+  onTaskComplete: (taskName) => {
+    console.log(`[${taskName}] Complete!`)
+  },
+  onPipelineComplete: () => {
+    console.log("All tasks complete! 🎉")
+  },
+  onPipelineError: (error) => {
+    console.error(`Pipeline error: ${error.message}`)
+  },
+})
+```
+
+## Pipeline Composition
+
+Build complex workflows by composing tasks and pipelines together.
+
+### Task Chaining
+
+Chain tasks together using `andThen()` to create a pipeline that runs the tasks in order:
+
+```ts
+import { task, pipeline } from "builderman"
+
+const build = task({
+  name: "compile",
+  commands: { dev: "tsc --watch", build: "tsc" },
+  cwd: "packages/lib",
+}).andThen({
+  name: "bundle",
+  commands: { dev: "rollup --watch", build: "rollup" },
+  cwd: "packages/lib",
+})
+
+await build.run()
+```
+
+### Composing Pipelines as Tasks
+
+Convert pipelines to tasks and compose them with explicit dependencies:
+
+```ts
+const build = pipeline([
+  /* ... */
+])
+const test = pipeline([
+  /* ... */
+])
+const deploy = pipeline([
+  /* ... */
+])
+
+// Convert to tasks first
+const buildTask = build.toTask({ name: "build" })
+const testTask = test.toTask({ name: "test", dependencies: [buildTask] })
+const deployTask = deploy.toTask({ name: "deploy", dependencies: [testTask] })
+
+// Compose into final pipeline
+const ci = pipeline([buildTask, testTask, deployTask])
+
+await ci.run()
+```
+
+**Note:** When a pipeline is converted to a task, it becomes a single unit in the dependency graph. The nested pipeline will execute completely before any dependent tasks can start.
package/dist/graph.d.ts
ADDED
package/dist/graph.js
ADDED
@@ -0,0 +1,97 @@
+import { $TASK_INTERNAL } from "./constants.js";
+import { validateTasks } from "./util.js";
+export function createTaskGraph(tasks) {
+    const nodes = new Map();
+    validateTasks(tasks);
+    // Create nodes for all tasks
+    for (const task of tasks) {
+        const { id: taskId } = task[$TASK_INTERNAL];
+        nodes.set(taskId, {
+            task,
+            dependencies: new Set(),
+            dependents: new Set(),
+        });
+    }
+    // Build dependency relationships
+    for (const task of tasks) {
+        const { id: taskId, name: taskName, dependencies } = task[$TASK_INTERNAL];
+        const node = nodes.get(taskId);
+        for (const dep of dependencies) {
+            const { id: depId, name: depName } = dep[$TASK_INTERNAL];
+            if (!nodes.has(depId)) {
+                throw new Error(`Task "${taskName}" depends on "${depName}" which is not in the pipeline`);
+            }
+            node.dependencies.add(depId);
+            nodes.get(depId).dependents.add(taskId);
+        }
+    }
+    return {
+        nodes,
+        validate() {
+            // Use DFS to detect cycles
+            const visited = new Set();
+            const recursionStack = new Set();
+            const visit = (nodeId, path) => {
+                if (recursionStack.has(nodeId)) {
+                    // Found a cycle - build the cycle path
+                    const cycleStart = path.indexOf(nodeId);
+                    const cycle = [...path.slice(cycleStart), nodeId];
+                    throw new Error(`Circular dependency detected: ${cycle.join(" -> ")}`);
+                }
+                if (visited.has(nodeId)) {
+                    return;
+                }
+                visited.add(nodeId);
+                recursionStack.add(nodeId);
+                const node = nodes.get(nodeId);
+                for (const depId of node.dependencies) {
+                    visit(depId, [...path, nodeId]);
+                }
+                recursionStack.delete(nodeId);
+            };
+            for (const nodeId of nodes.keys()) {
+                if (!visited.has(nodeId)) {
+                    visit(nodeId, []);
+                }
+            }
+        },
+        simplify() {
+            // Remove transitive dependencies using Floyd-Warshall approach
+            // For each node, if there's a path through another node, remove the direct edge
+            const reachable = new Map();
+            // Initialize reachable sets with direct dependencies
+            for (const [id, node] of nodes) {
+                reachable.set(id, new Set(node.dependencies));
+            }
+            // Compute transitive closure
+            for (const k of nodes.keys()) {
+                for (const i of nodes.keys()) {
+                    if (reachable.get(i).has(k)) {
+                        for (const j of nodes.keys()) {
+                            if (reachable.get(k).has(j)) {
+                                reachable.get(i).add(j);
+                            }
+                        }
+                    }
+                }
+            }
+            // Remove transitive edges
+            for (const [nodeId, node] of nodes) {
+                const toRemove = new Set();
+                for (const depId of node.dependencies) {
+                    // Check if there's a path from this node to dep through another dependency
+                    for (const otherDep of node.dependencies) {
+                        if (otherDep !== depId && reachable.get(otherDep).has(depId)) {
+                            // dep is reachable through otherDep, so it's transitive
+                            toRemove.add(depId);
+                        }
+                    }
+                }
+                for (const depId of toRemove) {
+                    node.dependencies.delete(depId);
+                    nodes.get(depId).dependents.delete(nodeId);
+                }
+            }
+        },
+    };
+}
package/dist/index.d.ts
CHANGED
@@ -1,3 +1,3 @@
-export
-export
+export { task } from "./task.js";
+export { pipeline } from "./pipeline.js";
 export type { Task, Pipeline, TaskConfig } from "./types.js";
package/dist/index.js
CHANGED
@@ -1,2 +1,2 @@
-export
-export
+export { task } from "./task.js";
+export { pipeline } from "./pipeline.js";
package/dist/pipeline.js
CHANGED
@@ -1,174 +1,234 @@
 import { spawn } from "node:child_process";
-import { EventEmitter } from "node:events";
 import * as path from "node:path";
+import * as fs from "node:fs";
 import { $TASK_INTERNAL } from "./constants.js";
+import { createTaskGraph } from "./graph.js";
+import { createScheduler } from "./scheduler.js";
+import { task } from "./task.js";
+import { validateTasks } from "./util.js";
+// Module-level cache for pipeline-to-task conversions
+// Key: Pipeline, Value: Map of name -> Task
+const pipelineTaskCache = new WeakMap();
 /**
  * Creates a pipeline that manages task execution with dependency-based coordination.
  */
 export function pipeline(tasks) {
-
-
-
+    const graph = createTaskGraph(tasks);
+    graph.validate();
+    graph.simplify();
+    const pipelineImpl = {
+        toTask(config) {
+            validateTasks(config.dependencies);
+            const syntheticTask = task({
+                name: config.name,
+                commands: { dev: ":", build: ":" }, // Dummy commands (no-op)
+                cwd: ".", // Dummy cwd
+                dependencies: [...(config.dependencies || [])],
+            });
+            // Mark this task as a pipeline task
+            syntheticTask[$TASK_INTERNAL].pipeline = pipelineImpl;
+            // Cache this conversion
+            let cache = pipelineTaskCache.get(pipelineImpl);
+            if (!cache) {
+                cache = new Map();
+                pipelineTaskCache.set(pipelineImpl, cache);
+            }
+            cache.set(config.name, syntheticTask);
+            return syntheticTask;
+        },
+        async run(config) {
+            const spawnFn = config?.spawn ?? spawn;
             const runningTasks = new Map();
-            const
-
-
-
-
-
-
-
-            };
-
-
-
-
-
-
-
-                return true;
-            }
-            // Wait for all dependencies
-            for (const dep of dependencies) {
-                if (typeof dep === "function") {
-                    await dep();
+            const runningPipelines = new Map();
+            let failed = false;
+            const scheduler = createScheduler(graph);
+            let completionResolver = null;
+            let completionRejector = null;
+            const completionPromise = new Promise((resolve, reject) => {
+                completionResolver = resolve;
+                completionRejector = reject;
+            });
+            const failPipeline = (error) => {
+                if (failed)
+                    return;
+                failed = true;
+                for (const child of runningTasks.values()) {
+                    try {
+                        child.kill("SIGTERM");
                 }
-
-
+                    catch { }
+                }
+                // Stop nested pipelines
+                for (const { stop } of runningPipelines.values()) {
+                    try {
+                        stop();
                 }
+                    catch { }
                 }
-
+                config?.onPipelineError?.(error);
+                completionRejector?.(error);
             };
-
-
-            if (runningTasks.has(
+            const startTask = (task) => {
+                const { name: taskName, [$TASK_INTERNAL]: { id: taskId, pipeline: nestedPipeline }, } = task;
+                if (runningTasks.has(taskId))
+                    return;
+                // Handle pipeline tasks
+                if (nestedPipeline) {
+                    // Mark as ready immediately (pipeline entry nodes will handle their own ready state)
+                    advanceScheduler({ type: "ready", taskId });
+                    // Create an abort controller to stop the nested pipeline if needed
+                    let pipelineStopped = false;
+                    const stopPipeline = () => {
+                        pipelineStopped = true;
+                        // The nested pipeline will continue running, but we've marked it as stopped
+                        // In a more sophisticated implementation, we could propagate stop signals
+                    };
+                    runningPipelines.set(taskId, { stop: stopPipeline });
+                    // Run the nested pipeline
+                    nestedPipeline
+                        .run({
+                        spawn: spawnFn,
+                        onTaskError: (nestedTaskName, error) => {
+                            if (pipelineStopped)
+                                return;
+                            config?.onTaskError?.(`${taskName}:${nestedTaskName}`, error);
+                        },
+                        onTaskComplete: (nestedTaskName) => {
+                            if (pipelineStopped)
+                                return;
+                            config?.onTaskComplete?.(`${taskName}:${nestedTaskName}`);
+                        },
+                        onPipelineError: (error) => {
+                            if (pipelineStopped)
+                                return;
+                            runningPipelines.delete(taskId);
+                            const e = new Error(`[${taskName}] Pipeline failed: ${error.message}`);
+                            config?.onTaskError?.(taskName, e);
+                            failPipeline(e);
+                        },
+                        onPipelineComplete: () => {
+                            if (pipelineStopped)
+                                return;
+                            runningPipelines.delete(taskId);
+                            config?.onTaskComplete?.(taskName);
+                            advanceScheduler({ type: "complete", taskId });
+                        },
+                    })
+                        .catch((error) => {
+                        if (pipelineStopped)
+                            return;
+                        runningPipelines.delete(taskId);
+                        const e = new Error(`[${taskName}] Pipeline failed: ${error.message}`);
+                        config?.onTaskError?.(taskName, e);
+                        failPipeline(e);
+                    });
                     return;
                 }
-
-            const
-
+                // Regular task execution
+                const command = process.env.NODE_ENV === "production"
+                    ? task[$TASK_INTERNAL].commands.build
+                    : task[$TASK_INTERNAL].commands.dev;
+                const { cwd, shouldStdoutMarkReady } = task[$TASK_INTERNAL];
                 const taskCwd = path.isAbsolute(cwd)
                     ? cwd
                    : path.resolve(process.cwd(), cwd);
-
-
-
-
-
-            const rootBinPath = path.join(process.cwd(), "node_modules", ".bin");
-            if (rootBinPath !== localBinPath) {
-                binPaths.push(rootBinPath);
-            }
-            if (existingPath) {
-                binPaths.push(existingPath);
+                if (!fs.existsSync(taskCwd)) {
+                    const e = new Error(`[${taskName}] Working directory does not exist: ${taskCwd}`);
+                    config?.onTaskError?.(taskName, e);
+                    failPipeline(e);
+                    return;
                 }
-            const
+                const accumulatedPath = [
+                    path.join(taskCwd, "node_modules", ".bin"),
+                    path.join(process.cwd(), "node_modules", ".bin"),
+                    process.env.PATH,
+                ]
+                    .filter(Boolean)
+                    .join(process.platform === "win32" ? ";" : ":");
                 const env = {
                     ...process.env,
-                PATH:
-                Path:
+                    PATH: accumulatedPath,
+                    Path: accumulatedPath,
                 };
-            const child =
-                cwd,
+                const child = spawnFn(command, {
+                    cwd: taskCwd,
                    stdio: ["inherit", "pipe", "pipe"],
                    shell: true,
                    env,
                });
-
-
-
-
-
-                markComplete();
-                process.exitCode = 1;
-                eventEmitter.emit("taskCompleted", task.name);
-            });
-            runningTasks.set(task.name, child);
-            // If task doesn't have readyOn, mark as ready immediately
-            if (!readyOn) {
-                readyTasks.add(task.name);
-                markReady();
-                eventEmitter.emit("taskReady", task.name);
+                runningTasks.set(taskId, child);
+                let didMarkReady = false;
+                if (!shouldStdoutMarkReady) {
+                    advanceScheduler({ type: "ready", taskId });
+                    didMarkReady = true;
                }
-
-
-
-
-
-
-
-
-                stdoutBuffer = lines.pop() || "";
-                for (const line of lines) {
-                    // Check if task is ready based on readyOn callback
-                    if (readyOn && !readyTasks.has(task.name)) {
-                        if (readyOn(allOutput)) {
-                            readyTasks.add(task.name);
-                            markReady();
-                            eventEmitter.emit("taskReady", task.name);
-                        }
-                    }
-                    // Forward stdout to parent
-                    process.stdout.write(line + "\n");
+                let output = "";
+                child.stdout?.on("data", (buf) => {
+                    const chunk = buf.toString();
+                    output += chunk;
+                    process.stdout.write(chunk);
+                    if (!didMarkReady && shouldStdoutMarkReady(output)) {
+                        advanceScheduler({ type: "ready", taskId });
+                        didMarkReady = true;
                    }
                });
-
-
-                if (stdoutBuffer) {
-                    process.stdout.write(stdoutBuffer);
-                }
+                child.stderr?.on("data", (buf) => {
+                    process.stderr.write(buf);
                });
-
-
+                child.on("error", (error) => {
+                    const e = new Error(`[${taskName}] Failed to start: ${error.message}`);
+                    config?.onTaskError?.(taskName, e);
+                    failPipeline(e);
                });
-            // Handle task completion
                child.on("exit", (code) => {
-                runningTasks.delete(
-                completedTasks.add(task.name);
-                markComplete();
+                    runningTasks.delete(taskId);
                    if (code !== 0) {
-
+                        const e = new Error(`[${taskName}] Task failed with exit code ${code ?? 1}`);
+                        config?.onTaskError?.(taskName, e);
+                        failPipeline(e);
+                        return;
                    }
-
+                    config?.onTaskComplete?.(taskName);
+                    // 🔑 Notify scheduler and drain newly runnable tasks
+                    advanceScheduler({ type: "complete", taskId });
                });
            };
-
-
-
-
-
+            const advanceScheduler = (input) => {
+                let result = input ? scheduler.next(input) : scheduler.next();
+                while (true) {
+                    const event = result.value;
+                    const isFinished = result.done && result.value.type === "done";
+                    if (isFinished) {
+                        config?.onPipelineComplete?.();
+                        completionResolver?.();
+                        return;
+                    }
+                    if (event.type === "run") {
+                        startTask(graph.nodes.get(event.taskId).task);
+                        result = scheduler.next();
+                        continue;
+                    }
+                    if (event.type === "idle") {
+                        return;
                    }
                }
            };
-            //
-
-
-
-            await tryStartTasks();
-            // Wait for all tasks to complete
-            return new Promise((resolve, reject) => {
-                const checkCompletion = () => {
-                    if (runningTasks.size === 0) {
-                        resolve();
-                    }
+            // Handle termination signals
+            const cleanups = ["SIGINT", "SIGTERM", "SIGQUIT", "SIGBREAK"].map((signal) => {
+                const handleSignal = () => {
+                    failPipeline(new Error(`Received ${signal}`));
                };
-
-
-
-
-
-
-
-
-
-            process.on("SIGTERM", () => {
-                for (const child of runningTasks.values()) {
-                    child.kill("SIGTERM");
-                }
-                reject(new Error("Process terminated"));
-            });
+                process.once(signal, handleSignal);
+                return () => {
+                    process.removeListener(signal, handleSignal);
+                };
+            });
+            // 🚀 Kick off initial runnable tasks
+            advanceScheduler();
+            await completionPromise.finally(() => {
+                cleanups.forEach((cleanup) => cleanup());
            });
        },
    };
+    return pipelineImpl;
 }
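
`run()` now takes an injectable `spawn` (see `PipelineRunConfig` in `types.d.ts` below), so the pipeline can be exercised without real child processes. A rough sketch of how that hook might be used, assuming an `EventEmitter`-based stand-in covers the surface `run()` touches (`stdout`/`stderr` streams, `kill`, and the `error`/`exit` events); the fake here is illustrative, not taken from the package's own tests:

```ts
import { EventEmitter } from "node:events"
import { task, pipeline } from "builderman"

// Fake spawn: every "process" emits some stdout and then exits cleanly.
// Only the members pipeline.js actually uses are stubbed out.
const fakeSpawn = ((command: string) => {
  const child = new EventEmitter() as any
  child.stdout = new EventEmitter()
  child.stderr = new EventEmitter()
  child.kill = () => true
  setImmediate(() => {
    child.stdout.emit("data", Buffer.from(`ran: ${command}\n`))
    child.emit("exit", 0)
  })
  return child
}) as unknown as typeof import("node:child_process").spawn

const t = task({ name: "noop", commands: { dev: ":", build: ":" }, cwd: "." })
await pipeline([t]).run({
  spawn: fakeSpawn,
  onTaskComplete: (name) => console.log(`${name} finished without a real process`),
})
```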
package/dist/pipeline.test.d.ts
ADDED
@@ -0,0 +1 @@
+export {};
package/dist/scheduler.d.ts
ADDED
@@ -0,0 +1,17 @@
+import type { TaskGraph } from "./types.js";
+export type SchedulerInput = {
+    type: "complete";
+    taskId: number;
+} | {
+    type: "ready";
+    taskId: number;
+};
+export type SchedulerOutput = {
+    type: "run";
+    taskId: number;
+} | {
+    type: "idle";
+};
+export declare function createScheduler(graph: TaskGraph): Generator<SchedulerOutput, {
+    type: "done";
+}, SchedulerInput>;
package/dist/scheduler.js
ADDED
@@ -0,0 +1,48 @@
+export function* createScheduler(graph) {
+    const remainingReadyDeps = new Map();
+    const readyTasks = new Set(); // Track which tasks are already ready
+    const runnable = [];
+    let completed = 0;
+    for (const [id, node] of graph.nodes) {
+        remainingReadyDeps.set(id, node.dependencies.size);
+        if (node.dependencies.size === 0) {
+            runnable.push(id);
+        }
+    }
+    const markDependencyReady = (taskId) => {
+        if (readyTasks.has(taskId)) {
+            return; // Already marked as ready, don't double-count
+        }
+        readyTasks.add(taskId);
+        // A dependency became ready, check if dependents can now run
+        const node = graph.nodes.get(taskId);
+        for (const depId of node.dependents) {
+            const next = (remainingReadyDeps.get(depId) ?? 0) - 1;
+            remainingReadyDeps.set(depId, next);
+            if (next === 0)
+                runnable.push(depId);
+        }
+    };
+    while (completed < graph.nodes.size) {
+        if (runnable.length > 0) {
+            yield { type: "run", taskId: runnable.shift() };
+            continue;
+        }
+        const input = yield { type: "idle" };
+        if (input?.type === "ready" && input.taskId !== undefined) {
+            markDependencyReady(input.taskId);
+            // After marking a dependency as ready, continue the loop to check for newly runnable tasks
+            // This ensures that if dependents become runnable, they are yielded immediately
+            continue;
+        }
+        else if (input?.type === "complete" && input.taskId !== undefined) {
+            completed++;
+            // When a task completes, it's also ready (if it wasn't already)
+            // This handles the case where a task completes without a ready check
+            markDependencyReady(input.taskId);
+            // Continue the loop to check for newly runnable tasks
+            continue;
+        }
+    }
+    return { type: "done" };
+}
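
Because the scheduler is a plain generator, its protocol can be driven by hand. One subtlety visible in the code: an input passed while the generator is suspended at a `run` yield is discarded (that yield's value is never assigned), so a driver should drain `run` events with bare `next()` calls until it sees `idle` before sending `ready`/`complete` events, which is exactly what `advanceScheduler` in `pipeline.js` does. A sketch, assuming the compiled file can be deep-imported (no `exports` map appears in this diff) and using a hand-built graph that fills in only the fields the scheduler reads:

```ts
import { createScheduler } from "builderman/dist/scheduler.js"

// Hand-built graph: task 1 depends on task 0. Only nodes/dependencies/
// dependents are populated, since those are all the scheduler touches.
const graph = {
  nodes: new Map([
    [0, { task: {}, dependencies: new Set<number>(), dependents: new Set([1]) }],
    [1, { task: {}, dependencies: new Set([0]), dependents: new Set<number>() }],
  ]),
} as any

const s = createScheduler(graph)
console.log(s.next().value) // { type: "run", taskId: 0 } - no dependencies
console.log(s.next().value) // { type: "idle" } - drain to idle before sending events
console.log(s.next({ type: "ready", taskId: 0 }).value) // { type: "run", taskId: 1 }
console.log(s.next().value) // { type: "idle" }
s.next({ type: "complete", taskId: 0 }) // already counted as ready; still idle
console.log(s.next({ type: "complete", taskId: 1 })) // { value: { type: "done" }, done: true }
```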
package/dist/task.js
CHANGED
@@ -1,39 +1,29 @@
 import { $TASK_INTERNAL } from "./constants.js";
+import { validateTasks } from "./util.js";
+import { pipeline } from "./pipeline.js";
+let taskId = 0;
 /**
  * Creates a task configuration.
  */
 export function task(config) {
-
-    const
-        resolveReady = resolve;
-    });
-    let isReady = false;
-    let isComplete = false;
-    return {
+    validateTasks(config.dependencies);
+    const taskInstance = {
         name: config.name,
-        readyOrComplete() {
-            return readyPromise;
-        },
         [$TASK_INTERNAL]: {
             ...config,
-
-
-
-
-
-
-
-
-
-
-
-
-            if (!isReady && resolveReady) {
-                isReady = true;
-                resolveReady();
-            }
-        }
-        },
+            id: taskId++,
+            dependencies: [...(config.dependencies || [])],
+            shouldStdoutMarkReady: config.isReady,
+        },
+        andThen(nextConfig) {
+            // Create the next task with the current task as a dependency
+            const nextTask = task({
+                ...nextConfig,
+                dependencies: [taskInstance],
+            });
+            // Return a pipeline containing both tasks
+            return pipeline([taskInstance, nextTask]);
        },
    };
+    return taskInstance;
 }
package/dist/types.d.ts
CHANGED
@@ -7,23 +7,55 @@ export interface TaskConfig {
     name: string;
     commands: Commands;
     cwd: string;
-
-
+    isReady?: (stdout: string) => boolean;
+    dependencies?: Task[];
 }
-
-
-
-
-
-    markReady: () => void;
-    markComplete: () => void;
+interface TaskInternal extends Omit<TaskConfig, "isReady"> {
+    id: number;
+    dependencies: Task[];
+    shouldStdoutMarkReady?: (stdout: string) => boolean;
+    pipeline?: Pipeline;
 }
 export interface Task {
     name: string;
-    readyOrComplete(): Promise<void>;
     [$TASK_INTERNAL]: TaskInternal;
+    andThen(config: Omit<TaskConfig, "dependencies">): Pipeline;
+}
+export interface PipelineRunConfig {
+    /**
+     * Provides a custom spawn function for the pipeline.
+     * @default import("node:child_process").spawn
+     */
+    spawn?: typeof import("node:child_process").spawn;
+    onTaskError?: (taskName: string, error: Error) => void;
+    onTaskComplete?: (taskName: string) => void;
+    onPipelineError?: (error: Error) => void;
+    onPipelineComplete?: () => void;
+}
+export interface PipelineTaskConfig {
+    name: string;
+    dependencies?: Task[];
 }
 export interface Pipeline {
-    run(): Promise<void>;
+    run(config?: PipelineRunConfig): Promise<void>;
+    toTask(config: PipelineTaskConfig): Task;
+}
+export interface TaskNode {
+    task: Task;
+    dependencies: Set<number>;
+    dependents: Set<number>;
+}
+export interface TaskGraph {
+    nodes: Map<number, TaskNode>;
+    /**
+     * Validates the graph for circular dependencies.
+     * @throws Error if circular dependencies are detected
+     */
+    validate(): void;
+    /**
+     * Simplifies the graph by removing transitive dependencies.
+     * If A depends on B and B depends on C, then A->C is transitive and can be removed.
+     */
+    simplify(): void;
 }
 export {};
package/dist/types.js
ADDED
@@ -0,0 +1 @@
+export {};
package/dist/util.d.ts
ADDED
package/dist/util.js
ADDED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "builderman",
-  "version": "1.0.8",
+  "version": "1.1.0",
   "description": "Simple task runner for building and developing projects.",
   "main": "dist/index.js",
   "type": "module",
@@ -21,7 +21,7 @@
     "@types/node": "^25.0.8"
   },
   "scripts": {
-    "test": "
+    "test": "node --test ./dist/pipeline.test.js",
     "build": "rm -rf dist && tsc",
     "dev": "tsc --watch"
   }