builderman 1.0.9 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +52 -0
- package/dist/graph.d.ts +2 -0
- package/dist/graph.js +97 -0
- package/dist/index.d.ts +2 -2
- package/dist/index.js +2 -2
- package/dist/pipeline.js +211 -160
- package/dist/pipeline.test.d.ts +1 -0
- package/dist/scheduler.d.ts +17 -0
- package/dist/scheduler.js +48 -0
- package/dist/task.js +18 -26
- package/dist/types.d.ts +34 -7
- package/dist/types.js +1 -0
- package/dist/util.d.ts +2 -0
- package/dist/util.js +6 -0
- package/package.json +2 -2
package/README.md
CHANGED
@@ -53,3 +53,55 @@ await pipeline([task1, task2]).run({
   },
 })
 ```
+
+## Pipeline Composition
+
+Build complex workflows by composing tasks and pipelines together.
+
+### Task Chaining
+
+Chain tasks together using `andThen()` to create a pipeline that will run the tasks in order:
+
+```ts
+import { task, pipeline } from "builderman"
+
+const build = task({
+  name: "compile",
+  commands: { dev: "tsc --watch", build: "tsc" },
+  cwd: "packages/lib",
+}).andThen({
+  name: "bundle",
+  commands: { dev: "rollup --watch", build: "rollup" },
+  cwd: "packages/lib",
+})
+
+await build.run()
+```
+
+### Composing Pipelines as Tasks
+
+Convert pipelines to tasks and compose them with explicit dependencies:
+
+```ts
+const build = pipeline([
+  /* ... */
+])
+const test = pipeline([
+  /* ... */
+])
+const deploy = pipeline([
+  /* ... */
+])
+
+// Convert to tasks first
+const buildTask = build.toTask({ name: "build" })
+const testTask = test.toTask({ name: "test", dependencies: [buildTask] })
+const deployTask = deploy.toTask({ name: "deploy", dependencies: [testTask] })
+
+// Compose into final pipeline
+const ci = pipeline([buildTask, testTask, deployTask])
+
+await ci.run()
+```
+
+**Note:** When a pipeline is converted to a task, it becomes a single unit in the dependency graph. The nested pipeline will execute completely before any dependent tasks can start.
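The `parent:child` naming in the new pipeline.js determines what the `run()` callbacks report for a composed pipeline: inner tasks are prefixed with the wrapping task's name, and the wrapper itself is reported once the nested pipeline finishes. A minimal sketch of observing this, with illustrative task names:

```ts
// Sketch: nested-pipeline progress as surfaced through run() callbacks.
// pipeline.js reports inner tasks as "<wrapper>:<inner>" and then the wrapper.
import { task, pipeline } from "builderman"

const compile = task({ name: "compile", commands: { dev: "tsc", build: "tsc" }, cwd: "." })
const buildTask = pipeline([compile]).toTask({ name: "build" })

await pipeline([buildTask]).run({
  onTaskComplete: (name) => console.log(name), // logs "build:compile", then "build"
})
```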
package/dist/graph.d.ts
ADDED
package/dist/graph.js
ADDED
@@ -0,0 +1,97 @@
+import { $TASK_INTERNAL } from "./constants.js";
+import { validateTasks } from "./util.js";
+export function createTaskGraph(tasks) {
+    const nodes = new Map();
+    validateTasks(tasks);
+    // Create nodes for all tasks
+    for (const task of tasks) {
+        const { id: taskId } = task[$TASK_INTERNAL];
+        nodes.set(taskId, {
+            task,
+            dependencies: new Set(),
+            dependents: new Set(),
+        });
+    }
+    // Build dependency relationships
+    for (const task of tasks) {
+        const { id: taskId, name: taskName, dependencies } = task[$TASK_INTERNAL];
+        const node = nodes.get(taskId);
+        for (const dep of dependencies) {
+            const { id: depId, name: depName } = dep[$TASK_INTERNAL];
+            if (!nodes.has(depId)) {
+                throw new Error(`Task "${taskName}" depends on "${depName}" which is not in the pipeline`);
+            }
+            node.dependencies.add(depId);
+            nodes.get(depId).dependents.add(taskId);
+        }
+    }
+    return {
+        nodes,
+        validate() {
+            // Use DFS to detect cycles
+            const visited = new Set();
+            const recursionStack = new Set();
+            const visit = (nodeId, path) => {
+                if (recursionStack.has(nodeId)) {
+                    // Found a cycle - build the cycle path
+                    const cycleStart = path.indexOf(nodeId);
+                    const cycle = [...path.slice(cycleStart), nodeId];
+                    throw new Error(`Circular dependency detected: ${cycle.join(" -> ")}`);
+                }
+                if (visited.has(nodeId)) {
+                    return;
+                }
+                visited.add(nodeId);
+                recursionStack.add(nodeId);
+                const node = nodes.get(nodeId);
+                for (const depId of node.dependencies) {
+                    visit(depId, [...path, nodeId]);
+                }
+                recursionStack.delete(nodeId);
+            };
+            for (const nodeId of nodes.keys()) {
+                if (!visited.has(nodeId)) {
+                    visit(nodeId, []);
+                }
+            }
+        },
+        simplify() {
+            // Remove transitive dependencies using Floyd-Warshall approach
+            // For each node, if there's a path through another node, remove the direct edge
+            const reachable = new Map();
+            // Initialize reachable sets with direct dependencies
+            for (const [id, node] of nodes) {
+                reachable.set(id, new Set(node.dependencies));
+            }
+            // Compute transitive closure
+            for (const k of nodes.keys()) {
+                for (const i of nodes.keys()) {
+                    if (reachable.get(i).has(k)) {
+                        for (const j of nodes.keys()) {
+                            if (reachable.get(k).has(j)) {
+                                reachable.get(i).add(j);
+                            }
+                        }
+                    }
+                }
+            }
+            // Remove transitive edges
+            for (const [nodeId, node] of nodes) {
+                const toRemove = new Set();
+                for (const depId of node.dependencies) {
+                    // Check if there's a path from this node to dep through another dependency
+                    for (const otherDep of node.dependencies) {
+                        if (otherDep !== depId && reachable.get(otherDep).has(depId)) {
+                            // dep is reachable through otherDep, so it's transitive
+                            toRemove.add(depId);
+                        }
+                    }
+                }
+                for (const depId of toRemove) {
+                    node.dependencies.delete(depId);
+                    nodes.get(depId).dependents.delete(nodeId);
+                }
+            }
+        },
+    };
+}
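The `simplify()` method above is a transitive reduction: a Floyd-Warshall-style triple loop computes reachability, and any direct edge whose target is also reachable through a sibling dependency gets dropped. Below is a standalone sketch of the same idea on plain adjacency sets; the `Map<number, Set<number>>` shape is an assumption for illustration, not the package's internal node type:

```ts
// Transitive reduction over adjacency sets, mirroring graph.js simplify().
function transitiveReduction(deps: Map<number, Set<number>>): Map<number, Set<number>> {
  // Floyd-Warshall-style transitive closure.
  const reachable = new Map<number, Set<number>>()
  for (const [id, direct] of deps) reachable.set(id, new Set(direct))
  for (const k of deps.keys())
    for (const i of deps.keys())
      if (reachable.get(i)!.has(k))
        for (const j of reachable.get(k)!) reachable.get(i)!.add(j)

  // Keep an edge only if no sibling dependency already reaches its target.
  const reduced = new Map<number, Set<number>>()
  for (const [id, direct] of deps) {
    const kept = new Set<number>()
    for (const d of direct) {
      const transitive = [...direct].some((other) => other !== d && reachable.get(other)!.has(d))
      if (!transitive) kept.add(d)
    }
    reduced.set(id, kept)
  }
  return reduced
}

// build(0) <- test(1) <- deploy(2), plus a direct deploy -> build edge:
// the direct edge is implied via test, so it is removed.
const reduced = transitiveReduction(new Map([
  [0, new Set<number>()],
  [1, new Set([0])],
  [2, new Set([0, 1])],
]))
console.log(reduced.get(2)) // Set(1) { 1 }
```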
package/dist/index.d.ts
CHANGED
@@ -1,3 +1,3 @@
-export
-export
+export { task } from "./task.js";
+export { pipeline } from "./pipeline.js";
 export type { Task, Pipeline, TaskConfig } from "./types.js";
package/dist/index.js
CHANGED
@@ -1,2 +1,2 @@
-export
-export
+export { task } from "./task.js";
+export { pipeline } from "./pipeline.js";
package/dist/pipeline.js
CHANGED
@@ -1,183 +1,234 @@
 import { spawn } from "node:child_process";
-import { EventEmitter } from "node:events";
 import * as path from "node:path";
 import * as fs from "node:fs";
 import { $TASK_INTERNAL } from "./constants.js";
+import { createTaskGraph } from "./graph.js";
+import { createScheduler } from "./scheduler.js";
+import { task } from "./task.js";
+import { validateTasks } from "./util.js";
+// Module-level cache for pipeline-to-task conversions
+// Key: Pipeline, Value: Map of name -> Task
+const pipelineTaskCache = new WeakMap();
 /**
  * Creates a pipeline that manages task execution with dependency-based coordination.
  */
 export function pipeline(tasks) {
-… (33 lines not captured)
+    const graph = createTaskGraph(tasks);
+    graph.validate();
+    graph.simplify();
+    const pipelineImpl = {
+        toTask(config) {
+            validateTasks(config.dependencies);
+            const syntheticTask = task({
+                name: config.name,
+                commands: { dev: ":", build: ":" }, // Dummy commands (no-op)
+                cwd: ".", // Dummy cwd
+                dependencies: [...(config.dependencies || [])],
+            });
+            // Mark this task as a pipeline task
+            syntheticTask[$TASK_INTERNAL].pipeline = pipelineImpl;
+            // Cache this conversion
+            let cache = pipelineTaskCache.get(pipelineImpl);
+            if (!cache) {
+                cache = new Map();
+                pipelineTaskCache.set(pipelineImpl, cache);
+            }
+            cache.set(config.name, syntheticTask);
+            return syntheticTask;
+        },
+        async run(config) {
+            const spawnFn = config?.spawn ?? spawn;
+            const runningTasks = new Map();
+            const runningPipelines = new Map();
+            let failed = false;
+            const scheduler = createScheduler(graph);
+            let completionResolver = null;
+            let completionRejector = null;
+            const completionPromise = new Promise((resolve, reject) => {
+                completionResolver = resolve;
+                completionRejector = reject;
+            });
+            const failPipeline = (error) => {
+                if (failed)
+                    return;
+                failed = true;
+                for (const child of runningTasks.values()) {
+                    try {
+                        child.kill("SIGTERM");
                     }
-… (3 lines not captured)
+                    catch { }
+                }
+                // Stop nested pipelines
+                for (const { stop } of runningPipelines.values()) {
+                    try {
+                        stop();
                     }
-… (2 lines not captured)
+                    catch { }
+                }
+                config?.onPipelineError?.(error);
+                completionRejector?.(error);
+            };
+            const startTask = (task) => {
+                const { name: taskName, [$TASK_INTERNAL]: { id: taskId, pipeline: nestedPipeline }, } = task;
+                if (runningTasks.has(taskId))
+                    return;
+                // Handle pipeline tasks
+                if (nestedPipeline) {
+                    // Mark as ready immediately (pipeline entry nodes will handle their own ready state)
+                    advanceScheduler({ type: "ready", taskId });
+                    // Create an abort controller to stop the nested pipeline if needed
+                    let pipelineStopped = false;
+                    const stopPipeline = () => {
+                        pipelineStopped = true;
+                        // The nested pipeline will continue running, but we've marked it as stopped
+                        // In a more sophisticated implementation, we could propagate stop signals
+                    };
+                    runningPipelines.set(taskId, { stop: stopPipeline });
+                    // Run the nested pipeline
+                    nestedPipeline
+                        .run({
+                        spawn: spawnFn,
+                        onTaskError: (nestedTaskName, error) => {
+                            if (pipelineStopped)
+                                return;
+                            config?.onTaskError?.(`${taskName}:${nestedTaskName}`, error);
+                        },
+                        onTaskComplete: (nestedTaskName) => {
+                            if (pipelineStopped)
+                                return;
+                            config?.onTaskComplete?.(`${taskName}:${nestedTaskName}`);
+                        },
+                        onPipelineError: (error) => {
+                            if (pipelineStopped)
+                                return;
+                            runningPipelines.delete(taskId);
+                            const e = new Error(`[${taskName}] Pipeline failed: ${error.message}`);
+                            config?.onTaskError?.(taskName, e);
+                            failPipeline(e);
+                        },
+                        onPipelineComplete: () => {
+                            if (pipelineStopped)
+                                return;
+                            runningPipelines.delete(taskId);
+                            config?.onTaskComplete?.(taskName);
+                            advanceScheduler({ type: "complete", taskId });
+                        },
+                    })
+                        .catch((error) => {
+                        if (pipelineStopped)
+                            return;
+                        runningPipelines.delete(taskId);
+                        const e = new Error(`[${taskName}] Pipeline failed: ${error.message}`);
+                        config?.onTaskError?.(taskName, e);
+                        failPipeline(e);
+                    });
+                    return;
+                }
+                // Regular task execution
+                const command = process.env.NODE_ENV === "production"
+                    ? task[$TASK_INTERNAL].commands.build
+                    : task[$TASK_INTERNAL].commands.dev;
+                const { cwd, shouldStdoutMarkReady } = task[$TASK_INTERNAL];
+                const taskCwd = path.isAbsolute(cwd)
+                    ? cwd
+                    : path.resolve(process.cwd(), cwd);
+                if (!fs.existsSync(taskCwd)) {
+                    const e = new Error(`[${taskName}] Working directory does not exist: ${taskCwd}`);
+                    config?.onTaskError?.(taskName, e);
+                    failPipeline(e);
+                    return;
+                }
+                const accumulatedPath = [
+                    path.join(taskCwd, "node_modules", ".bin"),
+                    path.join(process.cwd(), "node_modules", ".bin"),
+                    process.env.PATH,
+                ]
+                    .filter(Boolean)
+                    .join(process.platform === "win32" ? ";" : ":");
+                const env = {
+                    ...process.env,
+                    PATH: accumulatedPath,
+                    Path: accumulatedPath,
                 };
-    const
-… (13 lines not captured)
-    const
-… (3 lines not captured)
+                const child = spawnFn(command, {
+                    cwd: taskCwd,
+                    stdio: ["inherit", "pipe", "pipe"],
+                    shell: true,
+                    env,
+                });
+                runningTasks.set(taskId, child);
+                let didMarkReady = false;
+                if (!shouldStdoutMarkReady) {
+                    advanceScheduler({ type: "ready", taskId });
+                    didMarkReady = true;
+                }
+                let output = "";
+                child.stdout?.on("data", (buf) => {
+                    const chunk = buf.toString();
+                    output += chunk;
+                    process.stdout.write(chunk);
+                    if (!didMarkReady && shouldStdoutMarkReady(output)) {
+                        advanceScheduler({ type: "ready", taskId });
+                        didMarkReady = true;
                     }
-… (2 lines not captured)
+                });
+                child.stderr?.on("data", (buf) => {
+                    process.stderr.write(buf);
+                });
+                child.on("error", (error) => {
+                    const e = new Error(`[${taskName}] Failed to start: ${error.message}`);
+                    config?.onTaskError?.(taskName, e);
+                    failPipeline(e);
+                });
+                child.on("exit", (code) => {
+                    runningTasks.delete(taskId);
+                    if (code !== 0) {
+                        const e = new Error(`[${taskName}] Task failed with exit code ${code ?? 1}`);
+                        config?.onTaskError?.(taskName, e);
+                        failPipeline(e);
+                        return;
                     }
-… (11 lines not captured)
+                    config?.onTaskComplete?.(taskName);
+                    // 🔑 Notify scheduler and drain newly runnable tasks
+                    advanceScheduler({ type: "complete", taskId });
+                });
+            };
+            const advanceScheduler = (input) => {
+                let result = input ? scheduler.next(input) : scheduler.next();
+                while (true) {
+                    const event = result.value;
+                    const isFinished = result.done && result.value.type === "done";
+                    if (isFinished) {
+                        config?.onPipelineComplete?.();
+                        completionResolver?.();
                         return;
                     }
-… (4 lines not captured)
-            shell: true,
-            env,
-        });
-        // Handle spawn errors
-        child.on("error", (error) => {
-            const errorMsg = `[${task.name}] Failed to start: ${error.message}\n  Command: ${command}\n  CWD: ${taskCwd}`;
-            const e = new Error(errorMsg);
-            config.onTaskError?.(task.name, e);
-            failPipeline(e);
-        });
-        runningTasks.set(task.name, child);
-        // If task doesn't have getIsReady, mark as ready immediately
-        if (!getIsReady) {
-            readyTasks.add(task.name);
-            markReady();
-            eventEmitter.emit("taskReady", task.name);
+                    if (event.type === "run") {
+                        startTask(graph.nodes.get(event.taskId).task);
+                        result = scheduler.next();
+                        continue;
                     }
-… (2 lines not captured)
-        let allOutput = "";
-        child.stdout?.on("data", (data) => {
-            const chunk = data.toString();
-            allOutput += chunk;
-            stdoutBuffer += chunk;
-            const lines = stdoutBuffer.split("\n");
-            stdoutBuffer = lines.pop() || "";
-            for (const line of lines) {
-                // Check if task is ready based on readyOn callback
-                if (getIsReady && !readyTasks.has(task.name)) {
-                    if (getIsReady(allOutput)) {
-                        readyTasks.add(task.name);
-                        markReady();
-                        eventEmitter.emit("taskReady", task.name);
-                    }
-                }
-                // Forward stdout to parent
-                process.stdout.write(line + "\n");
-            }
-        });
-        // Forward any remaining buffer on end
-        child.stdout?.on("end", () => {
-            if (stdoutBuffer) {
-                process.stdout.write(stdoutBuffer);
-            }
-        });
-        // Forward stderr
-        child.stderr?.on("data", (data) => {
-            process.stderr.write(data);
-        });
-        // Handle task completion
-        child.on("exit", (code) => {
-            runningTasks.delete(task.name);
-            completedTasks.add(task.name);
-            if (code !== 0) {
-                const error = new Error(`[${task.name}] Task failed with exit code ${code || 1}`);
-                config.onTaskError?.(task.name, error);
-                failPipeline(error);
-            }
-            else {
-                markComplete();
-                eventEmitter.emit("taskCompleted", task.name);
-                config.onTaskComplete?.(task.name);
-            }
-        });
-    };
-    const tryStartTasks = async () => {
-        for (const task of tasks) {
-            if (await canStart(task)) {
-                startTask(task);
-            }
+                    if (event.type === "idle") {
+                        return;
                     }
+                }
+            };
+            // Handle termination signals
+            const cleanups = ["SIGINT", "SIGTERM", "SIGQUIT", "SIGBREAK"].map((signal) => {
+                const handleSignal = () => {
+                    failPipeline(new Error(`Received ${signal}`));
                 };
-… (3 lines not captured)
-        config.onPipelineComplete?.();
-        resolvePipeline();
-    }
+                process.once(signal, handleSignal);
+                return () => {
+                    process.removeListener(signal, handleSignal);
                 };
-… (5 lines not captured)
-    });
-    });
-    eventEmitter.on("taskReady", tryStartTasks);
-    eventEmitter.on("taskCompleted", tryStartTasks);
-    eventEmitter.on("taskCompleted", checkCompletion);
-    tryStartTasks().catch(failPipeline);
+            });
+            // 🚀 Kick off initial runnable tasks
+            advanceScheduler();
+            await completionPromise.finally(() => {
+                cleanups.forEach((cleanup) => cleanup());
             });
         },
     };
+    return pipelineImpl;
 }
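The new `spawn` option on `run()` (typed in `PipelineRunConfig` below) makes the child-process boundary injectable, which is presumably what the added `dist/pipeline.test.js` exercises; the tests themselves are not part of this diff. A minimal sketch of running a pipeline against a fake spawn, assuming the EventEmitter-based stand-in below:

```ts
// Sketch: injecting a fake spawn so no real child processes are started.
// pipeline.js only uses stdout/stderr "data" events, "error"/"exit" events,
// and kill(), so an EventEmitter-based stub is enough here.
import { EventEmitter } from "node:events"
import { task, pipeline } from "builderman"

const fakeSpawn = ((_command: string) => {
  const child = new EventEmitter() as any
  child.stdout = new EventEmitter()
  child.stderr = new EventEmitter()
  child.kill = () => true
  setImmediate(() => child.emit("exit", 0)) // every command "succeeds"
  return child
}) as unknown as typeof import("node:child_process").spawn

await pipeline([
  task({ name: "noop", commands: { dev: ":", build: ":" }, cwd: "." }),
]).run({
  spawn: fakeSpawn,
  onTaskComplete: (name) => console.log(`${name} finished`),
})
```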
package/dist/pipeline.test.d.ts
ADDED

@@ -0,0 +1 @@
+export {};
package/dist/scheduler.d.ts
ADDED

@@ -0,0 +1,17 @@
+import type { TaskGraph } from "./types.js";
+export type SchedulerInput = {
+    type: "complete";
+    taskId: number;
+} | {
+    type: "ready";
+    taskId: number;
+};
+export type SchedulerOutput = {
+    type: "run";
+    taskId: number;
+} | {
+    type: "idle";
+};
+export declare function createScheduler(graph: TaskGraph): Generator<SchedulerOutput, {
+    type: "done";
+}, SchedulerInput>;
package/dist/scheduler.js
ADDED

@@ -0,0 +1,48 @@
+export function* createScheduler(graph) {
+    const remainingReadyDeps = new Map();
+    const readyTasks = new Set(); // Track which tasks are already ready
+    const runnable = [];
+    let completed = 0;
+    for (const [id, node] of graph.nodes) {
+        remainingReadyDeps.set(id, node.dependencies.size);
+        if (node.dependencies.size === 0) {
+            runnable.push(id);
+        }
+    }
+    const markDependencyReady = (taskId) => {
+        if (readyTasks.has(taskId)) {
+            return; // Already marked as ready, don't double-count
+        }
+        readyTasks.add(taskId);
+        // A dependency became ready, check if dependents can now run
+        const node = graph.nodes.get(taskId);
+        for (const depId of node.dependents) {
+            const next = (remainingReadyDeps.get(depId) ?? 0) - 1;
+            remainingReadyDeps.set(depId, next);
+            if (next === 0)
+                runnable.push(depId);
+        }
+    };
+    while (completed < graph.nodes.size) {
+        if (runnable.length > 0) {
+            yield { type: "run", taskId: runnable.shift() };
+            continue;
+        }
+        const input = yield { type: "idle" };
+        if (input?.type === "ready" && input.taskId !== undefined) {
+            markDependencyReady(input.taskId);
+            // After marking a dependency as ready, continue the loop to check for newly runnable tasks
+            // This ensures that if dependents become runnable, they are yielded immediately
+            continue;
+        }
+        else if (input?.type === "complete" && input.taskId !== undefined) {
+            completed++;
+            // When a task completes, it's also ready (if it wasn't already)
+            // This handles the case where a task completes without a ready check
+            markDependencyReady(input.taskId);
+            // Continue the loop to check for newly runnable tasks
+            continue;
+        }
+    }
+    return { type: "done" };
+}
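The scheduler is a coroutine: pipeline.js's `advanceScheduler` drives it with `next()`, starting a task for each `run` event, pausing on `idle`, and feeding `ready`/`complete` events back in. Driving it by hand shows the protocol. The two-node graph literal below is an assumption shaped after the `TaskGraph` interface, and the deep `dist/` import path is illustrative:

```ts
// Sketch: hand-driving the generator-based scheduler for the graph 0 -> 1.
import { createScheduler } from "builderman/dist/scheduler.js"

const graph = {
  nodes: new Map([
    [0, { task: null, dependencies: new Set<number>(), dependents: new Set([1]) }],
    [1, { task: null, dependencies: new Set([0]), dependents: new Set<number>() }],
  ]),
} as any

const s: Generator<any, any, any> = createScheduler(graph)
console.log(s.next().value)                                // { type: "run", taskId: 0 }
console.log(s.next().value)                                // { type: "idle" }
console.log(s.next({ type: "complete", taskId: 0 }).value) // { type: "run", taskId: 1 }
console.log(s.next().value)                                // { type: "idle" }
console.log(s.next({ type: "complete", taskId: 1 }))       // { value: { type: "done" }, done: true }
```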
package/dist/task.js
CHANGED
@@ -1,37 +1,29 @@
 import { $TASK_INTERNAL } from "./constants.js";
+import { validateTasks } from "./util.js";
+import { pipeline } from "./pipeline.js";
+let taskId = 0;
 /**
  * Creates a task configuration.
  */
 export function task(config) {
-… (1 line not captured)
-    const
-        resolveReady = resolve;
-    });
-    let isReady = false;
-    let isComplete = false;
-    return {
+    validateTasks(config.dependencies);
+    const taskInstance = {
         name: config.name,
         [$TASK_INTERNAL]: {
             ...config,
-… (1 line not captured)
-            dependencies: config.dependencies || [],
-… (7 lines not captured)
-            }
-… (2 lines not captured)
-            isComplete = true;
-            if (!isReady && resolveReady) {
-                isReady = true;
-                resolveReady();
-            }
-        }
-    },
+            id: taskId++,
+            dependencies: [...(config.dependencies || [])],
+            shouldStdoutMarkReady: config.isReady,
+        },
+        andThen(nextConfig) {
+            // Create the next task with the current task as a dependency
+            const nextTask = task({
+                ...nextConfig,
+                dependencies: [taskInstance],
+            });
+            // Return a pipeline containing both tasks
+            return pipeline([taskInstance, nextTask]);
         },
     };
+    return taskInstance;
 }
package/dist/types.d.ts
CHANGED
@@ -10,25 +10,52 @@ export interface TaskConfig {
     isReady?: (stdout: string) => boolean;
     dependencies?: Task[];
 }
-interface TaskInternal extends TaskConfig {
-… (1 line not captured)
+interface TaskInternal extends Omit<TaskConfig, "isReady"> {
+    id: number;
     dependencies: Task[];
-… (2 lines not captured)
-    markReady: () => void;
-    markComplete: () => void;
+    shouldStdoutMarkReady?: (stdout: string) => boolean;
+    pipeline?: Pipeline;
 }
 export interface Task {
     name: string;
     [$TASK_INTERNAL]: TaskInternal;
+    andThen(config: Omit<TaskConfig, "dependencies">): Pipeline;
 }
 export interface PipelineRunConfig {
+    /**
+     * Provides a custom spawn function for the pipeline.
+     * @default import("node:child_process").spawn
+     */
+    spawn?: typeof import("node:child_process").spawn;
     onTaskError?: (taskName: string, error: Error) => void;
     onTaskComplete?: (taskName: string) => void;
     onPipelineError?: (error: Error) => void;
     onPipelineComplete?: () => void;
 }
+export interface PipelineTaskConfig {
+    name: string;
+    dependencies?: Task[];
+}
 export interface Pipeline {
-    run(config
+    run(config?: PipelineRunConfig): Promise<void>;
+    toTask(config: PipelineTaskConfig): Task;
+}
+export interface TaskNode {
+    task: Task;
+    dependencies: Set<number>;
+    dependents: Set<number>;
+}
+export interface TaskGraph {
+    nodes: Map<number, TaskNode>;
+    /**
+     * Validates the graph for circular dependencies.
+     * @throws Error if circular dependencies are detected
+     */
+    validate(): void;
+    /**
+     * Simplifies the graph by removing transitive dependencies.
+     * If A depends on B and B depends on C, then A->C is transitive and can be removed.
+     */
+    simplify(): void;
 }
 export {};
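`TaskConfig.isReady` (stored internally as `shouldStdoutMarkReady`) is what lets a long-running task unblock its dependents before it exits: pipeline.js runs the predicate against the task's accumulated stdout and marks the task ready on the first match, while tasks without a predicate are marked ready as soon as they spawn. A sketch, where the server/client names and the "listening" log line are illustrative assumptions:

```ts
// Sketch: a dev server that signals readiness via stdout, gating a dependent.
import { task, pipeline } from "builderman"

const server = task({
  name: "server",
  commands: { dev: "node server.js", build: "node server.js" },
  cwd: "apps/api",
  // Dependents may start once this string appears in accumulated stdout.
  isReady: (stdout) => stdout.includes("listening"),
})

const client = task({
  name: "client",
  commands: { dev: "vite", build: "vite build" },
  cwd: "apps/web",
  dependencies: [server],
})

await pipeline([server, client]).run()
```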
package/dist/types.js
ADDED
@@ -0,0 +1 @@
+export {};
package/dist/util.d.ts
ADDED
package/dist/util.js
ADDED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "builderman",
-  "version": "1.0.9",
+  "version": "1.1.0",
   "description": "Simple task runner for building and developing projects.",
   "main": "dist/index.js",
   "type": "module",
@@ -21,7 +21,7 @@
     "@types/node": "^25.0.8"
   },
   "scripts": {
-    "test": "
+    "test": "node --test ./dist/pipeline.test.js",
     "build": "rm -rf dist && tsc",
     "dev": "tsc --watch"
   }