builderman 1.2.0 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +380 -200
- package/dist/graph.js +0 -2
- package/dist/index.d.ts +3 -2
- package/dist/index.js +2 -1
- package/dist/modules/signal-handler.d.ts +13 -0
- package/dist/modules/signal-handler.js +38 -0
- package/dist/modules/task-executor.d.ts +25 -0
- package/dist/modules/task-executor.js +333 -0
- package/dist/modules/teardown-manager.d.ts +22 -0
- package/dist/modules/teardown-manager.js +157 -0
- package/dist/pipeline-error.d.ts +11 -0
- package/dist/pipeline-error.js +50 -0
- package/dist/pipeline.d.ts +0 -12
- package/dist/pipeline.js +164 -481
- package/dist/scheduler.d.ts +4 -4
- package/dist/scheduler.js +3 -3
- package/dist/task.d.ts +0 -5
- package/dist/task.js +19 -23
- package/dist/types.d.ts +232 -26
- package/package.json +1 -1
- package/dist/pipeline.test.d.ts +0 -1
package/dist/graph.js
CHANGED
|
@@ -1,8 +1,6 @@
|
|
|
1
1
|
import { $TASK_INTERNAL } from "./constants.js";
|
|
2
|
-
import { validateTasks } from "./util.js";
|
|
3
2
|
export function createTaskGraph(tasks) {
|
|
4
3
|
const nodes = new Map();
|
|
5
|
-
validateTasks(tasks);
|
|
6
4
|
// Create nodes for all tasks
|
|
7
5
|
for (const task of tasks) {
|
|
8
6
|
const { id: taskId } = task[$TASK_INTERNAL];
|
package/dist/index.d.ts
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
1
|
export { task } from "./task.js";
|
|
2
|
-
export { pipeline
|
|
3
|
-
export
|
|
2
|
+
export { pipeline } from "./pipeline.js";
|
|
3
|
+
export { PipelineError, type PipelineErrorCode } from "./pipeline-error.js";
|
|
4
|
+
export type { Task, Pipeline, TaskConfig, Command, CommandConfig, Commands, PipelineRunConfig, PipelineTaskConfig, RunResult, PipelineStats, TaskStats, TaskStatus, } from "./types.js";
|
package/dist/index.js
CHANGED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
export interface SignalHandlerConfig {
|
|
2
|
+
abortSignal?: AbortSignal;
|
|
3
|
+
onProcessTerminated: (message: string) => void;
|
|
4
|
+
onAborted: () => void;
|
|
5
|
+
}
|
|
6
|
+
export interface SignalHandler {
|
|
7
|
+
cleanup(): void;
|
|
8
|
+
}
|
|
9
|
+
/**
|
|
10
|
+
* Creates a signal handler for pipeline execution.
|
|
11
|
+
* Handles process termination signals (SIGINT, SIGTERM, etc.) and abort signals.
|
|
12
|
+
*/
|
|
13
|
+
export declare function createSignalHandler({ abortSignal, onAborted, onProcessTerminated, }: SignalHandlerConfig): SignalHandler;
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Creates a signal handler for pipeline execution.
|
|
3
|
+
* Handles process termination signals (SIGINT, SIGTERM, etc.) and abort signals.
|
|
4
|
+
*/
|
|
5
|
+
export function createSignalHandler({ abortSignal, onAborted, onProcessTerminated, }) {
|
|
6
|
+
// Handle termination signals
|
|
7
|
+
const processTerminationListenerCleanups = [
|
|
8
|
+
"SIGINT",
|
|
9
|
+
"SIGTERM",
|
|
10
|
+
"SIGQUIT",
|
|
11
|
+
"SIGBREAK",
|
|
12
|
+
].map((sig) => {
|
|
13
|
+
const handleSignal = () => {
|
|
14
|
+
onProcessTerminated(`Received ${sig}`);
|
|
15
|
+
};
|
|
16
|
+
process.once(sig, handleSignal);
|
|
17
|
+
return () => {
|
|
18
|
+
process.removeListener(sig, handleSignal);
|
|
19
|
+
};
|
|
20
|
+
});
|
|
21
|
+
// Handle abort signal if provided
|
|
22
|
+
let signalCleanup = null;
|
|
23
|
+
if (abortSignal) {
|
|
24
|
+
abortSignal.addEventListener("abort", onAborted);
|
|
25
|
+
signalCleanup = () => {
|
|
26
|
+
abortSignal.removeEventListener("abort", onAborted);
|
|
27
|
+
};
|
|
28
|
+
}
|
|
29
|
+
return {
|
|
30
|
+
/**
|
|
31
|
+
* Cleans up all signal listeners.
|
|
32
|
+
*/
|
|
33
|
+
cleanup() {
|
|
34
|
+
processTerminationListenerCleanups.forEach((cleanup) => cleanup());
|
|
35
|
+
signalCleanup?.();
|
|
36
|
+
},
|
|
37
|
+
};
|
|
38
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { type ChildProcess } from "node:child_process";
|
|
2
|
+
import { PipelineError } from "../pipeline-error.js";
|
|
3
|
+
import type { Task, Pipeline, PipelineRunConfig, TaskGraph, TaskStats } from "../types.js";
|
|
4
|
+
import type { SchedulerInput } from "../scheduler.js";
|
|
5
|
+
import type { TeardownManager } from "./teardown-manager.js";
|
|
6
|
+
export interface TaskExecutorConfig {
|
|
7
|
+
spawn: typeof import("node:child_process").spawn;
|
|
8
|
+
signal?: AbortSignal;
|
|
9
|
+
config?: PipelineRunConfig;
|
|
10
|
+
graph: TaskGraph;
|
|
11
|
+
runningTasks: Map<string, ChildProcess>;
|
|
12
|
+
runningPipelines: Map<string, {
|
|
13
|
+
stop: () => void;
|
|
14
|
+
}>;
|
|
15
|
+
teardownManager: TeardownManager;
|
|
16
|
+
pipelineTasksCache: WeakMap<Pipeline, Task[]>;
|
|
17
|
+
failPipeline: (error: PipelineError) => Promise<void>;
|
|
18
|
+
advanceScheduler: (input?: SchedulerInput) => void;
|
|
19
|
+
updateTaskStatus: (taskId: string, updates: Partial<TaskStats>) => void;
|
|
20
|
+
taskStats: Map<string, TaskStats>;
|
|
21
|
+
}
|
|
22
|
+
/**
|
|
23
|
+
* Executes a task (either a regular task or a nested pipeline).
|
|
24
|
+
*/
|
|
25
|
+
export declare function executeTask(task: Task, executorConfig: TaskExecutorConfig): void;
|
|
@@ -0,0 +1,333 @@
|
|
|
1
|
+
import * as path from "node:path";
|
|
2
|
+
import * as fs from "node:fs";
|
|
3
|
+
import { $TASK_INTERNAL } from "../constants.js";
|
|
4
|
+
import { createTaskGraph } from "../graph.js";
|
|
5
|
+
import { PipelineError } from "../pipeline-error.js";
|
|
6
|
+
/**
 * Dispatches execution of a single task node.
 *
 * Fails the pipeline immediately when the run's AbortSignal is already
 * aborted, ignores tasks that are already in flight, and routes nested
 * pipelines vs. plain commands to their dedicated executors.
 */
export function executeTask(task, executorConfig) {
    // An aborted run must not start anything new.
    if (executorConfig.signal?.aborted) {
        executorConfig.failPipeline(new PipelineError("Aborted", PipelineError.InvalidSignal));
        return;
    }
    const { name: taskName, [$TASK_INTERNAL]: { id: taskId, pipeline: nestedPipeline }, } = task;
    // Already running — nothing to do.
    if (executorConfig.runningTasks.has(taskId)) {
        return;
    }
    if (nestedPipeline) {
        // Task wraps a whole pipeline.
        executeNestedPipeline(task, taskId, taskName, nestedPipeline, executorConfig);
    }
    else {
        // Plain command task.
        executeRegularTask(task, taskId, taskName, executorConfig);
    }
}
|
|
26
|
+
/**
 * Runs a task whose implementation is itself a pipeline.
 *
 * Marks the outer task running immediately (inner entry nodes manage their
 * own readiness), runs the inner pipeline with the outer run's
 * command/strict/signal settings and a merged environment, forwards inner
 * task lifecycle events with an `outer:inner` name prefix, and maps the
 * inner result onto the outer task: any inner failure → failed, all inner
 * tasks skipped → skipped, otherwise → completed.
 *
 * Bug fix: the command-name fallback previously parsed as
 * `(config?.command ?? (NODE_ENV === "production")) ? "build" : "dev"`
 * because `??` binds tighter than the conditional operator, so any truthy
 * explicit `config.command` was coerced to "build". Parentheses now make
 * the explicit command win when provided.
 */
function executeNestedPipeline(task, taskId, taskName, nestedPipeline, executorConfig) {
    const { config, runningPipelines, pipelineTasksCache, failPipeline, advanceScheduler, updateTaskStatus, } = executorConfig;
    // Count inner skips to decide whether the outer task counts as skipped.
    let nestedSkippedCount = 0;
    // Total inner task count (0 when the nested pipeline's tasks are unknown).
    const nestedTasks = pipelineTasksCache.get(nestedPipeline);
    const nestedTotalTasks = nestedTasks
        ? createTaskGraph(nestedTasks).nodes.size
        : 0;
    // Explicit command wins; otherwise derive from NODE_ENV.
    const commandName = config?.command ?? (process.env.NODE_ENV === "production" ? "build" : "dev");
    // Mark as ready immediately (pipeline entry nodes handle their own ready state).
    const startedAt = Date.now();
    updateTaskStatus(taskId, {
        status: "running",
        command: commandName,
        startedAt,
    });
    advanceScheduler({ type: "ready", taskId });
    config?.onTaskBegin?.(taskName);
    // Once stopped, late events from the inner pipeline must be ignored.
    let pipelineStopped = false;
    runningPipelines.set(taskId, {
        stop: () => {
            pipelineStopped = true;
        },
    });
    // Env precedence: pipeline-level env is overridden by the task's own env
    // (from pipeline.toTask config).
    const mergedEnv = {
        ...(config?.env ?? {}),
        ...task[$TASK_INTERNAL].env,
    };
    // Run the nested pipeline, propagating the outer run's abort signal.
    nestedPipeline
        .run({
        spawn: executorConfig.spawn,
        command: config?.command,
        strict: config?.strict,
        signal: executorConfig.signal, // propagate aborts into the nested run
        env: mergedEnv,
        onTaskBegin: (nestedTaskName) => {
            if (pipelineStopped)
                return;
            config?.onTaskBegin?.(`${taskName}:${nestedTaskName}`);
        },
        onTaskComplete: (nestedTaskName) => {
            if (pipelineStopped)
                return;
            config?.onTaskComplete?.(`${taskName}:${nestedTaskName}`);
        },
        onTaskSkipped: (nestedTaskName, mode) => {
            if (pipelineStopped)
                return;
            nestedSkippedCount++;
            config?.onTaskSkipped?.(`${taskName}:${nestedTaskName}`, mode);
        },
    })
        .then((result) => {
        if (pipelineStopped)
            return;
        runningPipelines.delete(taskId);
        const finishedAt = Date.now();
        const durationMs = finishedAt - startedAt;
        if (!result.ok) {
            // Inner failure fails the outer task and the whole pipeline.
            updateTaskStatus(taskId, {
                status: "failed",
                finishedAt,
                durationMs,
                error: result.error,
            });
            failPipeline(result.error);
            return;
        }
        // Map the inner result onto the outer task:
        // - all inner tasks skipped → outer task is skipped
        // - some ran, some skipped → outer task is completed
        // - any failure → handled above
        if (nestedSkippedCount === nestedTotalTasks && nestedTotalTasks > 0) {
            updateTaskStatus(taskId, {
                status: "skipped",
                finishedAt,
                durationMs,
            });
            config?.onTaskSkipped?.(taskName, commandName);
            // Defer so the scheduler is back at its idle yield before the skip lands.
            setImmediate(() => {
                advanceScheduler({ type: "skip", taskId });
            });
        }
        else {
            // At least one inner task ran and all of them succeeded.
            updateTaskStatus(taskId, {
                status: "completed",
                finishedAt,
                durationMs,
            });
            config?.onTaskComplete?.(taskName);
            advanceScheduler({ type: "complete", taskId });
        }
    });
}
|
|
133
|
+
/**
 * Spawns and supervises a regular (non-pipeline) task.
 *
 * Resolves the command for the active command name — skipping or failing
 * (in strict mode) when the task does not define it — validates the working
 * directory, spawns the child with local node_modules/.bin prepended to
 * PATH and a merged environment, tracks readiness (immediate, or via a
 * `readyWhen` stdout predicate with an optional timeout), and reports
 * completion or failure back to the scheduler.
 *
 * Bug fix: the command-name fallback previously parsed as
 * `(config?.command ?? (NODE_ENV === "production")) ? "build" : "dev"`
 * because `??` binds tighter than the conditional operator, which discarded
 * any explicit `config.command`. Parentheses now make it win when provided.
 */
function executeRegularTask(task, taskId, taskName, executorConfig) {
    const { spawn: spawnFn, signal, config, runningTasks, teardownManager, failPipeline, advanceScheduler, updateTaskStatus, } = executorConfig;
    const { allowSkip, commands, cwd, env: taskEnv } = task[$TASK_INTERNAL];
    // Explicit command wins; otherwise derive from NODE_ENV.
    const commandName = config?.command ?? (process.env.NODE_ENV === "production" ? "build" : "dev");
    const commandConfig = commands[commandName];
    if (commandConfig === undefined) {
        const strict = config?.strict ?? false;
        // Strict mode: a missing command is a hard failure unless the task
        // explicitly opted into skipping.
        if (strict && !allowSkip) {
            const error = new PipelineError(`[${taskName}] No command for "${commandName}" and strict mode is enabled`, PipelineError.TaskFailed, taskName);
            updateTaskStatus(taskId, {
                status: "failed",
                command: commandName,
                finishedAt: Date.now(),
                error,
            });
            failPipeline(error);
            return;
        }
        // Skip the task; a skip satisfies dependencies and unblocks dependents.
        updateTaskStatus(taskId, {
            status: "skipped",
            command: commandName,
            finishedAt: Date.now(),
            durationMs: 0,
        });
        config?.onTaskSkipped?.(taskName, commandName);
        // Defer so the scheduler is back at its idle yield before the skip lands.
        setImmediate(() => {
            advanceScheduler({ type: "skip", taskId });
        });
        return;
    }
    // Normalize the command config (string shorthand vs. full object).
    let command;
    let readyWhen;
    let readyTimeout = Infinity;
    let teardown;
    let commandEnv = {};
    if (typeof commandConfig === "string") {
        command = commandConfig;
    }
    else {
        command = commandConfig.run;
        readyWhen = commandConfig.readyWhen;
        readyTimeout = commandConfig.readyTimeout ?? Infinity;
        teardown = commandConfig.teardown;
        commandEnv = commandConfig.env ?? {};
    }
    const taskCwd = path.isAbsolute(cwd) ? cwd : path.resolve(process.cwd(), cwd);
    if (!fs.existsSync(taskCwd)) {
        const pipelineError = new PipelineError(`[${taskName}] Working directory does not exist: ${taskCwd}`, PipelineError.InvalidTask, taskName);
        updateTaskStatus(taskId, {
            status: "failed",
            command: commandName,
            finishedAt: Date.now(),
            durationMs: 0,
            error: pipelineError,
        });
        failPipeline(pipelineError);
        return;
    }
    // Prepend local node_modules/.bin directories so project binaries resolve.
    // path.delimiter is ";" on Windows and ":" elsewhere.
    const accumulatedPath = [
        path.join(taskCwd, "node_modules", ".bin"),
        path.join(process.cwd(), "node_modules", ".bin"),
        process.env.PATH,
    ]
        .filter(Boolean)
        .join(path.delimiter);
    // Env precedence: process.env -> pipeline.env -> task.env -> command.env.
    const accumulatedEnv = {
        ...process.env,
        PATH: accumulatedPath,
        Path: accumulatedPath, // Windows spells it "Path"
        ...config?.env,
        ...taskEnv,
        ...commandEnv,
    };
    const child = spawnFn(command, {
        cwd: taskCwd,
        stdio: ["inherit", "pipe", "pipe"],
        shell: true,
        env: accumulatedEnv,
    });
    runningTasks.set(taskId, child);
    const startedAt = Date.now();
    updateTaskStatus(taskId, {
        status: "running",
        command: commandName,
        startedAt,
    });
    // Teardown is not run here; the manager executes it later in reverse
    // dependency order when the pipeline completes or fails.
    if (teardown) {
        teardownManager.register(taskId, {
            command: teardown,
            cwd: taskCwd,
            taskName,
        });
    }
    config?.onTaskBegin?.(taskName);
    let didMarkReady = false;
    let readyTimeoutId = null;
    // Shared cancel for the readiness timeout.
    const clearReadyTimeout = () => {
        if (readyTimeoutId) {
            clearTimeout(readyTimeoutId);
            readyTimeoutId = null;
        }
    };
    if (!readyWhen) {
        // No readiness predicate: the task is ready as soon as it spawns.
        advanceScheduler({ type: "ready", taskId });
        didMarkReady = true;
    }
    else if (readyTimeout !== Infinity) {
        // Fail the pipeline if the readiness predicate never matches in time.
        readyTimeoutId = setTimeout(() => {
            if (!didMarkReady) {
                const finishedAt = Date.now();
                const pipelineError = new PipelineError(`[${taskName}] Task did not become ready within ${readyTimeout}ms`, PipelineError.TaskFailed, taskName);
                updateTaskStatus(taskId, {
                    status: "failed",
                    finishedAt,
                    durationMs: finishedAt - startedAt,
                    error: pipelineError,
                });
                failPipeline(pipelineError);
            }
        }, readyTimeout);
    }
    let output = "";
    child.stdout?.on("data", (buf) => {
        // Stop mirroring/scanning output once the run is aborted.
        if (signal?.aborted) {
            return;
        }
        const chunk = buf.toString();
        output += chunk;
        process.stdout.write(chunk);
        if (!didMarkReady && readyWhen && readyWhen(output)) {
            clearReadyTimeout();
            advanceScheduler({ type: "ready", taskId });
            didMarkReady = true;
        }
    });
    child.stderr?.on("data", (buf) => {
        process.stderr.write(buf);
    });
    child.on("error", (error) => {
        // Spawn failure: the task never actually ran, so its teardown must not run.
        teardownManager.unregister(taskId);
        clearReadyTimeout();
        const finishedAt = Date.now();
        const pipelineError = new PipelineError(`[${taskName}] Failed to start: ${error.message}`, PipelineError.TaskFailed, taskName);
        updateTaskStatus(taskId, {
            status: "failed",
            finishedAt,
            durationMs: finishedAt - startedAt,
            error: pipelineError,
        });
        failPipeline(pipelineError);
    });
    // `exitSignal` is the POSIX signal that killed the child — renamed so it
    // no longer shadows the run's AbortSignal destructured above.
    child.on("exit", (code, exitSignal) => {
        runningTasks.delete(taskId);
        clearReadyTimeout();
        const finishedAt = Date.now();
        const durationMs = finishedAt - startedAt;
        if (code !== 0 || exitSignal) {
            const pipelineError = new PipelineError(`[${taskName}] Task failed with non-zero exit code: ${code ?? exitSignal}`, PipelineError.TaskFailed, taskName);
            updateTaskStatus(taskId, {
                status: "failed",
                finishedAt,
                durationMs,
                exitCode: code ?? undefined,
                signal: exitSignal ?? undefined,
                error: pipelineError,
            });
            failPipeline(pipelineError);
            return;
        }
        updateTaskStatus(taskId, {
            status: "completed",
            finishedAt,
            durationMs,
            exitCode: code ?? 0,
        });
        config?.onTaskComplete?.(taskName);
        // Notify scheduler and drain newly runnable tasks.
        advanceScheduler({ type: "complete", taskId });
    });
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import type { TaskGraph } from "../types.js";
|
|
2
|
+
export interface TeardownCommand {
|
|
3
|
+
command: string;
|
|
4
|
+
cwd: string;
|
|
5
|
+
taskName: string;
|
|
6
|
+
}
|
|
7
|
+
export interface TeardownManagerConfig {
|
|
8
|
+
spawn: typeof import("node:child_process").spawn;
|
|
9
|
+
onTaskTeardown?: (taskName: string) => void;
|
|
10
|
+
onTaskTeardownError?: (taskName: string, error: Error) => void;
|
|
11
|
+
updateTaskTeardownStatus: (taskId: string, status: "not-run" | "completed" | "failed", error?: Error) => void;
|
|
12
|
+
}
|
|
13
|
+
export interface TeardownManager {
|
|
14
|
+
register(taskId: string, teardown: TeardownCommand): void;
|
|
15
|
+
unregister(taskId: string): void;
|
|
16
|
+
executeAll(graph: TaskGraph): Promise<void>;
|
|
17
|
+
}
|
|
18
|
+
/**
|
|
19
|
+
* Creates a teardown manager for a pipeline.
|
|
20
|
+
* Manages teardown commands for tasks, handling execution order and error reporting.
|
|
21
|
+
*/
|
|
22
|
+
export declare function createTeardownManager(config: TeardownManagerConfig): TeardownManager;
|
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Creates a teardown manager for a pipeline.
|
|
3
|
+
* Manages teardown commands for tasks, handling execution order and error reporting.
|
|
4
|
+
*/
|
|
5
|
+
export function createTeardownManager(config) {
|
|
6
|
+
const teardownCommands = new Map();
|
|
7
|
+
/**
|
|
8
|
+
* Executes a single teardown command.
|
|
9
|
+
*/
|
|
10
|
+
const executeTeardown = (taskId) => {
|
|
11
|
+
const teardown = teardownCommands.get(taskId);
|
|
12
|
+
if (!teardown) {
|
|
13
|
+
// No teardown registered, mark as not-run
|
|
14
|
+
config.updateTaskTeardownStatus(taskId, "not-run");
|
|
15
|
+
return Promise.resolve();
|
|
16
|
+
}
|
|
17
|
+
// Remove from map so it doesn't run again
|
|
18
|
+
teardownCommands.delete(taskId);
|
|
19
|
+
config.onTaskTeardown?.(teardown.taskName);
|
|
20
|
+
return new Promise((resolve) => {
|
|
21
|
+
try {
|
|
22
|
+
const teardownProcess = config.spawn(teardown.command, {
|
|
23
|
+
cwd: teardown.cwd,
|
|
24
|
+
stdio: "inherit",
|
|
25
|
+
shell: true,
|
|
26
|
+
});
|
|
27
|
+
let resolved = false;
|
|
28
|
+
const resolveOnce = () => {
|
|
29
|
+
if (!resolved) {
|
|
30
|
+
resolved = true;
|
|
31
|
+
resolve();
|
|
32
|
+
}
|
|
33
|
+
};
|
|
34
|
+
teardownProcess.on("error", (error) => {
|
|
35
|
+
const teardownError = new Error(`[${teardown.taskName}] Teardown failed: ${error.message}`);
|
|
36
|
+
config.onTaskTeardownError?.(teardown.taskName, teardownError);
|
|
37
|
+
config.updateTaskTeardownStatus(taskId, "failed", teardownError);
|
|
38
|
+
resolveOnce();
|
|
39
|
+
});
|
|
40
|
+
teardownProcess.on("exit", (code) => {
|
|
41
|
+
if (code !== 0) {
|
|
42
|
+
const teardownError = new Error(`[${teardown.taskName}] Teardown failed with exit code ${code ?? 1}`);
|
|
43
|
+
config.onTaskTeardownError?.(teardown.taskName, teardownError);
|
|
44
|
+
config.updateTaskTeardownStatus(taskId, "failed", teardownError);
|
|
45
|
+
}
|
|
46
|
+
else {
|
|
47
|
+
config.updateTaskTeardownStatus(taskId, "completed");
|
|
48
|
+
}
|
|
49
|
+
resolveOnce();
|
|
50
|
+
});
|
|
51
|
+
}
|
|
52
|
+
catch (error) {
|
|
53
|
+
const teardownError = new Error(`[${teardown.taskName}] Teardown failed to start: ${error.message}`);
|
|
54
|
+
config.onTaskTeardownError?.(teardown.taskName, teardownError);
|
|
55
|
+
config.updateTaskTeardownStatus(taskId, "failed", teardownError);
|
|
56
|
+
resolve();
|
|
57
|
+
}
|
|
58
|
+
});
|
|
59
|
+
};
|
|
60
|
+
return {
|
|
61
|
+
/**
|
|
62
|
+
* Registers a teardown command for a task.
|
|
63
|
+
*/
|
|
64
|
+
register(taskId, teardown) {
|
|
65
|
+
teardownCommands.set(taskId, teardown);
|
|
66
|
+
},
|
|
67
|
+
/**
|
|
68
|
+
* Removes a teardown command (e.g., if task failed before starting).
|
|
69
|
+
*/
|
|
70
|
+
unregister(taskId) {
|
|
71
|
+
teardownCommands.delete(taskId);
|
|
72
|
+
},
|
|
73
|
+
/**
|
|
74
|
+
* Executes all registered teardowns in reverse dependency order.
|
|
75
|
+
* Tasks with dependents are torn down before their dependencies.
|
|
76
|
+
*/
|
|
77
|
+
async executeAll(graph) {
|
|
78
|
+
const taskIdsWithTeardown = Array.from(teardownCommands.keys());
|
|
79
|
+
if (taskIdsWithTeardown.length === 0) {
|
|
80
|
+
return;
|
|
81
|
+
}
|
|
82
|
+
// Calculate reverse topological order
|
|
83
|
+
const teardownOrder = getReverseDependencyOrder(taskIdsWithTeardown, graph);
|
|
84
|
+
// Execute teardowns sequentially in reverse dependency order
|
|
85
|
+
for (const taskId of teardownOrder) {
|
|
86
|
+
await executeTeardown(taskId);
|
|
87
|
+
}
|
|
88
|
+
// Mark any remaining tasks (that had teardown registered but weren't in the order) as not-run
|
|
89
|
+
for (const taskId of teardownCommands.keys()) {
|
|
90
|
+
config.updateTaskTeardownStatus(taskId, "not-run");
|
|
91
|
+
}
|
|
92
|
+
},
|
|
93
|
+
};
|
|
94
|
+
}
|
|
95
|
+
/**
|
|
96
|
+
* Calculates the reverse dependency order for teardown execution.
|
|
97
|
+
* Tasks that have dependents should be torn down first.
|
|
98
|
+
* If api depends on db, we want: api first, then db.
|
|
99
|
+
*/
|
|
100
|
+
function getReverseDependencyOrder(taskIds, graph) {
|
|
101
|
+
const taskIdSet = new Set(taskIds);
|
|
102
|
+
// Count how many dependencies each task has (within the teardown set)
|
|
103
|
+
const dependencyCount = new Map();
|
|
104
|
+
for (const taskId of taskIds) {
|
|
105
|
+
const node = graph.nodes.get(taskId);
|
|
106
|
+
if (node) {
|
|
107
|
+
let count = 0;
|
|
108
|
+
for (const depId of node.dependencies) {
|
|
109
|
+
if (taskIdSet.has(depId)) {
|
|
110
|
+
count++;
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
dependencyCount.set(taskId, count);
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
// Build result in reverse order
|
|
117
|
+
const result = [];
|
|
118
|
+
const visited = new Set();
|
|
119
|
+
const queue = [];
|
|
120
|
+
// Find leaf nodes (tasks with no dependencies in teardown set)
|
|
121
|
+
for (const taskId of taskIds) {
|
|
122
|
+
if (dependencyCount.get(taskId) === 0) {
|
|
123
|
+
queue.push(taskId);
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
// Process in reverse topological order
|
|
127
|
+
while (queue.length > 0) {
|
|
128
|
+
const taskId = queue.shift();
|
|
129
|
+
if (visited.has(taskId))
|
|
130
|
+
continue;
|
|
131
|
+
visited.add(taskId);
|
|
132
|
+
// Add to front (so we get reverse order: dependents before dependencies)
|
|
133
|
+
result.unshift(taskId);
|
|
134
|
+
// Find tasks that depend on this one (dependents)
|
|
135
|
+
const node = graph.nodes.get(taskId);
|
|
136
|
+
if (node) {
|
|
137
|
+
for (const dependentId of node.dependents) {
|
|
138
|
+
if (taskIdSet.has(dependentId) && !visited.has(dependentId)) {
|
|
139
|
+
const currentCount = dependencyCount.get(dependentId) ?? 0;
|
|
140
|
+
const newCount = currentCount - 1;
|
|
141
|
+
dependencyCount.set(dependentId, newCount);
|
|
142
|
+
// When a dependent has no more dependencies to wait for, add it to queue
|
|
143
|
+
if (newCount === 0) {
|
|
144
|
+
queue.push(dependentId);
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
}
|
|
150
|
+
// Add any remaining tasks (shouldn't happen in a valid graph, but handle it)
|
|
151
|
+
for (const taskId of taskIds) {
|
|
152
|
+
if (!visited.has(taskId)) {
|
|
153
|
+
result.unshift(taskId);
|
|
154
|
+
}
|
|
155
|
+
}
|
|
156
|
+
return result;
|
|
157
|
+
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
export type PipelineErrorCode = typeof PipelineError.Aborted | typeof PipelineError.ProcessTerminated | typeof PipelineError.TaskFailed | typeof PipelineError.InvalidSignal | typeof PipelineError.InvalidTask;
|
|
2
|
+
export declare class PipelineError extends Error {
|
|
3
|
+
readonly code: PipelineErrorCode;
|
|
4
|
+
readonly taskName?: string;
|
|
5
|
+
constructor(message: string, code: PipelineErrorCode, taskName?: string);
|
|
6
|
+
static Aborted: 0;
|
|
7
|
+
static ProcessTerminated: 1;
|
|
8
|
+
static TaskFailed: 2;
|
|
9
|
+
static InvalidSignal: 3;
|
|
10
|
+
static InvalidTask: 4;
|
|
11
|
+
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
export class PipelineError extends Error {
|
|
2
|
+
constructor(message, code, taskName) {
|
|
3
|
+
super(message);
|
|
4
|
+
Object.defineProperty(this, "code", {
|
|
5
|
+
enumerable: true,
|
|
6
|
+
configurable: true,
|
|
7
|
+
writable: true,
|
|
8
|
+
value: void 0
|
|
9
|
+
});
|
|
10
|
+
Object.defineProperty(this, "taskName", {
|
|
11
|
+
enumerable: true,
|
|
12
|
+
configurable: true,
|
|
13
|
+
writable: true,
|
|
14
|
+
value: void 0
|
|
15
|
+
});
|
|
16
|
+
this.name = "PipelineError";
|
|
17
|
+
this.code = code;
|
|
18
|
+
this.taskName = taskName;
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
Object.defineProperty(PipelineError, "Aborted", {
|
|
22
|
+
enumerable: true,
|
|
23
|
+
configurable: true,
|
|
24
|
+
writable: true,
|
|
25
|
+
value: 0
|
|
26
|
+
});
|
|
27
|
+
Object.defineProperty(PipelineError, "ProcessTerminated", {
|
|
28
|
+
enumerable: true,
|
|
29
|
+
configurable: true,
|
|
30
|
+
writable: true,
|
|
31
|
+
value: 1
|
|
32
|
+
});
|
|
33
|
+
Object.defineProperty(PipelineError, "TaskFailed", {
|
|
34
|
+
enumerable: true,
|
|
35
|
+
configurable: true,
|
|
36
|
+
writable: true,
|
|
37
|
+
value: 2
|
|
38
|
+
});
|
|
39
|
+
Object.defineProperty(PipelineError, "InvalidSignal", {
|
|
40
|
+
enumerable: true,
|
|
41
|
+
configurable: true,
|
|
42
|
+
writable: true,
|
|
43
|
+
value: 3
|
|
44
|
+
});
|
|
45
|
+
Object.defineProperty(PipelineError, "InvalidTask", {
|
|
46
|
+
enumerable: true,
|
|
47
|
+
configurable: true,
|
|
48
|
+
writable: true,
|
|
49
|
+
value: 4
|
|
50
|
+
});
|