builderman 1.2.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,38 @@
1
/**
 * Creates a signal handler for pipeline execution.
 * Handles process termination signals (SIGINT, SIGTERM, etc.) and abort signals.
 */
export function createSignalHandler({ abortSignal, onAborted, onProcessTerminated, }) {
    // One-shot listener per termination signal; collect an undo function for each.
    const terminationSignals = ["SIGINT", "SIGTERM", "SIGQUIT", "SIGBREAK"];
    const undoTermination = terminationSignals.map((signalName) => {
        const onSignal = () => {
            onProcessTerminated(`Received ${signalName}`);
        };
        process.once(signalName, onSignal);
        return () => {
            process.removeListener(signalName, onSignal);
        };
    });
    // Optional abort-signal wiring; undo is null when no signal was supplied.
    let removeAbortListener = null;
    if (abortSignal) {
        abortSignal.addEventListener("abort", onAborted);
        removeAbortListener = () => {
            abortSignal.removeEventListener("abort", onAborted);
        };
    }
    return {
        /**
         * Cleans up all signal listeners.
         */
        cleanup() {
            for (const undo of undoTermination) {
                undo();
            }
            removeAbortListener?.();
        },
    };
}
@@ -0,0 +1,25 @@
1
+ import { type ChildProcess } from "node:child_process";
2
+ import { PipelineError } from "../pipeline-error.js";
3
+ import type { Task, Pipeline, PipelineRunConfig, TaskGraph, TaskStats } from "../types.js";
4
+ import type { SchedulerInput } from "../scheduler.js";
5
+ import type { TeardownManager } from "./teardown-manager.js";
6
+ export interface TaskExecutorConfig {
7
+ spawn: typeof import("node:child_process").spawn;
8
+ signal?: AbortSignal;
9
+ config?: PipelineRunConfig;
10
+ graph: TaskGraph;
11
+ runningTasks: Map<string, ChildProcess>;
12
+ runningPipelines: Map<string, {
13
+ stop: () => void;
14
+ }>;
15
+ teardownManager: TeardownManager;
16
+ pipelineTasksCache: WeakMap<Pipeline, Task[]>;
17
+ failPipeline: (error: PipelineError) => Promise<void>;
18
+ advanceScheduler: (input?: SchedulerInput) => void;
19
+ updateTaskStatus: (taskId: string, updates: Partial<TaskStats>) => void;
20
+ taskStats: Map<string, TaskStats>;
21
+ }
22
+ /**
23
+ * Executes a task (either a regular task or a nested pipeline).
24
+ */
25
+ export declare function executeTask(task: Task, executorConfig: TaskExecutorConfig): void;
@@ -0,0 +1,313 @@
1
+ import * as path from "node:path";
2
+ import * as fs from "node:fs";
3
+ import { $TASK_INTERNAL } from "../constants.js";
4
+ import { createTaskGraph } from "../graph.js";
5
+ import { PipelineError } from "../pipeline-error.js";
6
+ /**
7
+ * Executes a task (either a regular task or a nested pipeline).
8
+ */
9
+ export function executeTask(task, executorConfig) {
10
+ // Check if signal is aborted before starting new tasks
11
+ if (executorConfig.signal?.aborted) {
12
+ executorConfig.failPipeline(new PipelineError("Aborted", PipelineError.InvalidSignal));
13
+ return;
14
+ }
15
+ const { name: taskName, [$TASK_INTERNAL]: { id: taskId, pipeline: nestedPipeline }, } = task;
16
+ if (executorConfig.runningTasks.has(taskId))
17
+ return;
18
+ // Handle pipeline tasks
19
+ if (nestedPipeline) {
20
+ executeNestedPipeline(taskId, taskName, nestedPipeline, executorConfig);
21
+ return;
22
+ }
23
+ // Regular task execution
24
+ executeRegularTask(task, taskId, taskName, executorConfig);
25
+ }
26
+ function executeNestedPipeline(taskId, taskName, nestedPipeline, executorConfig) {
27
+ const { config, runningPipelines, pipelineTasksCache, failPipeline, advanceScheduler, updateTaskStatus, } = executorConfig;
28
+ // Track nested pipeline state for skip behavior
29
+ let nestedSkippedCount = 0;
30
+ let nestedCompletedCount = 0;
31
+ // Get total tasks from nested pipeline
32
+ const nestedTasks = pipelineTasksCache.get(nestedPipeline);
33
+ const nestedTotalTasks = nestedTasks
34
+ ? createTaskGraph(nestedTasks).nodes.size
35
+ : 0;
36
+ const commandName = (config?.command ?? process.env.NODE_ENV === "production") ? "build" : "dev";
37
+ // Mark as ready immediately (pipeline entry nodes will handle their own ready state)
38
+ const startedAt = Date.now();
39
+ updateTaskStatus(taskId, {
40
+ status: "running",
41
+ command: commandName,
42
+ startedAt,
43
+ });
44
+ advanceScheduler({ type: "ready", taskId });
45
+ config?.onTaskBegin?.(taskName);
46
+ // Create an abort controller to stop the nested pipeline if needed
47
+ let pipelineStopped = false;
48
+ const stopPipeline = () => {
49
+ pipelineStopped = true;
50
+ // The nested pipeline will continue running, but we've marked it as stopped
51
+ // In a more sophisticated implementation, we could propagate stop signals
52
+ };
53
+ runningPipelines.set(taskId, { stop: stopPipeline });
54
+ // Run the nested pipeline with signal propagation
55
+ nestedPipeline
56
+ .run({
57
+ spawn: executorConfig.spawn,
58
+ command: config?.command,
59
+ strict: config?.strict,
60
+ signal: executorConfig.signal, // Pass signal to nested pipeline
61
+ onTaskBegin: (nestedTaskName) => {
62
+ if (pipelineStopped)
63
+ return;
64
+ config?.onTaskBegin?.(`${taskName}:${nestedTaskName}`);
65
+ },
66
+ onTaskComplete: (nestedTaskName) => {
67
+ if (pipelineStopped)
68
+ return;
69
+ nestedCompletedCount++;
70
+ config?.onTaskComplete?.(`${taskName}:${nestedTaskName}`);
71
+ },
72
+ onTaskSkipped: (nestedTaskName, mode) => {
73
+ if (pipelineStopped)
74
+ return;
75
+ nestedSkippedCount++;
76
+ config?.onTaskSkipped?.(`${taskName}:${nestedTaskName}`, mode);
77
+ },
78
+ })
79
+ .then((result) => {
80
+ if (pipelineStopped)
81
+ return;
82
+ runningPipelines.delete(taskId);
83
+ if (!result.ok) {
84
+ // Nested pipeline failed
85
+ const finishedAt = Date.now();
86
+ updateTaskStatus(taskId, {
87
+ status: "failed",
88
+ finishedAt,
89
+ durationMs: finishedAt - startedAt,
90
+ error: result.error,
91
+ });
92
+ failPipeline(result.error);
93
+ return;
94
+ }
95
+ // Determine nested pipeline result based on skip behavior:
96
+ // - If all inner tasks are skipped → outer task is skipped
97
+ // - If some run, some skip → outer task is completed
98
+ // - If any fail → outer task fails (handled above)
99
+ if (nestedSkippedCount === nestedTotalTasks && nestedTotalTasks > 0) {
100
+ // All tasks were skipped
101
+ const finishedAt = Date.now();
102
+ updateTaskStatus(taskId, {
103
+ status: "skipped",
104
+ finishedAt,
105
+ durationMs: finishedAt - startedAt,
106
+ });
107
+ config?.onTaskSkipped?.(taskName, commandName);
108
+ setImmediate(() => {
109
+ advanceScheduler({ type: "skip", taskId });
110
+ });
111
+ }
112
+ else {
113
+ // Some tasks ran (and completed successfully)
114
+ const finishedAt = Date.now();
115
+ updateTaskStatus(taskId, {
116
+ status: "completed",
117
+ finishedAt,
118
+ durationMs: finishedAt - startedAt,
119
+ });
120
+ config?.onTaskComplete?.(taskName);
121
+ advanceScheduler({ type: "complete", taskId });
122
+ }
123
+ });
124
+ }
125
+ function executeRegularTask(task, taskId, taskName, executorConfig) {
126
+ const { spawn: spawnFn, signal, config, runningTasks, teardownManager, failPipeline, advanceScheduler, updateTaskStatus, } = executorConfig;
127
+ const commandName = (config?.command ?? process.env.NODE_ENV === "production") ? "build" : "dev";
128
+ const commandConfig = task[$TASK_INTERNAL].commands[commandName];
129
+ // Check if command exists
130
+ if (commandConfig === undefined) {
131
+ const allowSkip = task[$TASK_INTERNAL].allowSkip ?? false;
132
+ const strict = config?.strict ?? false;
133
+ // If strict mode and not explicitly allowed to skip, fail
134
+ if (strict && !allowSkip) {
135
+ const error = new PipelineError(`[${taskName}] No command for "${commandName}" and strict mode is enabled`, PipelineError.TaskFailed, taskName);
136
+ const finishedAt = Date.now();
137
+ updateTaskStatus(taskId, {
138
+ status: "failed",
139
+ command: commandName,
140
+ finishedAt,
141
+ error,
142
+ });
143
+ failPipeline(error);
144
+ return;
145
+ }
146
+ // Skip the task
147
+ const finishedAt = Date.now();
148
+ updateTaskStatus(taskId, {
149
+ status: "skipped",
150
+ command: commandName,
151
+ finishedAt,
152
+ durationMs: 0,
153
+ });
154
+ config?.onTaskSkipped?.(taskName, commandName);
155
+ // Mark as skipped - this satisfies dependencies and unblocks dependents
156
+ // Use setImmediate to ensure scheduler is at idle yield before receiving skip
157
+ setImmediate(() => {
158
+ advanceScheduler({ type: "skip", taskId });
159
+ });
160
+ return;
161
+ }
162
+ const command = typeof commandConfig === "string" ? commandConfig : commandConfig.run;
163
+ const readyWhen = typeof commandConfig === "string" ? undefined : commandConfig.readyWhen;
164
+ const readyTimeout = typeof commandConfig === "string"
165
+ ? Infinity
166
+ : (commandConfig.readyTimeout ?? Infinity);
167
+ const teardown = typeof commandConfig === "string" ? undefined : commandConfig.teardown;
168
+ const { cwd } = task[$TASK_INTERNAL];
169
+ const taskCwd = path.isAbsolute(cwd) ? cwd : path.resolve(process.cwd(), cwd);
170
+ if (!fs.existsSync(taskCwd)) {
171
+ const finishedAt = Date.now();
172
+ const pipelineError = new PipelineError(`[${taskName}] Working directory does not exist: ${taskCwd}`, PipelineError.InvalidTask, taskName);
173
+ updateTaskStatus(taskId, {
174
+ status: "failed",
175
+ command: commandName,
176
+ finishedAt,
177
+ durationMs: 0,
178
+ error: pipelineError,
179
+ });
180
+ failPipeline(pipelineError);
181
+ return;
182
+ }
183
+ const accumulatedPath = [
184
+ path.join(taskCwd, "node_modules", ".bin"),
185
+ path.join(process.cwd(), "node_modules", ".bin"),
186
+ process.env.PATH,
187
+ ]
188
+ .filter(Boolean)
189
+ .join(process.platform === "win32" ? ";" : ":");
190
+ const env = {
191
+ ...process.env,
192
+ PATH: accumulatedPath,
193
+ Path: accumulatedPath,
194
+ };
195
+ const child = spawnFn(command, {
196
+ cwd: taskCwd,
197
+ stdio: ["inherit", "pipe", "pipe"],
198
+ shell: true,
199
+ env,
200
+ });
201
+ runningTasks.set(taskId, child);
202
+ const startedAt = Date.now();
203
+ updateTaskStatus(taskId, {
204
+ status: "running",
205
+ command: commandName,
206
+ startedAt,
207
+ });
208
+ // Store teardown command if provided
209
+ if (teardown) {
210
+ teardownManager.register(taskId, {
211
+ command: teardown,
212
+ cwd: taskCwd,
213
+ taskName,
214
+ });
215
+ }
216
+ config?.onTaskBegin?.(taskName);
217
+ let didMarkReady = false;
218
+ let readyTimeoutId = null;
219
+ if (!readyWhen) {
220
+ advanceScheduler({ type: "ready", taskId });
221
+ didMarkReady = true;
222
+ }
223
+ else if (readyTimeout !== Infinity) {
224
+ // Set up timeout for readyWhen condition
225
+ readyTimeoutId = setTimeout(() => {
226
+ if (!didMarkReady) {
227
+ const finishedAt = Date.now();
228
+ const pipelineError = new PipelineError(`[${taskName}] Task did not become ready within ${readyTimeout}ms`, PipelineError.TaskFailed, taskName);
229
+ updateTaskStatus(taskId, {
230
+ status: "failed",
231
+ finishedAt,
232
+ durationMs: finishedAt - startedAt,
233
+ error: pipelineError,
234
+ });
235
+ failPipeline(pipelineError);
236
+ }
237
+ }, readyTimeout);
238
+ }
239
+ let output = "";
240
+ child.stdout?.on("data", (buf) => {
241
+ // Check if signal is aborted before processing stdout
242
+ if (signal?.aborted) {
243
+ return;
244
+ }
245
+ const chunk = buf.toString();
246
+ output += chunk;
247
+ process.stdout.write(chunk);
248
+ if (!didMarkReady && readyWhen && readyWhen(output)) {
249
+ if (readyTimeoutId) {
250
+ clearTimeout(readyTimeoutId);
251
+ readyTimeoutId = null;
252
+ }
253
+ advanceScheduler({ type: "ready", taskId });
254
+ didMarkReady = true;
255
+ }
256
+ });
257
+ child.stderr?.on("data", (buf) => {
258
+ process.stderr.write(buf);
259
+ });
260
+ child.on("error", (error) => {
261
+ // Task failed before entering running state, so don't execute teardown
262
+ // Remove teardown from map since it was never actually running
263
+ teardownManager.unregister(taskId);
264
+ // Clear ready timeout if it exists
265
+ if (readyTimeoutId) {
266
+ clearTimeout(readyTimeoutId);
267
+ readyTimeoutId = null;
268
+ }
269
+ const finishedAt = Date.now();
270
+ const pipelineError = new PipelineError(`[${taskName}] Failed to start: ${error.message}`, PipelineError.TaskFailed, taskName);
271
+ updateTaskStatus(taskId, {
272
+ status: "failed",
273
+ finishedAt,
274
+ durationMs: finishedAt - startedAt,
275
+ error: pipelineError,
276
+ });
277
+ failPipeline(pipelineError);
278
+ });
279
+ child.on("exit", (code, signal) => {
280
+ runningTasks.delete(taskId);
281
+ // Clear ready timeout if it exists
282
+ if (readyTimeoutId) {
283
+ clearTimeout(readyTimeoutId);
284
+ readyTimeoutId = null;
285
+ }
286
+ const finishedAt = Date.now();
287
+ const durationMs = finishedAt - startedAt;
288
+ // Don't execute teardown immediately - it will be executed in reverse dependency order
289
+ // when the pipeline completes or fails
290
+ if (code !== 0 || signal) {
291
+ const pipelineError = new PipelineError(`[${taskName}] Task failed with non-zero exit code: ${code ?? signal}`, PipelineError.TaskFailed, taskName);
292
+ updateTaskStatus(taskId, {
293
+ status: "failed",
294
+ finishedAt,
295
+ durationMs,
296
+ exitCode: code ?? undefined,
297
+ signal: signal ?? undefined,
298
+ error: pipelineError,
299
+ });
300
+ failPipeline(pipelineError);
301
+ return;
302
+ }
303
+ updateTaskStatus(taskId, {
304
+ status: "completed",
305
+ finishedAt,
306
+ durationMs,
307
+ exitCode: code ?? 0,
308
+ });
309
+ config?.onTaskComplete?.(taskName);
310
+ // 🔑 Notify scheduler and drain newly runnable tasks
311
+ advanceScheduler({ type: "complete", taskId });
312
+ });
313
+ }
@@ -0,0 +1,22 @@
1
+ import type { TaskGraph } from "../types.js";
2
+ export interface TeardownCommand {
3
+ command: string;
4
+ cwd: string;
5
+ taskName: string;
6
+ }
7
+ export interface TeardownManagerConfig {
8
+ spawn: typeof import("node:child_process").spawn;
9
+ onTaskTeardown?: (taskName: string) => void;
10
+ onTaskTeardownError?: (taskName: string, error: Error) => void;
11
+ updateTaskTeardownStatus: (taskId: string, status: "not-run" | "completed" | "failed", error?: Error) => void;
12
+ }
13
+ export interface TeardownManager {
14
+ register(taskId: string, teardown: TeardownCommand): void;
15
+ unregister(taskId: string): void;
16
+ executeAll(graph: TaskGraph): Promise<void>;
17
+ }
18
+ /**
19
+ * Creates a teardown manager for a pipeline.
20
+ * Manages teardown commands for tasks, handling execution order and error reporting.
21
+ */
22
+ export declare function createTeardownManager(config: TeardownManagerConfig): TeardownManager;
@@ -0,0 +1,157 @@
1
+ /**
2
+ * Creates a teardown manager for a pipeline.
3
+ * Manages teardown commands for tasks, handling execution order and error reporting.
4
+ */
5
+ export function createTeardownManager(config) {
6
+ const teardownCommands = new Map();
7
+ /**
8
+ * Executes a single teardown command.
9
+ */
10
+ const executeTeardown = (taskId) => {
11
+ const teardown = teardownCommands.get(taskId);
12
+ if (!teardown) {
13
+ // No teardown registered, mark as not-run
14
+ config.updateTaskTeardownStatus(taskId, "not-run");
15
+ return Promise.resolve();
16
+ }
17
+ // Remove from map so it doesn't run again
18
+ teardownCommands.delete(taskId);
19
+ config.onTaskTeardown?.(teardown.taskName);
20
+ return new Promise((resolve) => {
21
+ try {
22
+ const teardownProcess = config.spawn(teardown.command, {
23
+ cwd: teardown.cwd,
24
+ stdio: "inherit",
25
+ shell: true,
26
+ });
27
+ let resolved = false;
28
+ const resolveOnce = () => {
29
+ if (!resolved) {
30
+ resolved = true;
31
+ resolve();
32
+ }
33
+ };
34
+ teardownProcess.on("error", (error) => {
35
+ const teardownError = new Error(`[${teardown.taskName}] Teardown failed: ${error.message}`);
36
+ config.onTaskTeardownError?.(teardown.taskName, teardownError);
37
+ config.updateTaskTeardownStatus(taskId, "failed", teardownError);
38
+ resolveOnce();
39
+ });
40
+ teardownProcess.on("exit", (code) => {
41
+ if (code !== 0) {
42
+ const teardownError = new Error(`[${teardown.taskName}] Teardown failed with exit code ${code ?? 1}`);
43
+ config.onTaskTeardownError?.(teardown.taskName, teardownError);
44
+ config.updateTaskTeardownStatus(taskId, "failed", teardownError);
45
+ }
46
+ else {
47
+ config.updateTaskTeardownStatus(taskId, "completed");
48
+ }
49
+ resolveOnce();
50
+ });
51
+ }
52
+ catch (error) {
53
+ const teardownError = new Error(`[${teardown.taskName}] Teardown failed to start: ${error.message}`);
54
+ config.onTaskTeardownError?.(teardown.taskName, teardownError);
55
+ config.updateTaskTeardownStatus(taskId, "failed", teardownError);
56
+ resolve();
57
+ }
58
+ });
59
+ };
60
+ return {
61
+ /**
62
+ * Registers a teardown command for a task.
63
+ */
64
+ register(taskId, teardown) {
65
+ teardownCommands.set(taskId, teardown);
66
+ },
67
+ /**
68
+ * Removes a teardown command (e.g., if task failed before starting).
69
+ */
70
+ unregister(taskId) {
71
+ teardownCommands.delete(taskId);
72
+ },
73
+ /**
74
+ * Executes all registered teardowns in reverse dependency order.
75
+ * Tasks with dependents are torn down before their dependencies.
76
+ */
77
+ async executeAll(graph) {
78
+ const taskIdsWithTeardown = Array.from(teardownCommands.keys());
79
+ if (taskIdsWithTeardown.length === 0) {
80
+ return;
81
+ }
82
+ // Calculate reverse topological order
83
+ const teardownOrder = getReverseDependencyOrder(taskIdsWithTeardown, graph);
84
+ // Execute teardowns sequentially in reverse dependency order
85
+ for (const taskId of teardownOrder) {
86
+ await executeTeardown(taskId);
87
+ }
88
+ // Mark any remaining tasks (that had teardown registered but weren't in the order) as not-run
89
+ for (const taskId of teardownCommands.keys()) {
90
+ config.updateTaskTeardownStatus(taskId, "not-run");
91
+ }
92
+ },
93
+ };
94
+ }
95
+ /**
96
+ * Calculates the reverse dependency order for teardown execution.
97
+ * Tasks that have dependents should be torn down first.
98
+ * If api depends on db, we want: api first, then db.
99
+ */
100
+ function getReverseDependencyOrder(taskIds, graph) {
101
+ const taskIdSet = new Set(taskIds);
102
+ // Count how many dependencies each task has (within the teardown set)
103
+ const dependencyCount = new Map();
104
+ for (const taskId of taskIds) {
105
+ const node = graph.nodes.get(taskId);
106
+ if (node) {
107
+ let count = 0;
108
+ for (const depId of node.dependencies) {
109
+ if (taskIdSet.has(depId)) {
110
+ count++;
111
+ }
112
+ }
113
+ dependencyCount.set(taskId, count);
114
+ }
115
+ }
116
+ // Build result in reverse order
117
+ const result = [];
118
+ const visited = new Set();
119
+ const queue = [];
120
+ // Find leaf nodes (tasks with no dependencies in teardown set)
121
+ for (const taskId of taskIds) {
122
+ if (dependencyCount.get(taskId) === 0) {
123
+ queue.push(taskId);
124
+ }
125
+ }
126
+ // Process in reverse topological order
127
+ while (queue.length > 0) {
128
+ const taskId = queue.shift();
129
+ if (visited.has(taskId))
130
+ continue;
131
+ visited.add(taskId);
132
+ // Add to front (so we get reverse order: dependents before dependencies)
133
+ result.unshift(taskId);
134
+ // Find tasks that depend on this one (dependents)
135
+ const node = graph.nodes.get(taskId);
136
+ if (node) {
137
+ for (const dependentId of node.dependents) {
138
+ if (taskIdSet.has(dependentId) && !visited.has(dependentId)) {
139
+ const currentCount = dependencyCount.get(dependentId) ?? 0;
140
+ const newCount = currentCount - 1;
141
+ dependencyCount.set(dependentId, newCount);
142
+ // When a dependent has no more dependencies to wait for, add it to queue
143
+ if (newCount === 0) {
144
+ queue.push(dependentId);
145
+ }
146
+ }
147
+ }
148
+ }
149
+ }
150
+ // Add any remaining tasks (shouldn't happen in a valid graph, but handle it)
151
+ for (const taskId of taskIds) {
152
+ if (!visited.has(taskId)) {
153
+ result.unshift(taskId);
154
+ }
155
+ }
156
+ return result;
157
+ }
@@ -0,0 +1,11 @@
1
+ export type PipelineErrorCode = typeof PipelineError.Aborted | typeof PipelineError.ProcessTerminated | typeof PipelineError.TaskFailed | typeof PipelineError.InvalidSignal | typeof PipelineError.InvalidTask;
2
+ export declare class PipelineError extends Error {
3
+ readonly code: PipelineErrorCode;
4
+ readonly taskName?: string;
5
+ constructor(message: string, code: PipelineErrorCode, taskName?: string);
6
+ static Aborted: 0;
7
+ static ProcessTerminated: 1;
8
+ static TaskFailed: 2;
9
+ static InvalidSignal: 3;
10
+ static InvalidTask: 4;
11
+ }
@@ -0,0 +1,50 @@
1
+ export class PipelineError extends Error {
2
+ constructor(message, code, taskName) {
3
+ super(message);
4
+ Object.defineProperty(this, "code", {
5
+ enumerable: true,
6
+ configurable: true,
7
+ writable: true,
8
+ value: void 0
9
+ });
10
+ Object.defineProperty(this, "taskName", {
11
+ enumerable: true,
12
+ configurable: true,
13
+ writable: true,
14
+ value: void 0
15
+ });
16
+ this.name = "PipelineError";
17
+ this.code = code;
18
+ this.taskName = taskName;
19
+ }
20
+ }
21
+ Object.defineProperty(PipelineError, "Aborted", {
22
+ enumerable: true,
23
+ configurable: true,
24
+ writable: true,
25
+ value: 0
26
+ });
27
+ Object.defineProperty(PipelineError, "ProcessTerminated", {
28
+ enumerable: true,
29
+ configurable: true,
30
+ writable: true,
31
+ value: 1
32
+ });
33
+ Object.defineProperty(PipelineError, "TaskFailed", {
34
+ enumerable: true,
35
+ configurable: true,
36
+ writable: true,
37
+ value: 2
38
+ });
39
+ Object.defineProperty(PipelineError, "InvalidSignal", {
40
+ enumerable: true,
41
+ configurable: true,
42
+ writable: true,
43
+ value: 3
44
+ });
45
+ Object.defineProperty(PipelineError, "InvalidTask", {
46
+ enumerable: true,
47
+ configurable: true,
48
+ writable: true,
49
+ value: 4
50
+ });
@@ -9,15 +9,3 @@ import type { Pipeline, Task } from "./types.js";
9
9
  * await pipeline([task1, task2]).run()
10
10
  */
11
11
  export declare function pipeline(tasks: Task[]): Pipeline;
12
- type PipelineErrorCode = typeof PipelineError.Aborted | typeof PipelineError.ProcessTerminated | typeof PipelineError.TaskFailed | typeof PipelineError.InvalidSignal | typeof PipelineError.InvalidTask;
13
- export declare class PipelineError extends Error {
14
- readonly code: PipelineErrorCode;
15
- readonly taskName?: string;
16
- constructor(message: string, code: PipelineErrorCode, taskName?: string);
17
- static Aborted: 0;
18
- static ProcessTerminated: 1;
19
- static TaskFailed: 2;
20
- static InvalidSignal: 3;
21
- static InvalidTask: 4;
22
- }
23
- export {};