@ryanfw/prompt-orchestration-pipeline 0.11.0 → 0.12.0
- package/package.json +2 -1
- package/src/components/DAGGrid.jsx +157 -47
- package/src/components/ui/RestartJobModal.jsx +26 -6
- package/src/components/ui/StopJobModal.jsx +183 -0
- package/src/core/config.js +7 -3
- package/src/core/lifecycle-policy.js +62 -0
- package/src/core/pipeline-runner.js +312 -217
- package/src/core/status-writer.js +84 -0
- package/src/pages/Code.jsx +8 -1
- package/src/pages/PipelineDetail.jsx +85 -3
- package/src/pages/PromptPipelineDashboard.jsx +10 -11
- package/src/ui/client/adapters/job-adapter.js +60 -0
- package/src/ui/client/api.js +233 -8
- package/src/ui/client/hooks/useJobList.js +14 -1
- package/src/ui/dist/app.js +262 -0
- package/src/ui/dist/assets/{index-DeDzq-Kk.js → index-B320avRx.js} +4854 -2104
- package/src/ui/dist/assets/index-B320avRx.js.map +1 -0
- package/src/ui/dist/assets/style-BYCoLBnK.css +62 -0
- package/src/ui/dist/favicon.svg +12 -0
- package/src/ui/dist/index.html +2 -2
- package/src/ui/endpoints/file-endpoints.js +330 -0
- package/src/ui/endpoints/job-control-endpoints.js +1001 -0
- package/src/ui/endpoints/job-endpoints.js +62 -0
- package/src/ui/endpoints/sse-endpoints.js +223 -0
- package/src/ui/endpoints/state-endpoint.js +85 -0
- package/src/ui/endpoints/upload-endpoints.js +406 -0
- package/src/ui/express-app.js +182 -0
- package/src/ui/server.js +38 -1880
- package/src/ui/sse-broadcast.js +93 -0
- package/src/ui/utils/http-utils.js +139 -0
- package/src/ui/utils/mime-types.js +196 -0
- package/src/ui/vite.config.js +22 -0
- package/src/utils/jobs.js +39 -0
- package/src/ui/dist/assets/style-aBtD_Yrs.css +0 -62
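
The new src/core/lifecycle-policy.js module is not included in the hunks below, but the pipeline-runner diff calls its decideTransition() helper with { op, taskState, dependenciesReady } and expects an { ok, reason } result; a rejected transition is turned into an error carrying httpStatus 409 and error "unsupported_lifecycle" for the job-control endpoints to handle. The following is only a minimal sketch of that implied contract, not the package's actual source; the state names and reason strings are assumptions.

// Sketch only: the contract implied by the decideTransition() call sites in
// the pipeline-runner diff below. Not the actual lifecycle-policy.js source;
// state values and reason strings are assumptions.
export function decideTransition({ op, taskState, dependenciesReady }) {
  if (op !== "start") {
    return { ok: false, reason: `unsupported_op:${op}` };
  }
  if (taskState === "running" || taskState === "done") {
    return { ok: false, reason: `task_already_${taskState}` };
  }
  if (!dependenciesReady) {
    return { ok: false, reason: "dependencies_not_ready" };
  }
  return { ok: true };
}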
package/src/core/pipeline-runner.js
@@ -1,4 +1,5 @@
 import fs from "node:fs/promises";
+import fsSync from "node:fs";
 import path from "node:path";
 import { runPipeline } from "./task-runner.js";
 import { loadFreshModule } from "./module-loader.js";
@@ -15,6 +16,7 @@ import {
 import { createTaskFileIO, generateLogName } from "./file-io.js";
 import { createJobLogger } from "./logger.js";
 import { LogEvent, LogFileExtension } from "../config/log-events.js";
+import { decideTransition } from "./lifecycle-policy.js";

 const ROOT = process.env.PO_ROOT || process.cwd();
 const DATA_DIR = path.join(ROOT, process.env.PO_DATA_DIR || "pipeline-data");
@@ -30,7 +32,44 @@ const logger = createJobLogger("PipelineRunner", jobId);

 const workDir = path.join(CURRENT_DIR, jobId);

+// Write runner PID file for stop functionality
+const runnerPidPath = path.join(workDir, "runner.pid");
+await fs.writeFile(runnerPidPath, `${process.pid}\n`, "utf8");
+
+// Cleanup function to remove PID file on any exit
+async function cleanupRunnerPid() {
+  try {
+    await fs.unlink(runnerPidPath);
+  } catch (error) {
+    // ENOENT means file doesn't exist, which is fine
+    if (error.code !== "ENOENT") {
+      console.error("Failed to cleanup runner PID file:", error);
+    }
+  }
+}
+
+// Register cleanup handlers for all exit scenarios
+// Use synchronous unlink for 'exit' handler since it doesn't allow async operations
+process.on("exit", () => {
+  try {
+    fsSync.unlinkSync(runnerPidPath);
+  } catch (error) {
+    if (error.code !== "ENOENT") {
+      console.error("Failed to cleanup runner PID file:", error);
+    }
+  }
+});
+process.on("SIGINT", async () => {
+  await cleanupRunnerPid();
+  process.exit(130);
+});
+process.on("SIGTERM", async () => {
+  await cleanupRunnerPid();
+  process.exit(143);
+});
+
 const startFromTask = process.env.PO_START_FROM_TASK;
+const runSingleTask = process.env.PO_RUN_SINGLE_TASK === "true";

 // Get pipeline slug from environment or fallback to seed.json
 let pipelineSlug = process.env.PO_PIPELINE_SLUG;
@@ -79,150 +118,255 @@ logger.group("Pipeline execution", {
   pipelineSlug,
   totalTasks: pipeline.tasks.length,
   startFromTask: startFromTask || null,
+  runSingleTask,
 });

-
-
-
-
-    pipeline.tasks.indexOf(taskName) < pipeline.tasks.indexOf(startFromTask)
-  ) {
-    logger.log("Skipping task before restart point", {
-      taskName,
-      startFromTask,
-    });
-    continue;
-  }
+// Helper function to check if all upstream dependencies are completed
+function areDependenciesReady(taskName) {
+  const taskIndex = pipeline.tasks.indexOf(taskName);
+  if (taskIndex === -1) return false;

-
-
-
-
-
-
-
-
+  const upstreamTasks = pipeline.tasks.slice(0, taskIndex);
+  return upstreamTasks.every(
+    (upstreamTask) => status.tasks[upstreamTask]?.state === TaskState.DONE
+  );
+}
+
+try {
+  for (const taskName of pipeline.tasks) {
+    // Skip tasks before startFromTask when targeting a specific restart point
+    if (
+      startFromTask &&
+      pipeline.tasks.indexOf(taskName) < pipeline.tasks.indexOf(startFromTask)
+    ) {
+      logger.log("Skipping task before restart point", {
+        taskName,
+        startFromTask,
+      });
+      continue;
     }
-    continue;
-  }

-
-
-
-
-
-
+    if (status.tasks[taskName]?.state === TaskState.DONE) {
+      try {
+        const outputPath = path.join(workDir, "tasks", taskName, "output.json");
+        const output = JSON.parse(await fs.readFile(outputPath, "utf8"));
+        pipelineArtifacts[taskName] = output;
+        logger.log("Task already completed", { taskName });
+      } catch {
+        logger.warn("Failed to read completed task output", { taskName });
+      }
+      continue;
+    }

-
-
+    // Check lifecycle policy before starting task
+    const currentTaskState = status.tasks[taskName]?.state || "pending";
+    const dependenciesReady = areDependenciesReady(taskName);

-
-
-
-
-
-      taskName,
-      taskConfig: pipeline.taskConfig?.[taskName] || {},
-      statusPath: tasksStatusPath,
-      jobId,
-      meta: {
-        pipelineTasks: [...pipeline.tasks],
-      },
-    };
-    const modulePath = tasks[taskName];
-    if (!modulePath) throw new Error(`Task not registered: ${taskName}`);
-
-    // Resolve relative paths from task registry to absolute paths
-    const absoluteModulePath = path.isAbsolute(modulePath)
-      ? modulePath
-      : path.resolve(path.dirname(TASK_REGISTRY), modulePath);
-
-    // Validate symlinks before task execution to ensure restart reliability
-    const poRoot = process.env.PO_ROOT || process.cwd();
-    const expectedTargets = {
-      nodeModules: path.join(path.resolve(poRoot, ".."), "node_modules"),
-      taskRoot: path.dirname(absoluteModulePath),
-    };
+    const lifecycleDecision = decideTransition({
+      op: "start",
+      taskState: currentTaskState,
+      dependenciesReady,
+    });

-
-
-
-
+    if (!lifecycleDecision.ok) {
+      logger.warn("lifecycle_block", {
+        jobId,
+        taskId: taskName,
+        op: "start",
+        reason: lifecycleDecision.reason,
+      });

-
-
-
+      // Create typed error for endpoints to handle
+      const lifecycleError = new Error(lifecycleDecision.reason);
+      lifecycleError.httpStatus = 409;
+      lifecycleError.error = "unsupported_lifecycle";
+      lifecycleError.reason = lifecycleDecision.reason;
+      throw lifecycleError;
+    }
+
+    logger.log("Starting task", { taskName });
+    await updateStatus(taskName, {
+      state: TaskState.RUNNING,
+      startedAt: now(),
+      attempts: (status.tasks[taskName]?.attempts || 0) + 1,
+    });
+
+    const taskDir = path.join(workDir, "tasks", taskName);
+    await fs.mkdir(taskDir, { recursive: true });
+
+    try {
+      const ctx = {
+        workDir,
         taskDir,
-
-
-
+        seed,
+        taskName,
+        taskConfig: pipeline.taskConfig?.[taskName] || {},
+        statusPath: tasksStatusPath,
+        jobId,
+        meta: {
+          pipelineTasks: [...pipeline.tasks],
+        },
+      };
+      const modulePath = tasks[taskName];
+      if (!modulePath) throw new Error(`Task not registered: ${taskName}`);
+
+      // Resolve relative paths from task registry to absolute paths
+      const absoluteModulePath = path.isAbsolute(modulePath)
+        ? modulePath
+        : path.resolve(path.dirname(TASK_REGISTRY), modulePath);
+
+      // Validate symlinks before task execution to ensure restart reliability
+      const poRoot = process.env.PO_ROOT || process.cwd();
+      const expectedTargets = {
+        nodeModules: path.join(path.resolve(poRoot, ".."), "node_modules"),
+        taskRoot: path.dirname(absoluteModulePath),
+      };

-    const
+      const validationResult = await validateTaskSymlinks(
         taskDir,
-
-      absoluteModulePath
+        expectedTargets
       );

-    if (!
-
-      logger.error("Task symlink repair failed, aborting execution", {
+      if (!validationResult.isValid) {
+        logger.warn("Task symlinks validation failed, attempting repair", {
           taskName,
           taskDir,
-        errors:
-
+          errors: validationResult.errors,
+          validationDuration: validationResult.duration,
         });

-      await
-
-
-
-
+        const repairResult = await repairTaskSymlinks(
+          taskDir,
+          poRoot,
+          absoluteModulePath
+        );

-
-
+        if (!repairResult.success) {
+          const errorMessage = `Failed to repair task symlinks for ${taskName}: ${repairResult.errors.join(", ")}`;
+          logger.error("Task symlink repair failed, aborting execution", {
+            taskName,
+            taskDir,
+            errors: repairResult.errors,
+            repairDuration: repairResult.duration,
+          });
+
+          await updateStatus(taskName, {
+            state: TaskState.FAILED,
+            endedAt: now(),
+            error: { message: errorMessage, type: "SymlinkRepairFailed" },
+          });
+
+          process.exitCode = 1;
+          process.exit(1);
+        }
+
+        logger.log("Task symlinks repaired successfully", {
+          taskName,
+          taskDir,
+          repairDuration: repairResult.duration,
+          relocatedEntry: repairResult.relocatedEntry,
+        });
+      } else {
+        logger.debug("Task symlinks validation passed", {
+          taskName,
+          taskDir,
+          validationDuration: validationResult.duration,
+        });
       }

-
-
+      // Create symlink bridge for deterministic module resolution
+      const relocatedEntry = await ensureTaskSymlinkBridge({
         taskDir,
-
-
+        poRoot,
+        taskModulePath: absoluteModulePath,
       });
-
-
+
+      // Create fileIO for this task
+      const fileIO = createTaskFileIO({
+        workDir,
         taskName,
-
-
+        getStage: () => null, // pipeline-runner doesn't have stages
+        statusPath: tasksStatusPath,
       });
-  }

-
-
-    taskDir,
-    poRoot,
-    taskModulePath: absoluteModulePath,
-  });
+      logger.log("Running task", { taskName, modulePath: absoluteModulePath });
+      const result = await runPipeline(relocatedEntry, ctx);

-
-
-
-
-
-
-
+      if (!result.ok) {
+        logger.error("Task failed", {
+          taskName,
+          failedStage: result.failedStage,
+          error: result.error,
+          refinementAttempts: result.refinementAttempts || 0,
+        });

-
-
+        // Persist execution-logs.json and failure-details.json on task failure via IO
+        if (result.logs) {
+          await fileIO.writeLog(
+            generateLogName(
+              taskName,
+              "pipeline",
+              LogEvent.EXECUTION_LOGS,
+              LogFileExtension.JSON
+            ),
+            JSON.stringify(result.logs, null, 2),
+            { mode: "replace" }
+          );
+        }
+        const failureDetails = {
+          failedStage: result.failedStage,
+          error: result.error,
+          logs: result.logs,
+          context: result.context,
+          refinementAttempts: result.refinementAttempts || 0,
+        };
+        await fileIO.writeLog(
+          generateLogName(
+            taskName,
+            "pipeline",
+            LogEvent.FAILURE_DETAILS,
+            LogFileExtension.JSON
+          ),
+          JSON.stringify(failureDetails, null, 2),
+          { mode: "replace" }
+        );
+
+        // Update tasks-status.json with enriched failure context
+        await updateStatus(taskName, {
+          state: TaskState.FAILED,
+          endedAt: now(),
+          error: result.error, // Don't double-normalize - use result.error as-is
+          failedStage: result.failedStage,
+          refinementAttempts: result.refinementAttempts || 0,
+          stageLogPath: path.join(
+            workDir,
+            "files",
+            "logs",
+            `stage-${result.failedStage}.log`
+          ),
+          errorContext: {
+            previousStage: result.context?.previousStage || "seed",
+            dataHasSeed: !!result.context?.data?.seed,
+            seedHasData: result.context?.data?.seed?.data !== undefined,
+            flagsKeys: Object.keys(result.context?.flags || {}),
+          },
+        });

-
-
+        // Exit with non-zero status but do not throw to keep consistent flow
+        process.exitCode = 1;
+        process.exit(1);
+      }
+
+      logger.log("Task completed successfully", {
         taskName,
-
-
+        executionTimeMs:
+          result.logs?.reduce((total, log) => total + (log.ms || 0), 0) || 0,
         refinementAttempts: result.refinementAttempts || 0,
       });

-  //
+      // The file I/O system automatically handles writing outputs and updating tasks-status.json
+      // No need to manually write output.json or enumerate artifacts
+
       if (result.logs) {
         await fileIO.writeLog(
           generateLogName(
@@ -235,129 +379,80 @@ for (const taskName of pipeline.tasks) {
           { mode: "replace" }
         );
       }
-      const failureDetails = {
-        failedStage: result.failedStage,
-        error: result.error,
-        logs: result.logs,
-        context: result.context,
-        refinementAttempts: result.refinementAttempts || 0,
-      };
-      await fileIO.writeLog(
-        generateLogName(
-          taskName,
-          "pipeline",
-          LogEvent.FAILURE_DETAILS,
-          LogFileExtension.JSON
-        ),
-        JSON.stringify(failureDetails, null, 2),
-        { mode: "replace" }
-      );

-      // Update tasks-status.json with enriched failure context
       await updateStatus(taskName, {
-        state: TaskState.
+        state: TaskState.DONE,
         endedAt: now(),
-
-
+        executionTimeMs:
+          result.logs?.reduce((total, log) => total + (log.ms || 0), 0) || 0,
         refinementAttempts: result.refinementAttempts || 0,
-        stageLogPath: path.join(
-          workDir,
-          "files",
-          "logs",
-          `stage-${result.failedStage}.log`
-        ),
-        errorContext: {
-          previousStage: result.context?.previousStage || "seed",
-          dataHasSeed: !!result.context?.data?.seed,
-          seedHasData: result.context?.data?.seed?.data !== undefined,
-          flagsKeys: Object.keys(result.context?.flags || {}),
-        },
       });

-  //
+      // Check if this is a single task run and we've completed the target task
+      if (runSingleTask && taskName === startFromTask) {
+        logger.log("Stopping after single task execution", { taskName });
+        break;
+      }
+    } catch (err) {
+      await updateStatus(taskName, {
+        state: TaskState.FAILED,
+        endedAt: now(),
+        error: normalizeError(err),
+      });
       process.exitCode = 1;
       process.exit(1);
     }
+  }

-
-
-
-
-      refinementAttempts: result.refinementAttempts || 0,
-    });
+  // Only move to complete if this wasn't a single task run
+  if (!runSingleTask) {
+    await fs.mkdir(COMPLETE_DIR, { recursive: true });
+    const dest = path.join(COMPLETE_DIR, jobId);

-
-
+    logger.log("Pipeline completed", {
+      jobId,
+      totalExecutionTime: Object.values(status.tasks).reduce(
+        (total, t) => total + (t.executionTimeMs || 0),
+        0
+      ),
+      totalRefinementAttempts: Object.values(status.tasks).reduce(
+        (total, t) => total + (t.refinementAttempts || 0),
+        0
+      ),
+      finalArtifacts: Object.keys(pipelineArtifacts),
+    });

-
-
-
-
-
-
-
+    await fs.rename(workDir, dest);
+    await appendLine(
+      path.join(COMPLETE_DIR, "runs.jsonl"),
+      JSON.stringify({
+        id: status.id,
+        finishedAt: now(),
+        tasks: Object.keys(status.tasks),
+        totalExecutionTime: Object.values(status.tasks).reduce(
+          (total, t) => total + (t.executionTimeMs || 0),
+          0
         ),
-
-
-
-
+        totalRefinementAttempts: Object.values(status.tasks).reduce(
+          (total, t) => total + (t.refinementAttempts || 0),
+          0
+        ),
+        finalArtifacts: Object.keys(pipelineArtifacts),
+      }) + "\n"
+    );

-
-
-
-
-          result.logs?.reduce((total, log) => total + (log.ms || 0), 0) || 0,
-        refinementAttempts: result.refinementAttempts || 0,
-      });
-    } catch (err) {
-      await updateStatus(taskName, {
-        state: TaskState.FAILED,
-        endedAt: now(),
-        error: normalizeError(err),
-      });
-      process.exitCode = 1;
-      process.exit(1);
+    // Clean up task symlinks to avoid dangling links in archives
+    await cleanupTaskSymlinks(dest);
+  } else {
+    logger.log("Single task run completed, job remains in current", { jobId });
   }
+} catch (error) {
+  throw error;
+} finally {
+  // Always ensure PID cleanup at the end of execution
+  await cleanupRunnerPid();
 }

-await fs.mkdir(COMPLETE_DIR, { recursive: true });
-const dest = path.join(COMPLETE_DIR, jobId);
-
-logger.log("Pipeline completed", {
-  jobId,
-  totalExecutionTime: Object.values(status.tasks).reduce(
-    (total, t) => total + (t.executionTimeMs || 0),
-    0
-  ),
-  totalRefinementAttempts: Object.values(status.tasks).reduce(
-    (total, t) => total + (t.refinementAttempts || 0),
-    0
-  ),
-  finalArtifacts: Object.keys(pipelineArtifacts),
-});
-
-await fs.rename(workDir, dest);
-await appendLine(
-  path.join(COMPLETE_DIR, "runs.jsonl"),
-  JSON.stringify({
-    id: status.id,
-    finishedAt: now(),
-    tasks: Object.keys(status.tasks),
-    totalExecutionTime: Object.values(status.tasks).reduce(
-      (total, t) => total + (t.executionTimeMs || 0),
-      0
-    ),
-    totalRefinementAttempts: Object.values(status.tasks).reduce(
-      (total, t) => total + (t.refinementAttempts || 0),
-      0
-    ),
-    finalArtifacts: Object.keys(pipelineArtifacts),
-  }) + "\n"
-);
-
-// Clean up task symlinks to avoid dangling links in archives
-await cleanupTaskSymlinks(dest);
-
 logger.groupEnd();

 function now() {