@ryanfw/prompt-orchestration-pipeline 0.6.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -2
- package/package.json +1 -2
- package/src/api/validators/json.js +39 -0
- package/src/components/DAGGrid.jsx +392 -303
- package/src/components/JobCard.jsx +13 -11
- package/src/components/JobDetail.jsx +41 -71
- package/src/components/JobTable.jsx +32 -22
- package/src/components/Layout.jsx +0 -21
- package/src/components/LiveText.jsx +47 -0
- package/src/components/TaskDetailSidebar.jsx +216 -0
- package/src/components/TimerText.jsx +82 -0
- package/src/components/ui/RestartJobModal.jsx +140 -0
- package/src/components/ui/toast.jsx +138 -0
- package/src/config/models.js +322 -0
- package/src/config/statuses.js +119 -0
- package/src/core/config.js +2 -164
- package/src/core/file-io.js +1 -1
- package/src/core/module-loader.js +54 -40
- package/src/core/pipeline-runner.js +52 -26
- package/src/core/status-writer.js +147 -3
- package/src/core/symlink-bridge.js +55 -0
- package/src/core/symlink-utils.js +94 -0
- package/src/core/task-runner.js +267 -443
- package/src/llm/index.js +167 -52
- package/src/pages/Code.jsx +57 -3
- package/src/pages/PipelineDetail.jsx +92 -22
- package/src/pages/PromptPipelineDashboard.jsx +15 -36
- package/src/providers/anthropic.js +83 -69
- package/src/providers/base.js +52 -0
- package/src/providers/deepseek.js +17 -34
- package/src/providers/gemini.js +226 -0
- package/src/providers/openai.js +36 -106
- package/src/providers/zhipu.js +136 -0
- package/src/ui/client/adapters/job-adapter.js +16 -26
- package/src/ui/client/api.js +134 -0
- package/src/ui/client/hooks/useJobDetailWithUpdates.js +65 -178
- package/src/ui/client/index.css +9 -0
- package/src/ui/client/index.html +1 -0
- package/src/ui/client/main.jsx +18 -15
- package/src/ui/client/time-store.js +161 -0
- package/src/ui/config-bridge.js +15 -24
- package/src/ui/config-bridge.node.js +15 -24
- package/src/ui/dist/assets/{index-WgJUlSmE.js → index-DqkbzXZ1.js} +1408 -771
- package/src/ui/dist/assets/style-DBF9NQGk.css +62 -0
- package/src/ui/dist/index.html +3 -2
- package/src/ui/public/favicon.svg +12 -0
- package/src/ui/server.js +231 -38
- package/src/ui/transformers/status-transformer.js +18 -31
- package/src/ui/watcher.js +5 -1
- package/src/utils/dag.js +8 -4
- package/src/utils/duration.js +13 -19
- package/src/utils/formatters.js +27 -0
- package/src/utils/geometry-equality.js +83 -0
- package/src/utils/pipelines.js +5 -1
- package/src/utils/time-utils.js +40 -0
- package/src/utils/token-cost-calculator.js +4 -7
- package/src/utils/ui.jsx +14 -16
- package/src/components/ui/select.jsx +0 -27
- package/src/lib/utils.js +0 -6
- package/src/ui/client/hooks/useTicker.js +0 -26
- package/src/ui/config-bridge.browser.js +0 -149
- package/src/ui/dist/assets/style-x0V-5m8e.css +0 -62
package/src/core/task-runner.js
CHANGED
@@ -7,6 +7,8 @@ import { loadEnvironment } from "./environment.js";
 import { createTaskFileIO } from "./file-io.js";
 import { writeJobStatus } from "./status-writer.js";
 import { computeDeterministicProgress } from "./progress.js";
+import { TaskState } from "../config/statuses.js";
+import { validateWithSchema } from "../api/validators/json.js";

 /**
  * Derives model key and token counts from LLM metric event.
@@ -155,24 +157,6 @@ function ensureLogDirectory(workDir, jobId) {
   return logsPath;
 }

-/**
- * Writes a compact pre-execution snapshot for debugging stage inputs.
- * Safe: does not throw on write failure; logs warnings instead.
- * @param {string} stageName - Name of the stage
- * @param {object} snapshot - Summary data to persist
- * @param {string} logsDir - Directory to write the snapshot into
- */
-function writePreExecutionSnapshot(stageName, snapshot, logsDir) {
-  const snapshotPath = path.join(logsDir, `stage-${stageName}-context.json`);
-  try {
-    fs.writeFileSync(snapshotPath, JSON.stringify(snapshot, null, 2));
-  } catch (error) {
-    console.warn(
-      `[task-runner] Failed to write pre-execution snapshot for ${stageName}: ${error.message}`
-    );
-  }
-}
-
 /**
  * Redirects console output to a log file for a stage.
  * @param {string} logPath - The path to the log file
@@ -270,25 +254,10 @@ function persistStatusSnapshot(statusPath, updates) {
  * Defines required flags (prerequisites) and produced flags (outputs) with their types.
  */
 const FLAG_SCHEMAS = {
-…
+  validateQuality: {
     requires: {},
     produces: {
-…
-      lastValidationError: ["string", "object", "undefined"],
-    },
-  },
-  critique: {
-    requires: {},
-    produces: {
-      critiqueComplete: "boolean",
-    },
-  },
-  refine: {
-    requires: {
-      validationFailed: "boolean",
-    },
-    produces: {
-      refined: "boolean",
+      needsRefinement: "boolean",
     },
   },
 };
@@ -345,19 +314,19 @@ const PIPELINE_STAGES = [
   {
     name: "critique",
     handler: null, // Will be populated from dynamic module import
-    skipIf: (flags) => flags.
+    skipIf: (flags) => flags.needsRefinement !== true,
     maxIterations: null,
   },
   {
     name: "refine",
     handler: null, // Will be populated from dynamic module import
-    skipIf: (flags) => flags.
-    maxIterations:
+    skipIf: (flags) => flags.needsRefinement !== true,
+    maxIterations: null,
   },
   {
     name: "finalValidation",
     handler: null, // Will be populated from dynamic module import
-    skipIf:
+    skipIf: (flags) => flags.needsRefinement !== true,
     maxIterations: null,
   },
   {
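The stage table above now gates critique, refine, and finalValidation on a single needsRefinement flag, and FLAG_SCHEMAS declares which flags each stage requires and produces. What follows is a minimal, self-contained sketch of that gating pattern, not part of the package diff; the validateFlagTypes helper here is a hypothetical stand-in for the one defined elsewhere in task-runner.js, whose body this diff does not show.

// Illustrative sketch only: mirrors the skipIf / flag-schema pattern above.
const FLAG_SCHEMAS = {
  validateQuality: { requires: {}, produces: { needsRefinement: "boolean" } },
};

const PIPELINE_STAGES = [
  {
    name: "validateQuality",
    handler: async () => ({ output: {}, flags: { needsRefinement: false } }),
    skipIf: null,
  },
  {
    name: "critique",
    handler: async () => ({ output: {}, flags: {} }),
    skipIf: (flags) => flags.needsRefinement !== true,
  },
];

// Hypothetical stand-in for the package's validateFlagTypes helper.
function validateFlagTypes(stageName, flags, schema) {
  for (const [key, expected] of Object.entries(schema)) {
    const allowed = Array.isArray(expected) ? expected : [expected];
    if (!allowed.includes(typeof flags[key])) {
      throw new Error(`${stageName}: flag "${key}" should be ${allowed.join(" or ")}`);
    }
  }
}

async function run() {
  let flags = {};
  for (const stage of PIPELINE_STAGES) {
    // Skip gated stages, as the runner does with skipIf(context.flags).
    if (stage.skipIf && stage.skipIf(flags)) continue;
    const result = await stage.handler({ flags: { ...flags } });
    const produces = FLAG_SCHEMAS[stage.name]?.produces;
    if (produces) validateFlagTypes(stage.name, result.flags, produces);
    flags = { ...flags, ...result.flags };
  }
  return flags;
}

run().then((flags) => console.log(flags)); // { needsRefinement: false } — critique was skipped

Because validateQuality reports needsRefinement: false, critique's skipIf predicate returns true and the stage never runs, matching the single-pass behaviour introduced further down in this diff.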
@@ -446,24 +415,26 @@ export async function runPipeline(modulePath, initialContext = {}) {
     }
   });

-  //
-  let fileIO = null;
+  // fileIO is mandatory for runner execution
   if (
-    initialContext.workDir
-    initialContext.taskName
-    initialContext.statusPath
+    !initialContext.workDir ||
+    !initialContext.taskName ||
+    !initialContext.statusPath
   ) {
-…
-      workDir: initialContext.workDir,
-…
-      getStage: () => context.currentStage,
-      statusPath: initialContext.statusPath,
-    });
+    throw new Error(
+      `fileIO is required for task execution but missing required context. workDir: ${initialContext.workDir}, taskName: ${initialContext.taskName}, statusPath: ${initialContext.statusPath}`
+    );
   }

-…
+  const fileIO = createTaskFileIO({
+    workDir: initialContext.workDir,
+    taskName: initialContext.taskName,
+    getStage: () => context.currentStage,
+    statusPath: initialContext.statusPath,
+  });
+
+  // Extract seed for new context structure
   const seed = initialContext.seed || initialContext;
-  const maxRefinements = seed.maxRefinements ?? 1; // Default to 1 unless explicitly set

   // Create new context structure with io, llm, meta, data, flags, logs, currentStage
   const context = {
@@ -487,10 +458,11 @@ export async function runPipeline(modulePath, initialContext = {}) {
     flags: {},
     logs: [],
     currentStage: null,
+    validators: {
+      validateWithSchema,
+    },
   };
   const logs = [];
-  let needsRefinement = false;
-  let refinementCount = 0;
   let lastStageOutput = context.data.seed;
   let lastStageName = "seed";
   let lastExecutedStageName = "seed";
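With validators now attached to the runner context, every stage handler receives validateWithSchema alongside io, llm, meta, data, and flags, and must return an { output, flags } pair (see the single-pass loop in the next hunk). Below is a hedged sketch of a handler written against that contract; it is not part of the package, and the validateWithSchema call signature and its { valid } return shape are assumptions, since src/api/validators/json.js is not displayed in this diff.

// Illustrative stage handler: returns { output, flags } as the runner's
// assertStageResult expects. The validateWithSchema signature and result
// shape below are assumed, not taken from this diff.
export async function validateQuality(context) {
  const { output, validators } = context;

  // Hypothetical JSON-schema check of the previous stage's output.
  const result = validators.validateWithSchema(output, {
    type: "object",
    required: ["title", "body"],
  });

  return {
    output, // pass the payload through unchanged
    flags: { needsRefinement: !result?.valid }, // gates critique/refine/finalValidation
  };
}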
@@ -498,432 +470,285 @@ export async function runPipeline(modulePath, initialContext = {}) {
   // Ensure log directory exists before stage execution
   const logsDir = ensureLogDirectory(context.meta.workDir, context.meta.jobId);

-…
-  for (const stageConfig of PIPELINE_STAGES) {
-    const stageName = stageConfig.name;
-    const stageHandler = stageConfig.handler;
-
-    // Skip stages when skipIf predicate returns true
-    if (stageConfig.skipIf && stageConfig.skipIf(context.flags)) {
-      context.logs.push({
-        stage: stageName,
-        action: "skipped",
-        reason: "skipIf predicate returned true",
-        timestamp: new Date().toISOString(),
-      });
-      continue;
-    }
+  // Single-pass pipeline execution
+  for (const stageConfig of PIPELINE_STAGES) {
+    const stageName = stageConfig.name;
+    const stageHandler = stageConfig.handler;

-…
+    // Skip stages when skipIf predicate returns true
+    if (stageConfig.skipIf && stageConfig.skipIf(context.flags)) {
+      context.logs.push({
+        stage: stageName,
+        action: "skipped",
+        reason: "skipIf predicate returned true",
+        timestamp: new Date().toISOString(),
+      });
+      continue;
+    }
+
+    // Skip if handler is not available (not implemented)
+    if (typeof stageHandler !== "function") {
+      logs.push({
+        stage: stageName,
+        skipped: true,
+      });
+      continue;
+    }
+
+    // Add console output capture before stage execution using IO
+    const logName = `stage-${stageName}.log`;
+    const logPath = path.join(context.meta.workDir, "files", "logs", logName);
+    console.debug("[task-runner] stage log path resolution via IO", {
+      stage: stageName,
+      workDir: context.meta.workDir,
+      jobId: context.meta.jobId,
+      logName,
+      logPath,
+    });
+    const restoreConsole = captureConsoleOutput(logPath);

-…
+    // Set current stage before execution
+    context.currentStage = stageName;
+
+    // Write stage start status using writeJobStatus
+    if (context.meta.workDir && context.meta.taskName) {
+      try {
+        await writeJobStatus(context.meta.workDir, (snapshot) => {
+          snapshot.current = context.meta.taskName;
+          snapshot.currentStage = stageName;
+          snapshot.lastUpdated = new Date().toISOString();
+
+          // Ensure task exists and update task-specific fields
+          if (!snapshot.tasks[context.meta.taskName]) {
+            snapshot.tasks[context.meta.taskName] = {};
+          }
+          snapshot.tasks[context.meta.taskName].currentStage = stageName;
+          snapshot.tasks[context.meta.taskName].state = TaskState.RUNNING;
         });
-
+      } catch (error) {
+        // Don't fail the pipeline if status write fails
+        console.warn(`Failed to write stage start status: ${error.message}`);
       }
+    }

-…
+    // Clone data and flags before stage execution
+    const stageData = JSON.parse(JSON.stringify(context.data));
+    const stageFlags = JSON.parse(JSON.stringify(context.flags));
+    const stageContext = {
+      io: context.io,
+      llm: context.llm,
+      meta: context.meta,
+      data: stageData,
+      flags: stageFlags,
+      currentStage: stageName,
+      output: JSON.parse(
+        JSON.stringify(
+          lastStageOutput !== undefined
+            ? lastStageOutput
+            : (context.data.seed ?? null)
+        )
+      ),
+      previousStage: lastExecutedStageName,
+      validators: context.validators,
+    };
+
+    // Write pre-execution snapshot for debugging inputs via IO
+    const snapshot = {
+      meta: { taskName: context.meta.taskName, jobId: context.meta.jobId },
+      previousStage: lastExecutedStageName,
+      dataSummary: {
+        keys: Object.keys(context.data),
+        hasSeed: !!context.data?.seed,
+        seedKeys: Object.keys(context.data?.seed || {}),
+        seedHasData: context.data?.seed?.data !== undefined,
+      },
+      flagsSummary: {
+        keys: Object.keys(context.flags),
+      },
+      outputSummary: {
+        type: typeof stageContext.output,
+        keys:
+          stageContext.output && typeof stageContext.output === "object"
+            ? Object.keys(stageContext.output).slice(0, 20)
+            : [],
+      },
+    };
+    await context.io.writeLog(
+      `stage-${stageName}-context.json`,
+      JSON.stringify(snapshot, null, 2),
+      { mode: "replace" }
+    );
+
+    // Validate prerequisite flags before stage execution
+    const requiredFlags = FLAG_SCHEMAS[stageName]?.requires;
+    if (requiredFlags && Object.keys(requiredFlags).length > 0) {
+      validateFlagTypes(stageName, context.flags, requiredFlags);
+    }
+
+    // Execute the stage
+    const start = performance.now();
+    let stageResult;
+    try {
+      context.logs.push({
+        stage: stageName,
+        action: "debugging",
+        data: stageContext,
+      });
+
+      console.log("STAGE CONTEXT", JSON.stringify(stageContext, null, 2));
+      stageResult = await stageHandler(stageContext);
+
+      // Validate stage result shape after execution
+      assertStageResult(stageName, stageResult);
+
+      // Validate produced flags against schema
+      const producedFlagsSchema = FLAG_SCHEMAS[stageName]?.produces;
+      if (producedFlagsSchema) {
+        validateFlagTypes(stageName, stageResult.flags, producedFlagsSchema);
       }

-      //
-…
+      // Check for flag type conflicts before merging
+      checkFlagTypeConflicts(context.flags, stageResult.flags, stageName);
+
+      // Store stage output in context.data
+      context.data[stageName] = stageResult.output;
+
+      // Only update lastStageOutput and lastExecutedStageName for non-validation stages
+      // This ensures previousStage and context.output skip validation stages
+      const validationStages = [
+        "validateStructure",
+        "validateQuality",
+        "validateFinal",
+        "finalValidation",
+      ];
+      if (!validationStages.includes(stageName)) {
+        lastStageOutput = stageResult.output;
+        lastExecutedStageName = stageName;
       }

-      //
-…
-        `stage-${stageName}.log`
-      );
-      console.debug("[task-runner] stage log path resolution", {
+      // Merge stage flags into context.flags
+      context.flags = { ...context.flags, ...stageResult.flags };
+
+      // Add audit log entry after stage completes
+      context.logs.push({
         stage: stageName,
-…
+        action: "completed",
+        outputType: typeof stageResult.output,
+        flagKeys: Object.keys(stageResult.flags),
+        timestamp: new Date().toISOString(),
       });
-      const restoreConsole = captureConsoleOutput(logPath);
-
-      // Set current stage before execution
-      context.currentStage = stageName;

-      // Write stage
+      // Write stage completion status
       if (context.meta.workDir && context.meta.taskName) {
         try {
           await writeJobStatus(context.meta.workDir, (snapshot) => {
+            // Keep current task and stage as-is since we're still within the same task
             snapshot.current = context.meta.taskName;
             snapshot.currentStage = stageName;
             snapshot.lastUpdated = new Date().toISOString();

+            // Compute deterministic progress after stage completion
+            const pct = computeDeterministicProgress(
+              context.meta.pipelineTasks || [],
+              context.meta.taskName,
+              stageName
+            );
+            snapshot.progress = pct;
+
+            // Debug log for progress computation
+            console.debug("[task-runner] stage completion progress", {
+              task: context.meta.taskName,
+              stage: stageName,
+              progress: pct,
+            });
+
             // Ensure task exists and update task-specific fields
             if (!snapshot.tasks[context.meta.taskName]) {
               snapshot.tasks[context.meta.taskName] = {};
             }
             snapshot.tasks[context.meta.taskName].currentStage = stageName;
-            snapshot.tasks[context.meta.taskName].state =
+            snapshot.tasks[context.meta.taskName].state = TaskState.RUNNING;
           });
         } catch (error) {
           // Don't fail the pipeline if status write fails
-          console.warn(
+          console.warn(
+            `Failed to write stage completion status: ${error.message}`
+          );
         }
       }

-…
-      output: JSON.parse(
-        JSON.stringify(
-          lastStageOutput !== undefined
-            ? lastStageOutput
-            : (context.data.seed ?? null)
-        )
-      ),
-      previousStage: lastExecutedStageName,
-    };
+      const ms = +(performance.now() - start).toFixed(2);
+      logs.push({
+        stage: stageName,
+        ok: true,
+        ms,
+      });
+    } catch (error) {
+      console.error(`Stage ${stageName} failed:`, error);
+      const ms = +(performance.now() - start).toFixed(2);
+      const errInfo = normalizeError(error);

-      //
-…
+      // Attach debug metadata to the error envelope for richer diagnostics
+      errInfo.debug = {
+        stage: stageName,
         previousStage: lastExecutedStageName,
-…
-        },
-…
-      },
-      outputSummary: {
-        type: typeof stageContext.output,
-        keys:
-          stageContext.output && typeof stageContext.output === "object"
-            ? Object.keys(stageContext.output).slice(0, 20)
-            : [],
-      },
+        logPath: path.join(
+          context.meta.workDir,
+          "files",
+          "logs",
+          `stage-${stageName}.log`
+        ),
+        snapshotPath: path.join(logsDir, `stage-${stageName}-context.json`),
+        dataHasSeed: !!context.data?.seed,
+        seedHasData: context.data?.seed?.data !== undefined,
+        flagsKeys: Object.keys(context.flags || {}),
       };
-    writePreExecutionSnapshot(stageName, snapshot, logsDir);
-
-    // Validate prerequisite flags before stage execution
-    const requiredFlags = FLAG_SCHEMAS[stageName]?.requires;
-    if (requiredFlags && Object.keys(requiredFlags).length > 0) {
-      validateFlagTypes(stageName, context.flags, requiredFlags);
-    }
-
-    // Execute the stage
-    const start = performance.now();
-    let stageResult;
-    try {
-      context.logs.push({
-        stage: stageName,
-        action: "debugging",
-        data: stageContext,
-      });
-
-      console.log("STAGE CONTEXT", JSON.stringify(stageContext, null, 2));
-      stageResult = await stageHandler(stageContext);
-
-      // Validate stage result shape after execution
-      assertStageResult(stageName, stageResult);
-
-      // Validate produced flags against schema
-      const producedFlagsSchema = FLAG_SCHEMAS[stageName]?.produces;
-      if (producedFlagsSchema) {
-        validateFlagTypes(stageName, stageResult.flags, producedFlagsSchema);
-      }
-
-      // Check for flag type conflicts before merging
-      checkFlagTypeConflicts(context.flags, stageResult.flags, stageName);
-
-      // Store stage output in context.data
-      context.data[stageName] = stageResult.output;
-      lastStageName = stageName;
-
-      // Only update lastStageOutput and lastExecutedStageName for non-validation stages
-      // This ensures previousStage and context.output skip validation stages
-      const validationStages = [
-        "validateStructure",
-        "validateQuality",
-        "validateFinal",
-        "finalValidation",
-      ];
-      if (!validationStages.includes(stageName)) {
-        lastStageOutput = stageResult.output;
-        lastExecutedStageName = stageName;
-      }
-
-      // Merge stage flags into context.flags
-      context.flags = { ...context.flags, ...stageResult.flags };
-
-      // Add audit log entry after stage completes
-      context.logs.push({
-        stage: stageName,
-        action: "completed",
-        outputType: typeof stageResult.output,
-        flagKeys: Object.keys(stageResult.flags),
-        timestamp: new Date().toISOString(),
-      });
-
-      // Write stage completion status
-      if (context.meta.workDir && context.meta.taskName) {
-        try {
-          await writeJobStatus(context.meta.workDir, (snapshot) => {
-            // Keep current task and stage as-is since we're still within the same task
-            snapshot.current = context.meta.taskName;
-            snapshot.currentStage = stageName;
-            snapshot.lastUpdated = new Date().toISOString();
-
-            // Compute deterministic progress after stage completion
-            const pct = computeDeterministicProgress(
-              context.meta.pipelineTasks || [],
-              context.meta.taskName,
-              stageName
-            );
-            snapshot.progress = pct;
-
-            // Debug log for progress computation
-            console.debug("[task-runner] stage completion progress", {
-              task: context.meta.taskName,
-              stage: stageName,
-              progress: pct,
-            });
-
-            // Ensure task exists and update task-specific fields
-            if (!snapshot.tasks[context.meta.taskName]) {
-              snapshot.tasks[context.meta.taskName] = {};
-            }
-            snapshot.tasks[context.meta.taskName].currentStage = stageName;
-            snapshot.tasks[context.meta.taskName].state = "running";
-          });
-        } catch (error) {
-          // Don't fail the pipeline if status write fails
-          console.warn(
-            `Failed to write stage completion status: ${error.message}`
-          );
-        }
-      }
-
-      const ms = +(performance.now() - start).toFixed(2);
-      logs.push({
-        stage: stageName,
-        ok: true,
-        ms,
-        refinementCycle: refinementCount,
-      });

-…
-        needsRefinement = true;
-        // Don't reset validationFailed here - let the refinement cycle handle it
-        break;
-      }
-    } catch (error) {
-      console.error(`Stage ${stageName} failed:`, error);
-      const ms = +(performance.now() - start).toFixed(2);
-      const errInfo = normalizeError(error);
-
-      // Attach debug metadata to the error envelope for richer diagnostics
-      errInfo.debug = {
-        stage: stageName,
-        previousStage: lastExecutedStageName,
-        refinementCycle: refinementCount,
-        logPath: path.join(
-          context.meta.workDir,
-          "files",
-          "logs",
-          `stage-${stageName}.log`
-        ),
-        snapshotPath: path.join(logsDir, `stage-${stageName}-context.json`),
-        dataHasSeed: !!context.data?.seed,
-        seedHasData: context.data?.seed?.data !== undefined,
-        flagsKeys: Object.keys(context.flags || {}),
-      };
-
-      logs.push({
-        stage: stageName,
-        ok: false,
-        ms,
-        error: errInfo,
-        refinementCycle: refinementCount,
-      });
-
-      // For validation stages, trigger refinement if we haven't exceeded max refinements AND maxRefinements > 0
-      if (
-        (stageName === "validateStructure" ||
-          stageName === "validateQuality") &&
-        maxRefinements > 0 &&
-        refinementCount < maxRefinements
-      ) {
-        context.flags.lastValidationError = errInfo;
-        context.flags.validationFailed = true; // Set the flag to trigger refinement
-        needsRefinement = true;
-        break;
-      }
+      logs.push({
+        stage: stageName,
+        ok: false,
+        ms,
+        error: errInfo,
+      });

-…
-          // Ensure task exists and update task-specific fields
-          if (!snapshot.tasks[context.meta.taskName]) {
-            snapshot.tasks[context.meta.taskName] = {};
-          }
-          snapshot.tasks[context.meta.taskName].state = "failed";
-          snapshot.tasks[context.meta.taskName].failedStage = stageName;
-          snapshot.tasks[context.meta.taskName].currentStage = stageName;
-        });
-      } catch (error) {
-        // Don't fail the pipeline if status write fails
-        console.warn(`Failed to write failure status: ${error.message}`);
-      }
-    }
+      // Write failure status using writeJobStatus
+      if (context.meta.workDir && context.meta.taskName) {
+        try {
+          await writeJobStatus(context.meta.workDir, (snapshot) => {
+            snapshot.current = context.meta.taskName;
+            snapshot.currentStage = stageName;
+            snapshot.state = TaskState.FAILED;
+            snapshot.lastUpdated = new Date().toISOString();

-…
-      };
-    } finally {
-      // Add console output restoration after stage execution
-      if (restoreConsole) {
-        restoreConsole();
+            // Ensure task exists and update task-specific fields
+            if (!snapshot.tasks[context.meta.taskName]) {
+              snapshot.tasks[context.meta.taskName] = {};
+            }
+            snapshot.tasks[context.meta.taskName].state = TaskState.FAILED;
+            snapshot.tasks[context.meta.taskName].failedStage = stageName;
+            snapshot.tasks[context.meta.taskName].currentStage = stageName;
+          });
+        } catch (error) {
+          // Don't fail the pipeline if status write fails
+          console.warn(`Failed to write failure status: ${error.message}`);
         }
       }
-    }

-…
+      await tokenWriteQueue.catch(() => {});
+      llmEvents.off("llm:request:complete", onLLMComplete);
+
+      // Fail immediately on any stage error
+      return {
+        ok: false,
+        failedStage: stageName,
+        error: errInfo,
+        logs,
+        context,
+      };
+    } finally {
+      // Add console output restoration after stage execution
+      restoreConsole();
     }
-  } while (needsRefinement && refinementCount <= maxRefinements);
-
-  // Only fail on validationFailed if we actually have validation functions
-  const hasValidation =
-    typeof tasks.validateStructure === "function" ||
-    typeof tasks.validateQuality === "function";
-
-  if (context.flags.validationFailed && hasValidation) {
-    await tokenWriteQueue.catch(() => {});
-    llmEvents.off("llm:request:complete", onLLMComplete);
-    return {
-      ok: false,
-      failedStage: "final-validation",
-      error: { message: "Validation failed after all refinement attempts" },
-      logs,
-      context,
-      refinementAttempts: refinementCount,
-    };
   }

   // Flush any trailing token usage appends before cleanup
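Taken together, the loop above makes runPipeline fail fast: the first stage error returns { ok: false, failedStage, error, logs, context }, and a clean run resolves to { ok: true, logs, context, llmMetrics } (see the closing hunks below). The following is a small caller sketch against that result envelope, not part of the package diff; the import specifier, job paths, and task module name are illustrative placeholders.

// Illustrative caller; paths and module specifier are placeholders.
import { runPipeline } from "./src/core/task-runner.js";

const result = await runPipeline("./my-pipeline-tasks.js", {
  workDir: "/tmp/job-123",                      // now required: the runner throws if missing
  taskName: "draft-article",                    // now required
  statusPath: "/tmp/job-123/tasks-status.json", // now required
  seed: { topic: "release notes" },
});

if (!result.ok) {
  // No internal refinement retries any more: surface the failing stage and decide what to do.
  console.error(`Pipeline stopped at ${result.failedStage}`, result.error);
} else {
  console.log("Completed stages:", result.logs.map((entry) => entry.stage));
}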
@@ -937,7 +762,7 @@ export async function runPipeline(modulePath, initialContext = {}) {
     await writeJobStatus(context.meta.workDir, (snapshot) => {
       snapshot.current = null;
       snapshot.currentStage = null;
-      snapshot.state =
+      snapshot.state = TaskState.DONE;
       snapshot.progress = 100;
       snapshot.lastUpdated = new Date().toISOString();

@@ -945,7 +770,7 @@ export async function runPipeline(modulePath, initialContext = {}) {
       if (!snapshot.tasks[context.meta.taskName]) {
         snapshot.tasks[context.meta.taskName] = {};
       }
-      snapshot.tasks[context.meta.taskName].state =
+      snapshot.tasks[context.meta.taskName].state = TaskState.DONE;
       snapshot.tasks[context.meta.taskName].currentStage = null;
     });
   } catch (error) {
@@ -958,7 +783,6 @@ export async function runPipeline(modulePath, initialContext = {}) {
     ok: true,
     logs,
     context,
-    refinementAttempts: refinementCount,
     llmMetrics,
   };
 }