@ryanfw/prompt-orchestration-pipeline 0.6.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -2
- package/package.json +1 -2
- package/src/api/validators/json.js +39 -0
- package/src/components/DAGGrid.jsx +392 -303
- package/src/components/JobCard.jsx +13 -11
- package/src/components/JobDetail.jsx +41 -71
- package/src/components/JobTable.jsx +32 -22
- package/src/components/Layout.jsx +0 -21
- package/src/components/LiveText.jsx +47 -0
- package/src/components/TaskDetailSidebar.jsx +216 -0
- package/src/components/TimerText.jsx +82 -0
- package/src/components/ui/RestartJobModal.jsx +140 -0
- package/src/components/ui/toast.jsx +138 -0
- package/src/config/models.js +322 -0
- package/src/config/statuses.js +119 -0
- package/src/core/config.js +2 -164
- package/src/core/file-io.js +1 -1
- package/src/core/module-loader.js +54 -40
- package/src/core/pipeline-runner.js +52 -26
- package/src/core/status-writer.js +147 -3
- package/src/core/symlink-bridge.js +55 -0
- package/src/core/symlink-utils.js +94 -0
- package/src/core/task-runner.js +267 -443
- package/src/llm/index.js +167 -52
- package/src/pages/Code.jsx +57 -3
- package/src/pages/PipelineDetail.jsx +92 -22
- package/src/pages/PromptPipelineDashboard.jsx +15 -36
- package/src/providers/anthropic.js +83 -69
- package/src/providers/base.js +52 -0
- package/src/providers/deepseek.js +17 -34
- package/src/providers/gemini.js +226 -0
- package/src/providers/openai.js +36 -106
- package/src/providers/zhipu.js +136 -0
- package/src/ui/client/adapters/job-adapter.js +16 -26
- package/src/ui/client/api.js +134 -0
- package/src/ui/client/hooks/useJobDetailWithUpdates.js +65 -178
- package/src/ui/client/index.css +9 -0
- package/src/ui/client/index.html +1 -0
- package/src/ui/client/main.jsx +18 -15
- package/src/ui/client/time-store.js +161 -0
- package/src/ui/config-bridge.js +15 -24
- package/src/ui/config-bridge.node.js +15 -24
- package/src/ui/dist/assets/{index-WgJUlSmE.js → index-DqkbzXZ1.js} +1408 -771
- package/src/ui/dist/assets/style-DBF9NQGk.css +62 -0
- package/src/ui/dist/index.html +3 -2
- package/src/ui/public/favicon.svg +12 -0
- package/src/ui/server.js +231 -38
- package/src/ui/transformers/status-transformer.js +18 -31
- package/src/ui/watcher.js +5 -1
- package/src/utils/dag.js +8 -4
- package/src/utils/duration.js +13 -19
- package/src/utils/formatters.js +27 -0
- package/src/utils/geometry-equality.js +83 -0
- package/src/utils/pipelines.js +5 -1
- package/src/utils/time-utils.js +40 -0
- package/src/utils/token-cost-calculator.js +4 -7
- package/src/utils/ui.jsx +14 -16
- package/src/components/ui/select.jsx +0 -27
- package/src/lib/utils.js +0 -6
- package/src/ui/client/hooks/useTicker.js +0 -26
- package/src/ui/config-bridge.browser.js +0 -149
- package/src/ui/dist/assets/style-x0V-5m8e.css +0 -62
package/src/config/statuses.js
ADDED
@@ -0,0 +1,119 @@
+/**
+ * Canonical status constants and utilities for the prompt orchestration pipeline.
+ * This module serves as the single source of truth for all status-related values.
+ */
+
+// Task states (per-task execution status)
+export const TaskState = Object.freeze({
+  PENDING: "pending",
+  RUNNING: "running",
+  DONE: "done",
+  FAILED: "failed",
+});
+
+// Job statuses (computed aggregate from task states)
+export const JobStatus = Object.freeze({
+  PENDING: "pending",
+  RUNNING: "running",
+  FAILED: "failed",
+  COMPLETE: "complete",
+});
+
+// Job locations (filesystem lifecycle buckets)
+export const JobLocation = Object.freeze({
+  PENDING: "pending",
+  CURRENT: "current",
+  COMPLETE: "complete",
+  REJECTED: "rejected",
+});
+
+// Validation sets
+export const VALID_TASK_STATES = new Set(Object.values(TaskState));
+export const VALID_JOB_STATUSES = new Set(Object.values(JobStatus));
+export const VALID_JOB_LOCATIONS = new Set(Object.values(JobLocation));
+
+/**
+ * Normalizes a task state string to canonical form.
+ * @param {string} state - Raw task state
+ * @returns {string} Canonical task state
+ */
+export function normalizeTaskState(state) {
+  if (typeof state !== "string") {
+    return TaskState.PENDING;
+  }
+
+  const normalized = state.toLowerCase().trim();
+
+  // Handle common synonyms
+  switch (normalized) {
+    case "error":
+      return TaskState.FAILED;
+    case "succeeded":
+      return TaskState.DONE;
+    case TaskState.PENDING:
+    case TaskState.RUNNING:
+    case TaskState.DONE:
+    case TaskState.FAILED:
+      return normalized;
+    default:
+      return TaskState.PENDING;
+  }
+}
+
+/**
+ * Normalizes a job status string to canonical form.
+ * @param {string} status - Raw job status
+ * @returns {string} Canonical job status
+ */
+export function normalizeJobStatus(status) {
+  if (typeof status !== "string") {
+    return JobStatus.PENDING;
+  }
+
+  const normalized = status.toLowerCase().trim();
+
+  // Handle common synonyms
+  switch (normalized) {
+    case "completed":
+      return JobStatus.COMPLETE;
+    case "error":
+      return JobStatus.FAILED;
+    case JobStatus.PENDING:
+    case JobStatus.RUNNING:
+    case JobStatus.FAILED:
+    case JobStatus.COMPLETE:
+      return normalized;
+    default:
+      return JobStatus.PENDING;
+  }
+}
+
+/**
+ * Derives job status from an array of task states.
+ * Priority: failed > running > complete > pending
+ * @param {Array<Object>} tasks - Array of task objects with state property
+ * @returns {string} Canonical job status
+ */
+export function deriveJobStatusFromTasks(tasks) {
+  if (!Array.isArray(tasks) || tasks.length === 0) {
+    return JobStatus.PENDING;
+  }
+
+  // Normalize all task states first
+  const normalizedStates = tasks.map((task) => normalizeTaskState(task.state));
+
+  // Apply priority rules
+  if (normalizedStates.some((state) => state === TaskState.FAILED)) {
+    return JobStatus.FAILED;
+  }
+
+  if (normalizedStates.some((state) => state === TaskState.RUNNING)) {
+    return JobStatus.RUNNING;
+  }
+
+  if (normalizedStates.every((state) => state === TaskState.DONE)) {
+    return JobStatus.COMPLETE;
+  }
+
+  return JobStatus.PENDING;
+}
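A minimal usage sketch of the new status helpers. The import path below is assumed from the package layout shown in this diff; it is not taken from the package's documentation.

// Sketch: normalize raw states and derive an aggregate job status.
import {
  JobStatus,
  normalizeTaskState,
  deriveJobStatusFromTasks,
} from "./src/config/statuses.js"; // path relative to the package root; assumed

// Synonyms collapse to canonical values: "error" -> "failed", "succeeded" -> "done".
console.log(normalizeTaskState("Succeeded ")); // "done" (lowercased and trimmed)

// A single failed task outranks running/done/pending when deriving job status.
const tasks = [{ state: "done" }, { state: "error" }, { state: "running" }];
console.log(deriveJobStatusFromTasks(tasks) === JobStatus.FAILED); // true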
package/src/core/config.js
CHANGED
@@ -187,168 +187,6 @@ export const defaultConfig = {
     maxConcurrency: 5,
     retryMaxAttempts: 3,
     retryBackoffMs: 1000,
-    models: {
-      // DeepSeek (2025)
-      "deepseek:chat": {
-        provider: "deepseek",
-        model: "deepseek-chat", // V3.2 Exp (non-thinking) under the hood
-        tokenCostInPerMillion: 0.27,
-        tokenCostOutPerMillion: 1.1,
-      },
-      "deepseek:reasoner": {
-        provider: "deepseek",
-        model: "deepseek-reasoner", // R1 family
-        tokenCostInPerMillion: 0.55,
-        tokenCostOutPerMillion: 2.19,
-      },
-
-      // — OpenAI (2024 legacy still callable) —
-      "openai:gpt-4": {
-        provider: "openai",
-        model: "gpt-4",
-        tokenCostInPerMillion: 30.0,
-        tokenCostOutPerMillion: 60.0,
-      },
-      "openai:gpt-4-turbo": {
-        provider: "openai",
-        model: "gpt-4-turbo",
-        tokenCostInPerMillion: 10.0,
-        tokenCostOutPerMillion: 30.0,
-      },
-
-      // — OpenAI (2025) —
-      "openai:gpt-5": {
-        provider: "openai",
-        model: "gpt-5-chat-latest", // alias tracks GPT-5 pricing
-        tokenCostInPerMillion: 1.25,
-        tokenCostOutPerMillion: 10.0,
-      },
-
-      "openai:gpt-5-core": {
-        provider: "openai",
-        model: "gpt-5", // flagship
-        tokenCostInPerMillion: 1.25,
-        tokenCostOutPerMillion: 10.0,
-      },
-      "openai:gpt-5-chat": {
-        provider: "openai",
-        model: "gpt-5-chat-latest", // Chat variant
-        tokenCostInPerMillion: 1.25,
-        tokenCostOutPerMillion: 10.0,
-      },
-      "openai:gpt-5-pro": {
-        provider: "openai",
-        model: "gpt-5-pro", // higher-compute tier
-        tokenCostInPerMillion: 15.0,
-        tokenCostOutPerMillion: 120.0,
-      },
-      "openai:gpt-5-mini": {
-        provider: "openai",
-        model: "gpt-5-mini",
-        tokenCostInPerMillion: 0.25,
-        tokenCostOutPerMillion: 2.0,
-      },
-      "openai:gpt-5-nano": {
-        provider: "openai",
-        model: "gpt-5-nano",
-        tokenCostInPerMillion: 0.05,
-        tokenCostOutPerMillion: 0.4,
-      },
-
-      // — Google Gemini (2025) —
-      "gemini:2.5-pro": {
-        provider: "google",
-        model: "gemini-2.5-pro", // ≤200k input tier shown; >200k is higher
-        tokenCostInPerMillion: 1.25,
-        tokenCostOutPerMillion: 10.0,
-      },
-      "gemini:2.5-flash": {
-        provider: "google",
-        model: "gemini-2.5-flash",
-        tokenCostInPerMillion: 0.3,
-        tokenCostOutPerMillion: 2.5,
-      },
-      "gemini:2.5-flash-lite": {
-        provider: "google",
-        model: "gemini-2.5-flash-lite",
-        tokenCostInPerMillion: 0.1,
-        tokenCostOutPerMillion: 0.4,
-      },
-      "gemini:2.5-flash-image": {
-        provider: "google",
-        model: "gemini-2.5-flash-image",
-        // Inputs follow 2.5 Flash text pricing; outputs are **image tokens** at $30/M (≈$0.039 per 1024² image)
-        tokenCostInPerMillion: 0.3,
-        tokenCostOutPerMillion: 30.0,
-      },
-
-      // — Z.ai (formerly Zhipu) —
-      "zai:glm-4.6": {
-        provider: "zai",
-        model: "GLM-4.6",
-        tokenCostInPerMillion: 0.6,
-        tokenCostOutPerMillion: 2.2,
-      },
-      "zai:glm-4.5": {
-        provider: "zai",
-        model: "GLM-4.5",
-        tokenCostInPerMillion: 0.6,
-        tokenCostOutPerMillion: 2.2,
-      },
-      "zai:glm-4.5-air": {
-        provider: "zai",
-        model: "GLM-4.5-Air",
-        tokenCostInPerMillion: 0.2,
-        tokenCostOutPerMillion: 1.1,
-      },
-
-      // — Anthropic —
-      // current (Claude 4.5 / 4.1)
-      "anthropic:sonnet-4-5": {
-        provider: "anthropic",
-        model: "claude-sonnet-4-5",
-        tokenCostInPerMillion: 3.0,
-        tokenCostOutPerMillion: 15.0,
-      },
-      "anthropic:haiku-4-5": {
-        provider: "anthropic",
-        model: "claude-haiku-4-5",
-        tokenCostInPerMillion: 1.0,
-        tokenCostOutPerMillion: 5.0,
-      },
-      "anthropic:opus-4-1": {
-        provider: "anthropic",
-        model: "claude-opus-4-1",
-        tokenCostInPerMillion: 15.0,
-        tokenCostOutPerMillion: 75.0,
-      },
-
-      // legacy / still available
-      "anthropic:sonnet-4": {
-        provider: "anthropic",
-        model: "claude-sonnet-4-0",
-        tokenCostInPerMillion: 3.0,
-        tokenCostOutPerMillion: 15.0,
-      },
-      "anthropic:sonnet-3-7": {
-        provider: "anthropic",
-        model: "claude-3-7-sonnet-20250219",
-        tokenCostInPerMillion: 3.0,
-        tokenCostOutPerMillion: 15.0,
-      },
-      "anthropic:opus-4": {
-        provider: "anthropic",
-        model: "claude-opus-4-0",
-        tokenCostInPerMillion: 15.0,
-        tokenCostOutPerMillion: 75.0,
-      },
-      "anthropic:haiku-3-5": {
-        provider: "anthropic",
-        model: "claude-3-5-haiku-20241022",
-        tokenCostInPerMillion: 0.8,
-        tokenCostOutPerMillion: 4.0,
-      },
-    },
   },
   ui: {
     port: 3000,
@@ -645,14 +483,14 @@ export async function loadConfig(options = {}) {
     await validateConfig(config);
   }
 
-  // Cache
+  // Cache
   currentConfig = config;
 
   return config;
 }
 
 /**
- * Get
+ * Get current configuration
  * Loads default config if not already loaded
  *
  * @returns {Object} Current configuration
package/src/core/file-io.js
CHANGED
package/src/core/module-loader.js
CHANGED
@@ -2,8 +2,6 @@ import fsp from "node:fs/promises";
 import path from "node:path";
 import { fileURLToPath, pathToFileURL } from "node:url";
 
-const WORKSPACE_CACHE_DIR = path.join(process.cwd(), ".tmp-task-modules");
-
 /**
  * Convert supported modulePath formats into a file:// URL.
  * @param {string | URL} modulePath
@@ -88,70 +86,86 @@ function createMissingModuleError(modulePath, originalError) {
 }
 
 /**
- * Copy a module file
+ * Copy a module file adjacent to its original location with a unique name.
  * @param {string} sourcePath
  * @returns {Promise<string>}
  */
-async function
-
+async function copyModuleAdjacent(sourcePath) {
+  const dir = path.dirname(sourcePath);
   const ext = path.extname(sourcePath) || ".js";
   const base = path.basename(sourcePath, ext);
   const uniqueSuffix = `${Date.now()}-${Math.random().toString(36).slice(2)}`;
-  const destFile = path.join(
-    WORKSPACE_CACHE_DIR,
-    `${base}.${uniqueSuffix}${ext}`
-  );
+  const destFile = path.join(dir, `.cache.${base}.${uniqueSuffix}${ext}`);
   await fsp.copyFile(sourcePath, destFile);
   return destFile;
 }
 
 /**
  * Dynamically import a module with cache busting while remaining compatible with Node's file:/// resolution.
- * Falls back to copying the module
+ * Falls back to copying the module adjacent to its original location when query parameters break filesystem resolution.
  * @param {string | URL} modulePath
  * @returns {Promise<any>} Module namespace object
  */
 export async function loadFreshModule(modulePath) {
   const fileUrl = resolveToFileURL(modulePath);
-  const cacheBustedUrl = `${fileUrl.href}?t=${Date.now()}`;
 
+  // First attempt direct import without cache busting
   try {
-    return await import(
+    return await import(fileUrl.href);
   } catch (error) {
-    if (!isModuleNotFoundError(error)
+    if (!isModuleNotFoundError(error)) {
       throw error;
     }
 
-
-
+    // Second attempt: try cache-busted import
+    const cacheBustedUrl = `${fileUrl.href}?t=${Date.now()}`;
    try {
-      await
-    } catch {
-
-
-
-    )
-
+      return await import(cacheBustedUrl);
+    } catch (cacheBustedError) {
+      if (
+        !isModuleNotFoundError(cacheBustedError) ||
+        fileUrl.protocol !== "file:"
+      ) {
+        throw cacheBustedError;
+      }
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      const absolutePath = fileURLToPath(fileUrl);
+
+      try {
+        await fsp.access(absolutePath);
+      } catch {
+        throw createMissingModuleError(
+          absolutePath,
+          /** @type {Error} */ (cacheBustedError)
+        );
+      }
+
+      // Third attempt: copy adjacent and import
+      let adjacentCopy;
+      try {
+        adjacentCopy = await copyModuleAdjacent(absolutePath);
+        const adjacentUrl = `${pathToFileURL(adjacentCopy).href}?t=${Date.now()}`;
+        return await import(adjacentUrl);
+      } catch (fallbackError) {
+        const messageLines = [
+          `Failed to load module "${absolutePath}" after attempting direct import, cache-busting import, and adjacent copy fallback.`,
+          `Direct import URL: ${fileUrl.href}`,
+          `Cache-busted URL: ${cacheBustedUrl}`,
+          `Adjacent fallback path attempted: ${adjacentCopy || "[adjacent copy creation failed]"}`,
+          `Original error: ${/** @type {Error} */ (error).message}`,
+          `Cache-bust error: ${/** @type {Error} */ (cacheBustedError).message}`,
+          `Fallback error: ${/** @type {Error} */ (fallbackError).message}`,
+        ];
+        const combined = new Error(messageLines.join("\n"));
+        if ("cause" in Error.prototype) {
+          combined.cause = fallbackError;
+        } else {
+          combined.fallbackError = fallbackError;
+        }
+        combined.initialError = error;
+        combined.cacheBustedError = cacheBustedError;
+        throw combined;
      }
-    combined.initialError = error;
-    throw combined;
    }
  }
}
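A usage sketch for the reworked loader above. The pipeline runner in the next section imports it from "./module-loader.js"; the import path and task file here are assumed for illustration.

// Sketch: re-import a task module after it changes on disk.
// The direct / cache-busted / adjacent-copy fallback chain is internal to loadFreshModule.
import { loadFreshModule } from "./src/core/module-loader.js"; // path assumed

const taskPath = "./pipeline-tasks/summarize.js"; // hypothetical task module on disk
const first = await loadFreshModule(taskPath);

// ...edit ./pipeline-tasks/summarize.js between runs...

const second = await loadFreshModule(taskPath);
// `second` reflects the edited file, whereas a plain import() would have
// returned the cached namespace from the first load.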
package/src/core/pipeline-runner.js
CHANGED
@@ -5,6 +5,10 @@ import { loadFreshModule } from "./module-loader.js";
 import { validatePipelineOrThrow } from "./validation.js";
 import { getPipelineConfig } from "./config.js";
 import { writeJobStatus } from "./status-writer.js";
+import { TaskState } from "../config/statuses.js";
+import { ensureTaskSymlinkBridge } from "./symlink-bridge.js";
+import { cleanupTaskSymlinks } from "./symlink-utils.js";
+import { createTaskFileIO } from "./file-io.js";
 
 const ROOT = process.env.PO_ROOT || process.cwd();
 const DATA_DIR = path.join(ROOT, process.env.PO_DATA_DIR || "pipeline-data");
@@ -18,6 +22,8 @@ if (!jobId) throw new Error("runner requires jobId as argument");
 
 const workDir = path.join(CURRENT_DIR, jobId);
 
+const startFromTask = process.env.PO_START_FROM_TASK;
+
 // Get pipeline slug from environment or fallback to seed.json
 let pipelineSlug = process.env.PO_PIPELINE_SLUG;
 if (!pipelineSlug) {
@@ -61,7 +67,15 @@ const seed = JSON.parse(
 let pipelineArtifacts = {};
 
 for (const taskName of pipeline.tasks) {
-
+  // Skip tasks before startFromTask when targeting a specific restart point
+  if (
+    startFromTask &&
+    pipeline.tasks.indexOf(taskName) < pipeline.tasks.indexOf(startFromTask)
+  ) {
+    continue;
+  }
+
+  if (status.tasks[taskName]?.state === TaskState.DONE) {
     try {
       const outputPath = path.join(workDir, "tasks", taskName, "output.json");
       const output = JSON.parse(await fs.readFile(outputPath, "utf8"));
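The restart gate added above reads as a filter over the ordered task list. The standalone sketch below uses a hypothetical task list and is not a package export.

// Tasks ordered before PO_START_FROM_TASK are skipped; the rest re-run.
const orderedTasks = ["ingest", "draft", "review", "publish"]; // hypothetical pipeline.tasks
const startFrom = "review"; // e.g. from process.env.PO_START_FROM_TASK

const toRun = orderedTasks.filter(
  (name) =>
    !startFrom || orderedTasks.indexOf(name) >= orderedTasks.indexOf(startFrom)
);
console.log(toRun); // ["review", "publish"]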
@@ -71,17 +85,13 @@ for (const taskName of pipeline.tasks) {
   }
 
   await updateStatus(taskName, {
-    state:
+    state: TaskState.RUNNING,
     startedAt: now(),
     attempts: (status.tasks[taskName]?.attempts || 0) + 1,
   });
 
   const taskDir = path.join(workDir, "tasks", taskName);
   await fs.mkdir(taskDir, { recursive: true });
-  await atomicWrite(
-    path.join(taskDir, "letter.json"),
-    JSON.stringify({ task: taskName, at: now() }, null, 2)
-  );
 
   try {
     const ctx = {
@@ -104,14 +114,31 @@ for (const taskName of pipeline.tasks) {
         ? modulePath
         : path.resolve(path.dirname(TASK_REGISTRY), modulePath);
 
-
+    // Create symlink bridge for deterministic module resolution
+    const poRoot = process.env.PO_ROOT || process.cwd();
+    const relocatedEntry = await ensureTaskSymlinkBridge({
+      taskDir,
+      poRoot,
+      taskModulePath: absoluteModulePath,
+    });
+
+    // Create fileIO for this task
+    const fileIO = createTaskFileIO({
+      workDir,
+      taskName,
+      getStage: () => null, // pipeline-runner doesn't have stages
+      statusPath: tasksStatusPath,
+    });
+
+    const result = await runPipeline(relocatedEntry, ctx);
 
     if (!result.ok) {
-      // Persist execution-logs.json and failure-details.json on task failure
+      // Persist execution-logs.json and failure-details.json on task failure via IO
      if (result.logs) {
-        await
-
-        JSON.stringify(result.logs, null, 2)
+        await fileIO.writeLog(
+          "execution-logs.json",
+          JSON.stringify(result.logs, null, 2),
+          { mode: "replace" }
        );
      }
      const failureDetails = {
@@ -121,14 +148,15 @@ for (const taskName of pipeline.tasks) {
        context: result.context,
        refinementAttempts: result.refinementAttempts || 0,
      };
-      await
-
-      JSON.stringify(failureDetails, null, 2)
+      await fileIO.writeLog(
+        "failure-details.json",
+        JSON.stringify(failureDetails, null, 2),
+        { mode: "replace" }
      );
 
      // Update tasks-status.json with enriched failure context
      await updateStatus(taskName, {
-        state:
+        state: TaskState.FAILED,
        endedAt: now(),
        error: result.error, // Don't double-normalize - use result.error as-is
        failedStage: result.failedStage,
@@ -156,14 +184,15 @@ for (const taskName of pipeline.tasks) {
    // No need to manually write output.json or enumerate artifacts
 
    if (result.logs) {
-      await
-
-      JSON.stringify(result.logs, null, 2)
+      await fileIO.writeLog(
+        "execution-logs.json",
+        JSON.stringify(result.logs, null, 2),
+        { mode: "replace" }
      );
    }
 
    await updateStatus(taskName, {
-      state:
+      state: TaskState.DONE,
      endedAt: now(),
      executionTimeMs:
        result.logs?.reduce((total, log) => total + (log.ms || 0), 0) || 0,
@@ -171,7 +200,7 @@ for (const taskName of pipeline.tasks) {
    });
  } catch (err) {
    await updateStatus(taskName, {
-      state:
+      state: TaskState.FAILED,
      endedAt: now(),
      error: normalizeError(err),
    });
@@ -201,6 +230,9 @@ await appendLine(
  }) + "\n"
);
 
+// Clean up task symlinks to avoid dangling links in archives
+await cleanupTaskSymlinks(dest);
+
function now() {
  return new Date().toISOString();
}
@@ -225,12 +257,6 @@ async function appendLine(file, line) {
  await fs.appendFile(file, line);
}
 
-async function atomicWrite(file, data) {
-  const tmp = file + ".tmp";
-  await fs.writeFile(tmp, data);
-  await fs.rename(tmp, file);
-}
-
function normalizeError(e) {
  // If it's already a structured error object with a message string, pass it through
  if (e && typeof e === "object" && typeof e.message === "string") {