@ryanfw/prompt-orchestration-pipeline 0.0.1 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +415 -24
- package/package.json +46 -8
- package/src/api/files.js +48 -0
- package/src/api/index.js +149 -53
- package/src/api/validators/seed.js +141 -0
- package/src/cli/index.js +444 -29
- package/src/cli/run-orchestrator.js +39 -0
- package/src/cli/update-pipeline-json.js +47 -0
- package/src/components/DAGGrid.jsx +649 -0
- package/src/components/JobCard.jsx +96 -0
- package/src/components/JobDetail.jsx +159 -0
- package/src/components/JobTable.jsx +202 -0
- package/src/components/Layout.jsx +134 -0
- package/src/components/TaskFilePane.jsx +570 -0
- package/src/components/UploadSeed.jsx +239 -0
- package/src/components/ui/badge.jsx +20 -0
- package/src/components/ui/button.jsx +43 -0
- package/src/components/ui/card.jsx +20 -0
- package/src/components/ui/focus-styles.css +60 -0
- package/src/components/ui/progress.jsx +26 -0
- package/src/components/ui/select.jsx +27 -0
- package/src/components/ui/separator.jsx +6 -0
- package/src/config/paths.js +99 -0
- package/src/core/config.js +270 -9
- package/src/core/file-io.js +202 -0
- package/src/core/module-loader.js +157 -0
- package/src/core/orchestrator.js +275 -294
- package/src/core/pipeline-runner.js +95 -41
- package/src/core/progress.js +66 -0
- package/src/core/status-writer.js +331 -0
- package/src/core/task-runner.js +719 -73
- package/src/core/validation.js +120 -1
- package/src/lib/utils.js +6 -0
- package/src/llm/README.md +139 -30
- package/src/llm/index.js +222 -72
- package/src/pages/PipelineDetail.jsx +111 -0
- package/src/pages/PromptPipelineDashboard.jsx +223 -0
- package/src/providers/deepseek.js +3 -15
- package/src/ui/client/adapters/job-adapter.js +258 -0
- package/src/ui/client/bootstrap.js +120 -0
- package/src/ui/client/hooks/useJobDetailWithUpdates.js +619 -0
- package/src/ui/client/hooks/useJobList.js +50 -0
- package/src/ui/client/hooks/useJobListWithUpdates.js +335 -0
- package/src/ui/client/hooks/useTicker.js +26 -0
- package/src/ui/client/index.css +31 -0
- package/src/ui/client/index.html +18 -0
- package/src/ui/client/main.jsx +38 -0
- package/src/ui/config-bridge.browser.js +149 -0
- package/src/ui/config-bridge.js +149 -0
- package/src/ui/config-bridge.node.js +310 -0
- package/src/ui/dist/assets/index-CxcrauYR.js +22702 -0
- package/src/ui/dist/assets/style-D6K_oQ12.css +62 -0
- package/src/ui/dist/index.html +19 -0
- package/src/ui/endpoints/job-endpoints.js +300 -0
- package/src/ui/file-reader.js +216 -0
- package/src/ui/job-change-detector.js +83 -0
- package/src/ui/job-index.js +231 -0
- package/src/ui/job-reader.js +274 -0
- package/src/ui/job-scanner.js +188 -0
- package/src/ui/public/app.js +3 -1
- package/src/ui/server.js +1636 -59
- package/src/ui/sse-enhancer.js +149 -0
- package/src/ui/sse.js +204 -0
- package/src/ui/state-snapshot.js +252 -0
- package/src/ui/transformers/list-transformer.js +347 -0
- package/src/ui/transformers/status-transformer.js +307 -0
- package/src/ui/watcher.js +61 -7
- package/src/utils/dag.js +101 -0
- package/src/utils/duration.js +126 -0
- package/src/utils/id-generator.js +30 -0
- package/src/utils/jobs.js +7 -0
- package/src/utils/pipelines.js +44 -0
- package/src/utils/task-files.js +271 -0
- package/src/utils/ui.jsx +76 -0
- package/src/ui/public/index.html +0 -53
- package/src/ui/public/style.css +0 -341
package/src/ui/watcher.js
CHANGED
|
@@ -4,6 +4,18 @@
|
|
|
4
4
|
*/
|
|
5
5
|
|
|
6
6
|
import chokidar from "chokidar";
|
|
7
|
+
import path from "node:path";
|
|
8
|
+
import { detectJobChange } from "./job-change-detector.js";
|
|
9
|
+
import { sseEnhancer } from "./sse-enhancer.js";
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* Normalize path separators to forward slash and trim
|
|
13
|
+
* Reuses the same logic from job-change-detector
|
|
14
|
+
*/
|
|
15
|
+
/**
 * Normalize path separators to forward slash and collapse duplicate slashes.
 * Mirrors the normalization used by job-change-detector.
 * @param {unknown} p - Candidate path value
 * @returns {string} Normalized path, or "" for empty/non-string input
 */
function normalizePath(p) {
  const isNonEmptyString = typeof p === "string" && p.length > 0;
  if (!isNonEmptyString) return "";
  const forwardSlashed = p.replace(/\\/g, "/");
  return forwardSlashed.replace(/\/\/+/g, "/");
}
|
|
7
19
|
|
|
8
20
|
/**
|
|
9
21
|
* Start watching specified paths for file changes
|
|
@@ -11,10 +23,15 @@ import chokidar from "chokidar";
|
|
|
11
23
|
* @param {Function} onChange - Callback function to handle file changes
|
|
12
24
|
* @param {Object} options - Configuration options
|
|
13
25
|
* @param {number} options.debounceMs - Debounce time in milliseconds (default: 200)
|
|
26
|
+
* @param {string} options.baseDir - Base directory for path normalization (required)
|
|
14
27
|
* @returns {Object} Watcher instance with close method
|
|
15
28
|
*/
|
|
16
29
|
export function start(paths, onChange, options = {}) {
|
|
17
|
-
|
|
30
|
+
if (!options.baseDir) {
|
|
31
|
+
throw new Error("options.baseDir is required");
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
const { baseDir, debounceMs = 200 } = options;
|
|
18
35
|
let debounceTimer = null;
|
|
19
36
|
let pendingChanges = [];
|
|
20
37
|
|
|
@@ -42,19 +59,56 @@ export function start(paths, onChange, options = {}) {
|
|
|
42
59
|
};
|
|
43
60
|
|
|
44
61
|
// Handle file events
|
|
45
|
-
watcher.on("add", (
|
|
46
|
-
|
|
62
|
+
watcher.on("add", (rawPath) => {
|
|
63
|
+
// Compute relative path from baseDir and normalize
|
|
64
|
+
const rel = normalizePath(path.relative(baseDir, rawPath));
|
|
65
|
+
// Always use relative path for consistency with tests
|
|
66
|
+
const normalizedPath = rel;
|
|
67
|
+
|
|
68
|
+
console.debug("[Watcher] File added:", normalizedPath);
|
|
69
|
+
pendingChanges.push({ path: normalizedPath, type: "created" });
|
|
47
70
|
scheduleFlush();
|
|
71
|
+
|
|
72
|
+
// Check for job-specific changes with normalized path
|
|
73
|
+
const jobChange = detectJobChange(normalizedPath);
|
|
74
|
+
if (jobChange) {
|
|
75
|
+
console.debug("[Watcher] Job change detected:", jobChange);
|
|
76
|
+
sseEnhancer.handleJobChange(jobChange);
|
|
77
|
+
}
|
|
48
78
|
});
|
|
49
79
|
|
|
50
|
-
watcher.on("change", (
|
|
51
|
-
|
|
80
|
+
watcher.on("change", (rawPath) => {
|
|
81
|
+
// Compute relative path from baseDir and normalize
|
|
82
|
+
const rel = normalizePath(path.relative(baseDir, rawPath));
|
|
83
|
+
// Always use relative path for consistency with tests
|
|
84
|
+
const normalizedPath = rel;
|
|
85
|
+
|
|
86
|
+
console.debug("[Watcher] File changed:", normalizedPath);
|
|
87
|
+
pendingChanges.push({ path: normalizedPath, type: "modified" });
|
|
52
88
|
scheduleFlush();
|
|
89
|
+
|
|
90
|
+
// Check for job-specific changes with normalized path
|
|
91
|
+
const jobChange = detectJobChange(normalizedPath);
|
|
92
|
+
if (jobChange) {
|
|
93
|
+
console.debug("[Watcher] Job change detected:", jobChange);
|
|
94
|
+
sseEnhancer.handleJobChange(jobChange);
|
|
95
|
+
}
|
|
53
96
|
});
|
|
54
97
|
|
|
55
|
-
watcher.on("unlink", (
|
|
56
|
-
|
|
98
|
+
watcher.on("unlink", (rawPath) => {
|
|
99
|
+
// Compute relative path from baseDir and normalize
|
|
100
|
+
const rel = normalizePath(path.relative(baseDir, rawPath));
|
|
101
|
+
// Always use relative path for consistency with tests
|
|
102
|
+
const normalizedPath = rel;
|
|
103
|
+
|
|
104
|
+
pendingChanges.push({ path: normalizedPath, type: "deleted" });
|
|
57
105
|
scheduleFlush();
|
|
106
|
+
|
|
107
|
+
// Check for job-specific changes with normalized path
|
|
108
|
+
const jobChange = detectJobChange(normalizedPath);
|
|
109
|
+
if (jobChange) {
|
|
110
|
+
sseEnhancer.handleJobChange(jobChange);
|
|
111
|
+
}
|
|
58
112
|
});
|
|
59
113
|
|
|
60
114
|
// Return watcher with enhanced close method
|
package/src/utils/dag.js
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
/**
 * Normalize a job's tasks collection into an object keyed by task id.
 * Array form is re-indexed by each entry's name (preferred) or id;
 * object form passes through untouched.
 * @param {unknown} tasks
 * @returns {Object} Map of taskId -> task
 */
function normalizeJobTasks(tasks) {
  // Nothing to normalize — present an empty map.
  if (!tasks) return {};

  // Object form is assumed to be keyed already; pass through as-is.
  if (!Array.isArray(tasks)) return tasks;

  // Array form: index each entry by its name or id; skip unidentifiable entries.
  const byId = {};
  tasks.forEach((task) => {
    const key = task?.name || task?.id;
    if (key) byId[key] = task;
  });
  return byId;
}
|
|
15
|
+
|
|
16
|
+
/**
 * Resolve the display stage for a task, in priority order:
 * task-level currentStage, then job-level currentStage (only when this task
 * is the job's current task), then failedStage, then error debug info.
 * @param {Object} job
 * @param {string} taskId
 * @returns {string | undefined} Stage name, or undefined if unknown
 */
export function computeTaskStage(job, taskId) {
  const taskMap = normalizeJobTasks(job?.tasks);
  const task = taskMap?.[taskId];

  const nonEmpty = (value) => typeof value === "string" && value.length > 0;

  // 1) Task-level currentStage wins (most specific signal).
  if (nonEmpty(task?.currentStage)) {
    return task.currentStage;
  }

  // 2) Job-level currentStage applies only when this task IS the current one.
  if (job?.current === taskId && nonEmpty(job?.currentStage)) {
    return job.currentStage;
  }

  // 3) Failed tasks report the stage they failed in.
  if (nonEmpty(task?.failedStage)) {
    return task.failedStage;
  }

  // 4) Fall back to the stage recorded in error debug info.
  //    (Any string is accepted here, including "", matching the contract.)
  const debugStage = task?.error?.debug?.stage;
  if (typeof debugStage === "string") {
    return debugStage;
  }

  // No stage information available.
  return undefined;
}
|
|
47
|
+
|
|
48
|
+
/**
 * Build the DAG item list for a job: one item per task declared by the
 * pipeline (status "pending" when the job has no record of it), followed by
 * items for tasks present on the job but absent from the pipeline.
 * @param {Object} job
 * @param {Object} pipeline - Pipeline definition with a `tasks` id array
 * @returns {Array<{id: string, status: string, source: string, stage: (string|undefined)}>}
 */
export function computeDagItems(job, pipeline) {
  const jobTasks = normalizeJobTasks(job?.tasks);
  const declaredIds = pipeline?.tasks || [];

  // Items for every task the pipeline definition declares.
  const fromPipeline = declaredIds.map((id) => {
    const jobTask = jobTasks[id];
    return {
      id,
      status: jobTask ? jobTask.state : "pending",
      source: "pipeline",
      stage: computeTaskStage(job, id),
    };
  });

  // Items for tasks the job tracks that the pipeline never declared.
  const declared = new Set(declaredIds);
  const fromJobOnly = Object.keys(jobTasks)
    .filter((id) => !declared.has(id))
    .map((id) => ({
      id,
      status: jobTasks[id].state,
      source: "job-extra",
      stage: computeTaskStage(job, id),
    }));

  return fromPipeline.concat(fromJobOnly);
}
|
|
79
|
+
|
|
80
|
+
/**
 * Pick the "active" item index for DAG display: the first running task,
 * else the first failed task, else the last completed ("done") task,
 * else index 0.
 * @param {Array<{status: string}>} items
 * @returns {number}
 */
export function computeActiveIndex(items) {
  if (!items || items.length === 0) return 0;

  // A running task is always the active one.
  const runningIndex = items.findIndex((item) => item.status === "running");
  if (runningIndex !== -1) return runningIndex;

  // Otherwise point at the first failure.
  const failedIndex = items.findIndex((item) => item.status === "failed");
  if (failedIndex !== -1) return failedIndex;

  // Otherwise the most recently completed task.
  let lastDoneIndex = -1;
  for (let i = 0; i < items.length; i += 1) {
    if (items[i].status === "done") lastDoneIndex = i;
  }
  return lastDoneIndex !== -1 ? lastDoneIndex : 0;
}
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Duration policy utilities for consistent time display across components
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* Normalizes task state names to canonical values
|
|
7
|
+
* @param {string} state - Raw task state
|
|
8
|
+
* @returns {string} Normalized state
|
|
9
|
+
*/
|
|
10
|
+
/**
 * Normalizes task state names to canonical values.
 * "done" maps to "completed", "failed" maps to "error"; all other states
 * (known or unknown) pass through unchanged.
 * @param {string} state - Raw task state
 * @returns {string} Normalized state
 */
export function normalizeState(state) {
  if (state === "done") return "completed";
  if (state === "failed" || state === "error") return "error";
  // pending / running / current / completed / rejected — and anything
  // unrecognized — are returned as-is.
  return state;
}
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Calculates display duration for a task according to policy rules
|
|
30
|
+
* @param {Object} task - Task object with state, startedAt, endedAt, executionTime, executionTimeMs
|
|
31
|
+
* @param {number} now - Current timestamp (default: Date.now())
|
|
32
|
+
* @returns {number} Duration in milliseconds
|
|
33
|
+
*/
|
|
34
|
+
/**
 * Calculates display duration for a task according to policy rules.
 * Running tasks show elapsed time since start; completed tasks prefer a
 * recorded execution time and fall back to start/end timestamps; all other
 * states display zero.
 * @param {Object} task - Task with state, startedAt, endedAt, executionTime, executionTimeMs
 * @param {number} [now=Date.now()] - Current timestamp
 * @returns {number} Duration in milliseconds
 */
export function taskDisplayDurationMs(task, now = Date.now()) {
  const { state, startedAt, endedAt, executionTime, executionTimeMs } = task;

  switch (normalizeState(state)) {
    case "running":
    case "current": {
      // Live tasks: elapsed wall time since start; unknown start shows 0.
      if (!startedAt) return 0;
      return Math.max(0, now - Date.parse(startedAt));
    }

    case "completed": {
      // Prefer a recorded execution time (ms field wins), even without startedAt.
      const recorded = executionTimeMs ?? executionTime;
      if (typeof recorded === "number" && recorded >= 0) {
        return recorded;
      }

      // Otherwise derive the span from timestamps; missing end falls back to now.
      if (!startedAt) return 0;
      const finishedAt = endedAt ? Date.parse(endedAt) : now;
      return Math.max(0, finishedAt - Date.parse(startedAt));
    }

    // pending, rejected, and unknown states display no duration.
    default:
      return 0;
  }
}
|
|
73
|
+
|
|
74
|
+
/**
|
|
75
|
+
* Calculates cumulative duration across all tasks in a job
|
|
76
|
+
* @param {Object} job - Job object with tasks (array or object)
|
|
77
|
+
* @param {number} now - Current timestamp (default: Date.now())
|
|
78
|
+
* @returns {number} Total duration in milliseconds
|
|
79
|
+
*/
|
|
80
|
+
/**
 * Calculates cumulative duration across all tasks in a job.
 * Accepts tasks as an array or an id-keyed object; anything else yields 0.
 * @param {Object} job - Job object with a `tasks` collection
 * @param {number} [now=Date.now()] - Current timestamp
 * @returns {number} Total duration in milliseconds
 */
export function jobCumulativeDurationMs(job, now = Date.now()) {
  const { tasks } = job;
  if (!tasks) return 0;

  let taskList = null;
  if (Array.isArray(tasks)) {
    taskList = tasks;
  } else if (typeof tasks === "object") {
    taskList = Object.values(tasks);
  }
  if (!taskList) return 0;

  let total = 0;
  for (const task of taskList) {
    total += taskDisplayDurationMs(task, now);
  }
  return total;
}
|
|
100
|
+
|
|
101
|
+
// Legacy helpers (kept for compatibility but not used for policy)
|
|
102
|
+
// Legacy helper (kept for compatibility but not used for policy).
/**
 * Format a millisecond duration as a compact human string, e.g.
 * "1h 2m 3s", "5m 10s", "42s". Non-positive input yields "0s".
 * @param {number} ms
 * @returns {string}
 */
export function fmtDuration(ms) {
  if (ms <= 0) return "0s";

  const totalSeconds = Math.floor(ms / 1000);
  const totalMinutes = Math.floor(totalSeconds / 60);
  const hours = Math.floor(totalMinutes / 60);
  const minutes = totalMinutes % 60;
  const seconds = totalSeconds % 60;

  if (hours > 0) {
    // Omit the seconds part when it would be "0s".
    return seconds > 0
      ? `${hours}h ${minutes}m ${seconds}s`
      : `${hours}h ${minutes}m`;
  }
  if (totalMinutes > 0) {
    return `${totalMinutes}m ${seconds}s`;
  }
  return `${totalSeconds}s`;
}
|
|
123
|
+
|
|
124
|
+
/**
 * Elapsed milliseconds between two timestamps, clamped at zero
 * (negative spans from clock skew display as 0).
 * @param {number} startTime
 * @param {number} [endTime=Date.now()]
 * @returns {number}
 */
export function elapsedBetween(startTime, endTime = Date.now()) {
  const elapsed = endTime - startTime;
  return Math.max(0, elapsed);
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Utility functions for generating random IDs
|
|
3
|
+
* @module utils/id-generator
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { randomBytes } from "node:crypto";
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Generate a random job ID using crypto.randomBytes
|
|
10
|
+
* @param {number} [length=12] - Length of the ID in bytes
|
|
11
|
+
* @returns {string} Random alphanumeric ID
|
|
12
|
+
*/
|
|
13
|
+
/**
 * Generate a random job ID using crypto.randomBytes
 * @param {number} [length=12] - Length of the ID in characters
 * @returns {string} Random alphanumeric ID of exactly `length` characters
 */
export function generateJobId(length = 12) {
  let id = "";
  // base64 of `length` bytes yields ~4/3·length characters, but stripping
  // the URL-unsafe "+", "/" and "=" characters can occasionally leave fewer
  // than `length` usable characters. Loop until enough remain so the
  // returned ID length is always consistent.
  while (id.length < length) {
    id += randomBytes(length)
      .toString("base64")
      .replace(/[+/=]/g, "");
  }
  return id.substring(0, length);
}
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* Generate a random job ID with a prefix
|
|
23
|
+
* @param {string} [prefix='job'] - Prefix for the ID
|
|
24
|
+
* @param {number} [length=8] - Length of the random part
|
|
25
|
+
* @returns {string} Random ID with prefix
|
|
26
|
+
*/
|
|
27
|
+
/**
 * Generate a random job ID with a prefix, e.g. "job_a1B2c3D4".
 * @param {string} [prefix='job'] - Prefix for the ID
 * @param {number} [length=8] - Length of the random part
 * @returns {string} Random ID with prefix
 */
export function generateJobIdWithPrefix(prefix = "job", length = 8) {
  return [prefix, generateJobId(length)].join("_");
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
/**
 * Turn a pipeline slug into a human-readable title:
 * dashes/underscores become spaces and each word is capitalized,
 * e.g. "my-cool_pipeline" -> "My Cool Pipeline".
 * @param {unknown} slug
 * @returns {string} Title-cased label, or "" for empty/non-string input
 */
export function humanizePipelineSlug(slug) {
  if (typeof slug !== "string" || slug.length === 0) return "";

  const words = slug.replace(/[-_]+/g, " ").split(" ").filter(Boolean);
  const capitalized = words.map(
    (word) => word.charAt(0).toUpperCase() + word.slice(1)
  );
  return capitalized.join(" ");
}
|
|
10
|
+
|
|
11
|
+
/**
 * Derive normalized pipeline metadata from a source record.
 * Resolution rules:
 * - slug: explicit `pipelineSlug`, else `pipeline` when it is a string;
 * - label: explicit `pipelineLabel`, else humanized slug;
 * - pipeline: a plain-object `pipeline` value, else the slug string.
 * @param {Object} [source={}] - Record carrying pipeline/pipelineSlug/pipelineLabel
 * @returns {{pipeline: (Object|string|null), pipelineSlug: (string|null), pipelineLabel: (string|null)}}
 */
export function derivePipelineMetadata(source = {}) {
  const pipelineValue = source?.pipeline;

  // Resolve the slug: explicit pipelineSlug wins; a string pipeline value
  // is the fallback. (== null matches both null and undefined.)
  let slug = source?.pipelineSlug;
  if (slug == null) {
    slug = typeof pipelineValue === "string" ? pipelineValue : null;
  }
  const hasSlug = typeof slug === "string";

  // Resolve the label: explicit pipelineLabel wins, else humanize the slug.
  let label = source?.pipelineLabel;
  if (label == null) {
    label = hasSlug ? humanizePipelineSlug(slug) : null;
  }

  // Prefer a plain-object pipeline value; otherwise fall back to the slug.
  const isPlainObject =
    pipelineValue != null &&
    typeof pipelineValue === "object" &&
    !Array.isArray(pipelineValue);
  const pipeline = isPlainObject ? pipelineValue : hasSlug ? slug : null;

  return {
    pipeline,
    pipelineSlug: hasSlug ? slug : null,
    // `|| null` intentionally collapses an empty-string label to null.
    pipelineLabel: label || null,
  };
}
|
|
@@ -0,0 +1,271 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Task files selector utilities.
|
|
3
|
+
*
|
|
4
|
+
* Single source of truth for normalizing `task.files` structure that flows
|
|
5
|
+
* from tasks_status.json into the UI. The enforced contract is:
|
|
6
|
+
*
|
|
7
|
+
* {
|
|
8
|
+
* artifacts: string[],
|
|
9
|
+
* logs: string[],
|
|
10
|
+
* tmp: string[]
|
|
11
|
+
* }
|
|
12
|
+
*
|
|
13
|
+
* Any other keys (e.g., legacy "input"/"output") are ignored with a warning.
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* @typedef {Object} TaskFiles
|
|
18
|
+
* @property {string[]} artifacts
|
|
19
|
+
* @property {string[]} logs
|
|
20
|
+
* @property {string[]} tmp
|
|
21
|
+
*/
|
|
22
|
+
|
|
23
|
+
const CATEGORY_KEYS = ["artifacts", "logs", "tmp"];
|
|
24
|
+
const LEGACY_KEY_SET = new Set([
|
|
25
|
+
"input",
|
|
26
|
+
"inputs",
|
|
27
|
+
"output",
|
|
28
|
+
"outputs",
|
|
29
|
+
"legacyInput",
|
|
30
|
+
"legacyOutput",
|
|
31
|
+
"inputFiles",
|
|
32
|
+
"outputFiles",
|
|
33
|
+
]);
|
|
34
|
+
|
|
35
|
+
/**
|
|
36
|
+
* Produce a fresh TaskFiles object with empty arrays.
|
|
37
|
+
* @returns {TaskFiles}
|
|
38
|
+
*/
|
|
39
|
+
/**
 * Produce a fresh TaskFiles object with empty arrays.
 * A new object (and new arrays) is returned on every call so callers may
 * mutate the result safely.
 * @returns {TaskFiles}
 */
export function createEmptyTaskFiles() {
  return {
    artifacts: [],
    logs: [],
    tmp: [],
  };
}
|
|
42
|
+
|
|
43
|
+
/**
|
|
44
|
+
* Normalize an unknown value into an array of strings.
|
|
45
|
+
* @param {unknown} value
|
|
46
|
+
* @returns {string[]}
|
|
47
|
+
*/
|
|
48
|
+
/**
 * Normalize an unknown value into an array of strings.
 * Non-array input yields []; non-string entries are dropped.
 * @param {unknown} value
 * @returns {string[]}
 */
function coerceStringArray(value) {
  if (!Array.isArray(value)) return [];

  const strings = [];
  for (const entry of value) {
    if (typeof entry === "string") strings.push(entry);
  }
  return strings;
}
|
|
52
|
+
|
|
53
|
+
/**
|
|
54
|
+
* Emit warnings for unsupported task.files keys while ensuring we do not block execution.
|
|
55
|
+
* @param {string[]} keys
|
|
56
|
+
*/
|
|
57
|
+
/**
 * Emit warnings for unsupported task.files keys while ensuring we do not
 * block execution. Known legacy keys and other unknown keys are reported
 * in separate messages.
 * @param {string[]} keys
 */
function reportUnsupportedKeys(keys) {
  if (keys.length === 0) return;

  // Split into known legacy keys vs. everything else.
  const legacy = [];
  const other = [];
  for (const key of keys) {
    (LEGACY_KEY_SET.has(key) ? legacy : other).push(key);
  }

  if (legacy.length > 0) {
    console.warn(
      `[task-files] Ignoring unsupported legacy keys: ${legacy.join(", ")}`
    );
  }
  if (other.length > 0) {
    console.warn(
      `[task-files] Ignoring unsupported task.files keys: ${other.join(", ")}`
    );
  }
}
|
|
74
|
+
|
|
75
|
+
/**
|
|
76
|
+
* Normalize an arbitrary input into a strict TaskFiles structure.
|
|
77
|
+
* @param {unknown} candidate
|
|
78
|
+
* @returns {TaskFiles}
|
|
79
|
+
*/
|
|
80
|
+
/**
 * Normalize an arbitrary input into a strict TaskFiles structure
 * ({ artifacts, logs, tmp } — each a string array). Keys outside the
 * contract trigger a warning but never an error.
 * @param {unknown} candidate
 * @returns {TaskFiles}
 */
export function normalizeTaskFiles(candidate) {
  const isPlainObject =
    candidate != null &&
    typeof candidate === "object" &&
    !Array.isArray(candidate);
  const safeCandidate = isPlainObject ? candidate : {};

  // Warn (but never throw) about keys outside the enforced contract.
  const unsupported = Object.keys(safeCandidate).filter(
    (key) => !CATEGORY_KEYS.includes(key)
  );
  reportUnsupportedKeys(unsupported);

  const { artifacts, logs, tmp } = safeCandidate;
  return {
    artifacts: coerceStringArray(artifacts),
    logs: coerceStringArray(logs),
    tmp: coerceStringArray(tmp),
  };
}
|
|
97
|
+
|
|
98
|
+
/**
|
|
99
|
+
* Ensure provided task object has a normalized `files` property that matches
|
|
100
|
+
* the enforced contract. Returns the normalized structure for convenience.
|
|
101
|
+
* @param {Record<string, unknown> | null | undefined} task
|
|
102
|
+
* @returns {TaskFiles}
|
|
103
|
+
*/
|
|
104
|
+
/**
 * Ensure the provided task object has a normalized `files` property that
 * matches the enforced contract. Mutates `task.files` in place when `task`
 * is an object, and returns the normalized structure for convenience.
 * @param {Record<string, unknown> | null | undefined} task
 * @returns {TaskFiles}
 */
export function ensureTaskFiles(task) {
  const normalized = normalizeTaskFiles(task?.files);

  const canAttach = task != null && typeof task === "object";
  if (canAttach) {
    task.files = normalized;
  }

  return normalized;
}
|
|
111
|
+
|
|
112
|
+
/**
|
|
113
|
+
* Determine whether a task matches a provided identifier.
|
|
114
|
+
* @param {Record<string, unknown>} task
|
|
115
|
+
* @param {string | number} taskId
|
|
116
|
+
* @returns {boolean}
|
|
117
|
+
*/
|
|
118
|
+
/**
 * Determine whether a task matches a provided identifier.
 * Both `task.id` and `task.name` are compared after string coercion.
 * @param {Record<string, unknown>} task
 * @param {string | number} taskId
 * @returns {boolean}
 */
function matchesTaskIdentifier(task, taskId) {
  if (taskId == null) return false;
  if (!task || typeof task !== "object") return false;

  const target = String(taskId);
  return [task.id, task.name].some(
    (value) => value != null && String(value) === target
  );
}
|
|
125
|
+
|
|
126
|
+
/**
|
|
127
|
+
* Locate a task within a provided tasks collection.
|
|
128
|
+
* @param {unknown} tasks
|
|
129
|
+
* @param {string | number} taskId
|
|
130
|
+
* @returns {Record<string, unknown> | null}
|
|
131
|
+
*/
|
|
132
|
+
/**
 * Locate a task within a provided tasks collection.
 * Array form: numeric index lookup, then id/name match.
 * Object form: own-key lookup, then id/name match over values.
 * @param {unknown} tasks
 * @param {string | number} taskId
 * @returns {Record<string, unknown> | null}
 */
function findTaskCandidate(tasks, taskId) {
  if (!tasks || taskId == null) return null;

  if (Array.isArray(tasks)) {
    if (typeof taskId === "number" && tasks[taskId]) {
      const indexedTask = tasks[taskId];
      if (indexedTask && typeof indexedTask === "object") {
        return indexedTask;
      }
    }

    return tasks.find((task) => matchesTaskIdentifier(task, taskId)) ?? null;
  }

  if (typeof tasks === "object") {
    // Only consult own keys: a bare tasks[taskId] would also resolve
    // inherited properties (e.g. "__proto__" -> Object.prototype), which
    // would be returned as a bogus task candidate.
    if (Object.prototype.hasOwnProperty.call(tasks, taskId)) {
      const direct = tasks[taskId];
      if (direct && typeof direct === "object") {
        return direct;
      }
    }

    for (const task of Object.values(tasks)) {
      if (matchesTaskIdentifier(task, taskId)) {
        return task;
      }
    }
  }

  return null;
}
|
|
161
|
+
|
|
162
|
+
/**
|
|
163
|
+
* Public selector that retrieves a strict TaskFiles structure for a specific task.
|
|
164
|
+
* @param {Object} job
|
|
165
|
+
* @param {string | number} taskId
|
|
166
|
+
* @returns {TaskFiles}
|
|
167
|
+
*/
|
|
168
|
+
/**
 * Public selector that retrieves a strict TaskFiles structure for a
 * specific task. Falls back to an empty TaskFiles object when the job is
 * invalid or the task cannot be located.
 * @param {Object} job
 * @param {string | number} taskId
 * @returns {TaskFiles}
 */
export function getTaskFilesForTask(job, taskId) {
  console.debug("[getTaskFilesForTask] Called with:", { job, taskId });

  const hasValidJob = job != null && typeof job === "object";
  if (!hasValidJob) {
    console.debug("[getTaskFilesForTask] No job or invalid job object");
    return createEmptyTaskFiles();
  }

  const candidate = findTaskCandidate(job.tasks, taskId);
  if (candidate == null) {
    console.debug(
      "[getTaskFilesForTask] No task candidate found for taskId:",
      taskId
    );
    return createEmptyTaskFiles();
  }

  const result = ensureTaskFiles(candidate);
  console.debug("[getTaskFilesForTask] Task files result:", { taskId, result });
  return result;
}
|
|
189
|
+
|
|
190
|
+
/**
|
|
191
|
+
* List task files by type with fallback to empty task files
|
|
192
|
+
* @param {string} jobId - Job ID
|
|
193
|
+
* @param {string} taskId - Task ID
|
|
194
|
+
* @param {string} type - File type (artifacts/logs/tmp)
|
|
195
|
+
* @returns {Promise<string[]>} Array of file names
|
|
196
|
+
*/
|
|
197
|
+
/**
 * List task files by type with fallback to empty list.
 * @param {string} jobId - Job ID
 * @param {string} taskId - Task ID
 * @param {string} type - File type (artifacts/logs/tmp)
 * @returns {Promise<string[]>} Array of file names (empty on any failure)
 */
export async function listTaskFiles(jobId, taskId, type) {
  try {
    console.debug("[listTaskFiles] Called with:", { jobId, taskId, type });

    // Encode path segments and set the query via searchParams so ids or
    // types containing reserved characters ("/", "?", "&", …) cannot
    // break or redirect the request URL.
    const apiUrl = new URL(
      `/api/jobs/${encodeURIComponent(jobId)}/tasks/${encodeURIComponent(
        taskId
      )}/files`,
      window.location.origin
    );
    apiUrl.searchParams.set("type", type);

    console.debug("[listTaskFiles] Fetching from:", apiUrl.toString());

    const response = await fetch(apiUrl.toString());
    const data = await response.json();

    console.debug("[listTaskFiles] Response:", data);

    if (data.ok && data.data && data.data.files) {
      const fileNames = data.data.files.map((f) => f.name);
      console.debug("[listTaskFiles] Found files:", fileNames);
      return fileNames;
    }

    console.debug("[listTaskFiles] No valid files found in response");
    return [];
  } catch (error) {
    console.error("[listTaskFiles] Error:", error);
    return [];
  }
}
|
|
227
|
+
|
|
228
|
+
/**
|
|
229
|
+
* Read task file content with proper error handling
|
|
230
|
+
* @param {string} jobId - Job ID
|
|
231
|
+
* @param {string} taskId - Task ID
|
|
232
|
+
* @param {string} type - File type (artifacts/logs/tmp)
|
|
233
|
+
* @param {string} filename - File name
|
|
234
|
+
* @returns {Promise<Object|null>} File object or null if error
|
|
235
|
+
*/
|
|
236
|
+
/**
 * Read task file content with proper error handling.
 * @param {string} jobId - Job ID
 * @param {string} taskId - Task ID
 * @param {string} type - File type (artifacts/logs/tmp)
 * @param {string} filename - File name
 * @returns {Promise<Object|null>} Response payload, or null on any failure
 */
export async function readTaskFile(jobId, taskId, type, filename) {
  try {
    console.debug("[readTaskFile] Called with:", {
      jobId,
      taskId,
      type,
      filename,
    });

    // Encode path segments and build the query via searchParams; the
    // previous version only encoded `filename`, leaving jobId/taskId/type
    // able to break the URL when they contain reserved characters.
    const apiUrl = new URL(
      `/api/jobs/${encodeURIComponent(jobId)}/tasks/${encodeURIComponent(
        taskId
      )}/file`,
      window.location.origin
    );
    apiUrl.searchParams.set("type", type);
    apiUrl.searchParams.set("filename", filename);

    console.debug("[readTaskFile] Fetching from:", apiUrl.toString());

    const response = await fetch(apiUrl.toString());
    const data = await response.json();

    console.debug("[readTaskFile] Response:", data);

    if (data.ok) {
      console.debug("[readTaskFile] File read successfully");
      return data;
    }

    console.error("[readTaskFile] File read failed:", data);
    return null;
  } catch (error) {
    console.error("[readTaskFile] Error:", error);
    return null;
  }
}
|