@ryanfw/prompt-orchestration-pipeline 0.12.0 → 0.13.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +10 -1
- package/src/cli/analyze-task.js +51 -0
- package/src/cli/index.js +8 -0
- package/src/components/AddPipelineSidebar.jsx +144 -0
- package/src/components/AnalysisProgressTray.jsx +87 -0
- package/src/components/JobTable.jsx +4 -3
- package/src/components/Layout.jsx +142 -139
- package/src/components/MarkdownRenderer.jsx +149 -0
- package/src/components/PipelineDAGGrid.jsx +404 -0
- package/src/components/PipelineTypeTaskSidebar.jsx +96 -0
- package/src/components/SchemaPreviewPanel.jsx +97 -0
- package/src/components/StageTimeline.jsx +36 -0
- package/src/components/TaskAnalysisDisplay.jsx +227 -0
- package/src/components/TaskCreationSidebar.jsx +447 -0
- package/src/components/TaskDetailSidebar.jsx +119 -117
- package/src/components/TaskFilePane.jsx +94 -39
- package/src/components/ui/button.jsx +59 -27
- package/src/components/ui/sidebar.jsx +118 -0
- package/src/config/models.js +99 -67
- package/src/core/config.js +4 -1
- package/src/llm/index.js +129 -9
- package/src/pages/PipelineDetail.jsx +6 -6
- package/src/pages/PipelineList.jsx +214 -0
- package/src/pages/PipelineTypeDetail.jsx +234 -0
- package/src/providers/deepseek.js +76 -16
- package/src/providers/openai.js +61 -34
- package/src/task-analysis/enrichers/analysis-writer.js +62 -0
- package/src/task-analysis/enrichers/schema-deducer.js +145 -0
- package/src/task-analysis/enrichers/schema-writer.js +74 -0
- package/src/task-analysis/extractors/artifacts.js +137 -0
- package/src/task-analysis/extractors/llm-calls.js +176 -0
- package/src/task-analysis/extractors/stages.js +51 -0
- package/src/task-analysis/index.js +103 -0
- package/src/task-analysis/parser.js +28 -0
- package/src/task-analysis/utils/ast.js +43 -0
- package/src/ui/client/hooks/useAnalysisProgress.js +145 -0
- package/src/ui/client/index.css +64 -0
- package/src/ui/client/main.jsx +4 -0
- package/src/ui/client/sse-fetch.js +120 -0
- package/src/ui/dist/assets/index-cjHV9mYW.js +82578 -0
- package/src/ui/dist/assets/index-cjHV9mYW.js.map +1 -0
- package/src/ui/dist/assets/style-CoM9SoQF.css +180 -0
- package/src/ui/dist/index.html +2 -2
- package/src/ui/endpoints/create-pipeline-endpoint.js +194 -0
- package/src/ui/endpoints/pipeline-analysis-endpoint.js +246 -0
- package/src/ui/endpoints/pipeline-type-detail-endpoint.js +181 -0
- package/src/ui/endpoints/pipelines-endpoint.js +133 -0
- package/src/ui/endpoints/schema-file-endpoint.js +105 -0
- package/src/ui/endpoints/task-analysis-endpoint.js +104 -0
- package/src/ui/endpoints/task-creation-endpoint.js +114 -0
- package/src/ui/endpoints/task-save-endpoint.js +101 -0
- package/src/ui/express-app.js +45 -0
- package/src/ui/lib/analysis-lock.js +67 -0
- package/src/ui/lib/sse.js +30 -0
- package/src/ui/server.js +4 -0
- package/src/ui/utils/slug.js +31 -0
- package/src/ui/watcher.js +28 -2
- package/src/ui/dist/assets/index-B320avRx.js +0 -26613
- package/src/ui/dist/assets/index-B320avRx.js.map +0 -1
- package/src/ui/dist/assets/style-BYCoLBnK.css +0 -62
package/src/ui/dist/index.html
CHANGED

@@ -11,8 +11,8 @@
     />
     <title>Prompt Pipeline Dashboard</title>
     <link rel="icon" type="image/svg+xml" href="/favicon.svg" />
-    <script type="module" crossorigin src="/assets/index-B320avRx.js"></script>
-    <link rel="stylesheet" crossorigin href="/assets/style-BYCoLBnK.css">
+    <script type="module" crossorigin src="/assets/index-cjHV9mYW.js"></script>
+    <link rel="stylesheet" crossorigin href="/assets/style-CoM9SoQF.css">
   </head>
   <body>
     <div id="root"></div>
package/src/ui/endpoints/create-pipeline-endpoint.js
ADDED

@@ -0,0 +1,194 @@
+/**
+ * Create pipeline endpoint (logic-only)
+ *
+ * Exports:
+ * - handleCreatePipeline(req, res) -> HTTP request handler
+ *
+ * This function creates a new pipeline type by:
+ * - Validating name and description
+ * - Generating a slug from the provided name
+ * - Ensuring slug uniqueness in the registry
+ * - Creating directory structure and starter files
+ * - Updating the pipeline registry atomically
+ */
+
+import { getConfig } from "../../core/config.js";
+import { generateSlug, ensureUniqueSlug } from "../utils/slug.js";
+import { promises as fs } from "node:fs";
+import path from "node:path";
+
+/**
+ * Create starter files for a new pipeline
+ */
+async function createStarterFiles(pipelineDir, slug, name, description) {
+  // Create tasks directory
+  const tasksDir = path.join(pipelineDir, "tasks");
+  await fs.mkdir(tasksDir, { recursive: true });
+
+  // Create pipeline.json with correct schema
+  const pipelineJsonPath = path.join(pipelineDir, "pipeline.json");
+  const pipelineJsonContent = JSON.stringify(
+    {
+      name: slug,
+      version: "1.0.0",
+      description: description,
+      tasks: [],
+    },
+    null,
+    2
+  );
+  await fs.writeFile(pipelineJsonPath, pipelineJsonContent, "utf8");
+
+  // Create tasks/index.js
+  const tasksIndexPath = path.join(tasksDir, "index.js");
+  const tasksIndexContent = `// Task registry for ${slug}\nmodule.exports = { tasks: {} };\n`;
+  await fs.writeFile(tasksIndexPath, tasksIndexContent, "utf8");
+}
+
+/**
+ * Handle pipeline creation request
+ *
+ * Behavior:
+ * - Validate name and description are present
+ * - Generate slug from name (kebab-case, max 47 chars)
+ * - Ensure slug uniqueness in registry
+ * - Create directory structure and starter files
+ * - Update registry.json atomically using temp file
+ * - Return slug on success
+ */
+export async function handleCreatePipeline(req, res) {
+  console.log("[CreatePipelineEndpoint] POST /api/pipelines called");
+
+  try {
+    const { name, description } = req.body;
+
+    // Validate required fields
+    if (!name || typeof name !== "string" || name.trim() === "") {
+      res.status(400).json({ error: "Name and description are required" });
+      return;
+    }
+
+    if (
+      !description ||
+      typeof description !== "string" ||
+      description.trim() === ""
+    ) {
+      res.status(400).json({ error: "Name and description are required" });
+      return;
+    }
+
+    const config = getConfig();
+    const rootDir = config.paths?.root;
+
+    if (!rootDir) {
+      res.status(500).json({ error: "Failed to create pipeline" });
+      return;
+    }
+
+    const pipelineConfigDir = path.join(rootDir, "pipeline-config");
+    const registryPath = path.join(pipelineConfigDir, "registry.json");
+
+    // Read existing registry
+    let registryData;
+    try {
+      const contents = await fs.readFile(registryPath, "utf8");
+      registryData = JSON.parse(contents);
+    } catch (error) {
+      if (error.code === "ENOENT") {
+        // Create registry file with empty pipelines object
+        await fs.mkdir(pipelineConfigDir, { recursive: true });
+        registryData = { pipelines: {} };
+      } else if (error instanceof SyntaxError) {
+        console.error(
+          "[CreatePipelineEndpoint] Invalid JSON in registry:",
+          error
+        );
+        res.status(500).json({ error: "Failed to create pipeline" });
+        return;
+      } else {
+        throw error;
+      }
+    }
+
+    // Validate registry structure
+    if (
+      !registryData ||
+      typeof registryData !== "object" ||
+      !registryData.pipelines ||
+      typeof registryData.pipelines !== "object"
+    ) {
+      console.error("[CreatePipelineEndpoint] Invalid registry structure");
+      res.status(500).json({ error: "Failed to create pipeline" });
+      return;
+    }
+
+    // Generate unique slug
+    const baseSlug = generateSlug(name.trim());
+    if (!baseSlug) {
+      res
+        .status(400)
+        .json({ error: "Invalid pipeline name; unable to generate slug" });
+      return;
+    }
+    const existingSlugs = new Set(Object.keys(registryData.pipelines));
+    const slug = ensureUniqueSlug(baseSlug, existingSlugs);
+
+    // Generate paths
+    const pipelineDir = path.join(pipelineConfigDir, slug);
+    const pipelinePath = path.join("pipeline-config", slug, "pipeline.json");
+    const taskRegistryPath = path.join(
+      "pipeline-config",
+      slug,
+      "tasks/index.js"
+    );
+
+    // Create starter files
+    try {
+      await createStarterFiles(
+        pipelineDir,
+        slug,
+        name.trim(),
+        description.trim()
+      );
+    } catch (error) {
+      console.error("[CreatePipelineEndpoint] Failed to create files:", error);
+      res.status(500).json({ error: "Failed to create pipeline" });
+      return;
+    }
+
+    // Update registry atomically using temp file
+    try {
+      registryData.pipelines[slug] = {
+        name: name.trim(),
+        description: description.trim(),
+        pipelinePath,
+        taskRegistryPath,
+      };
+
+      const tempPath = `${registryPath}.${Date.now()}.tmp`;
+      await fs.writeFile(
+        tempPath,
+        JSON.stringify(registryData, null, 2),
+        "utf8"
+      );
+      await fs.rename(tempPath, registryPath);
+    } catch (error) {
+      console.error(
+        "[CreatePipelineEndpoint] Failed to update registry:",
+        error
+      );
+      res.status(500).json({ error: "Failed to create pipeline" });
+      return;
+    }
+
+    console.log(
+      "[CreatePipelineEndpoint] Pipeline created successfully:",
+      slug
+    );
+
+    res.status(200).json({ slug });
+  } catch (err) {
+    console.error("[CreatePipelineEndpoint] Unexpected error:", err);
+    res.status(500).json({ error: "Failed to create pipeline" });
+  }
+}
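
For context, a minimal sketch of exercising the new endpoint from a client. The POST /api/pipelines route is taken from the handler's own log line; the base URL and example payload values are illustrative assumptions, not part of this diff.

// Hypothetical request; route per the handler's log line above,
// base URL and field values are assumptions.
const res = await fetch("http://localhost:3000/api/pipelines", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    name: "Content Generation",
    description: "Generates long-form articles from briefs",
  }),
});
const body = await res.json();
// 200 -> { slug: "content-generation" } (kebab-cased, uniquified against the registry)
// 400 -> { error: "Name and description are required" }

On success the handler has also written pipeline-config/<slug>/pipeline.json and pipeline-config/<slug>/tasks/index.js, and registered both paths under the slug in registry.json.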
package/src/ui/endpoints/pipeline-analysis-endpoint.js
ADDED

@@ -0,0 +1,246 @@
+import { promises as fs } from "node:fs";
+import path from "node:path";
+import { streamSSE } from "../lib/sse.js";
+import { acquireLock, releaseLock } from "../lib/analysis-lock.js";
+import { getPipelineConfig } from "../../core/config.js";
+import { analyzeTask } from "../../task-analysis/index.js";
+import { writeAnalysisFile } from "../../task-analysis/enrichers/analysis-writer.js";
+import { deduceArtifactSchema } from "../../task-analysis/enrichers/schema-deducer.js";
+import { writeSchemaFiles } from "../../task-analysis/enrichers/schema-writer.js";
+
+/**
+ * Handle pipeline analysis endpoint.
+ * Analyzes all tasks in a pipeline and deduces schemas for artifacts.
+ *
+ * @param {object} req - Express request object
+ * @param {object} res - Express response object
+ */
+export async function handlePipelineAnalysis(req, res) {
+  const slug = req.params.slug;
+  const startTime = Date.now();
+
+  // Validate slug format
+  if (!slug || typeof slug !== "string") {
+    return res.status(400).json({
+      ok: false,
+      code: "invalid_slug",
+      message: "Missing or invalid slug parameter",
+    });
+  }
+
+  if (!/^[A-Za-z0-9_-]+$/.test(slug)) {
+    return res.status(400).json({
+      ok: false,
+      code: "invalid_slug",
+      message:
+        "Invalid slug format: only alphanumeric, hyphens, and underscores allowed",
+    });
+  }
+
+  // Try to acquire lock
+  const lockResult = acquireLock(slug);
+  if (!lockResult.acquired) {
+    return res.status(409).json({
+      ok: false,
+      code: "analysis_locked",
+      heldBy: lockResult.heldBy,
+    });
+  }
+
+  // Create SSE stream
+  const stream = streamSSE(res);
+  let lockReleased = false;
+
+  const releaseLockSafely = () => {
+    if (!lockReleased) {
+      releaseLock(slug);
+      lockReleased = true;
+    }
+  };
+
+  // Handle client disconnect
+  req.on("close", () => {
+    console.log(`[PipelineAnalysis] Client disconnected for ${slug}`);
+    releaseLockSafely();
+  });
+
+  try {
+    // Get pipeline configuration
+    let pipelineConfig;
+    try {
+      pipelineConfig = getPipelineConfig(slug);
+    } catch (error) {
+      stream.send("error", {
+        message: `Pipeline '${slug}' not found in registry`,
+      });
+      stream.end();
+      releaseLockSafely();
+      return;
+    }
+
+    const pipelineDir = path.dirname(pipelineConfig.pipelineJsonPath);
+
+    // Read pipeline.json
+    let pipelineData;
+    try {
+      const contents = await fs.readFile(
+        pipelineConfig.pipelineJsonPath,
+        "utf8"
+      );
+      pipelineData = JSON.parse(contents);
+    } catch (error) {
+      stream.send("error", {
+        message: `Failed to read pipeline.json: ${error.message}`,
+      });
+      stream.end();
+      releaseLockSafely();
+      return;
+    }
+
+    if (!Array.isArray(pipelineData.tasks)) {
+      stream.send("error", {
+        message: "Invalid pipeline.json: tasks array not found",
+      });
+      stream.end();
+      releaseLockSafely();
+      return;
+    }
+
+    const tasks = pipelineData.tasks;
+    const totalTasks = tasks.length;
+
+    // Pre-analyze all tasks to count total artifacts (only JSON files need schema deduction)
+    const taskAnalyses = [];
+    let totalArtifacts = 0;
+
+    for (const taskId of tasks) {
+      const taskFilePath = path.join(pipelineDir, "tasks", `${taskId}.js`);
+      try {
+        const taskCode = await fs.readFile(taskFilePath, "utf8");
+        const analysis = analyzeTask(taskCode, taskFilePath);
+        taskAnalyses.push({ taskId, taskCode, analysis });
+        // Only count JSON artifacts for schema deduction
+        totalArtifacts += analysis.artifacts.writes.filter((a) =>
+          a.fileName.endsWith(".json")
+        ).length;
+      } catch (error) {
+        stream.send("error", {
+          message: `Failed to analyze task '${taskId}': ${error.message}`,
+          taskId,
+        });
+        stream.end();
+        releaseLockSafely();
+        return;
+      }
+    }
+
+    // Send started event
+    stream.send("started", {
+      pipelineSlug: slug,
+      totalTasks,
+      totalArtifacts,
+    });
+
+    let completedTasks = 0;
+    let completedArtifacts = 0;
+
+    // Process each task
+    for (let taskIndex = 0; taskIndex < taskAnalyses.length; taskIndex++) {
+      const { taskId, taskCode, analysis } = taskAnalyses[taskIndex];
+
+      stream.send("task:start", {
+        taskId,
+        taskIndex,
+        totalTasks,
+      });
+
+      // Write analysis file
+      try {
+        await writeAnalysisFile(pipelineDir, taskId, analysis);
+      } catch (error) {
+        stream.send("error", {
+          message: `Failed to write analysis for task '${taskId}': ${error.message}`,
+          taskId,
+        });
+        stream.end();
+        releaseLockSafely();
+        return;
+      }
+
+      // Process each artifact write
+      const artifacts = analysis.artifacts.writes;
+      let jsonArtifactIndex = 0;
+
+      for (
+        let artifactIndex = 0;
+        artifactIndex < artifacts.length;
+        artifactIndex++
+      ) {
+        const artifact = artifacts[artifactIndex];
+
+        // Skip non-JSON artifacts (only JSON files need schema deduction)
+        if (!artifact.fileName.endsWith(".json")) {
+          continue;
+        }
+
+        stream.send("artifact:start", {
+          taskId,
+          artifactName: artifact.fileName,
+          artifactIndex: jsonArtifactIndex,
+          totalArtifacts,
+        });
+
+        try {
+          const deducedSchema = await deduceArtifactSchema(taskCode, artifact);
+          await writeSchemaFiles(pipelineDir, artifact.fileName, deducedSchema);
+        } catch (error) {
+          stream.send("error", {
+            message: `Failed to deduce schema for artifact '${artifact.fileName}': ${error.message}`,
+            taskId,
+            artifactName: artifact.fileName,
+          });
+          stream.end();
+          releaseLockSafely();
+          return;
+        }
+
+        stream.send("artifact:complete", {
+          taskId,
+          artifactName: artifact.fileName,
+          artifactIndex: jsonArtifactIndex,
+          totalArtifacts,
+        });
+
+        completedArtifacts++;
+        jsonArtifactIndex++;
+      }
+
+      stream.send("task:complete", {
+        taskId,
+        taskIndex,
+        totalTasks,
+      });
+
+      completedTasks++;
+    }
+
+    // Send complete event
+    const durationMs = Date.now() - startTime;
+    stream.send("complete", {
+      pipelineSlug: slug,
+      tasksAnalyzed: completedTasks,
+      artifactsProcessed: completedArtifacts,
+      durationMs,
+    });
+
+    stream.end();
+    releaseLockSafely();
+  } catch (error) {
+    console.error(`[PipelineAnalysis] Unexpected error:`, error);
+    stream.send("error", {
+      message: `Unexpected error: ${error.message}`,
+    });
+    stream.end();
+    releaseLockSafely();
+  }
+}
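
This endpoint replies as a server-sent event stream rather than a single JSON body. Below is a sketch of a consumer, assuming the handler is mounted at a GET route (the mount path is not visible in this diff; the bundled UI uses its own sse-fetch.js client) and that streamSSE's send() JSON-encodes its payload. The URL is hypothetical; the event names and payload fields match the handler above.

// Hypothetical consumer; mount path is assumed.
const es = new EventSource("/api/pipelines/my-pipeline/analysis");

es.addEventListener("started", (e) => {
  const { totalTasks, totalArtifacts } = JSON.parse(e.data);
  console.log(`analyzing ${totalTasks} tasks, ${totalArtifacts} JSON artifacts`);
});

es.addEventListener("artifact:complete", (e) => {
  const { artifactName, artifactIndex, totalArtifacts } = JSON.parse(e.data);
  console.log(`schema written: ${artifactName} (${artifactIndex + 1}/${totalArtifacts})`);
});

es.addEventListener("complete", (e) => {
  const { tasksAnalyzed, artifactsProcessed, durationMs } = JSON.parse(e.data);
  console.log(`done: ${tasksAnalyzed} tasks, ${artifactsProcessed} artifacts in ${durationMs}ms`);
  es.close();
});

es.addEventListener("error", (e) => {
  // The handler emits a terminal "error" event with a message payload
  // before closing the stream; plain connection errors carry no data.
  if (e.data) console.error(JSON.parse(e.data).message);
  es.close();
});

Note that a second concurrent request for the same slug is rejected up front with 409 analysis_locked before any stream opens, and the lock is released if the client disconnects mid-analysis.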
package/src/ui/endpoints/pipeline-type-detail-endpoint.js
ADDED

@@ -0,0 +1,181 @@
+/**
+ * Pipeline type detail endpoint (logic-only)
+ *
+ * Exports:
+ * - handlePipelineTypeDetail(slug) -> Core logic function
+ * - handlePipelineTypeDetailRequest(req, res) -> HTTP response wrapper
+ *
+ * This function returns a read-only pipeline definition with tasks ordered
+ * as specified in pipeline.json for rendering a static DAG visualization.
+ */
+
+import { getPipelineConfig, getConfig } from "../../core/config.js";
+import { sendJson } from "../utils/http-utils.js";
+import * as configBridge from "../config-bridge.js";
+import { promises as fs } from "node:fs";
+import path from "node:path";
+
+/**
+ * Return pipeline type detail suitable for the API.
+ *
+ * Behavior:
+ * - Use getPipelineConfig(slug) to resolve pipeline.json path
+ * - Read and parse pipeline.json from the resolved path
+ * - Validate that parsed data contains a tasks array
+ * - Return tasks as { id, title, status: 'definition' } in order
+ * - Handle all error cases with explicit error responses
+ *
+ * @param {string} slug - Pipeline slug identifier
+ * @returns {Object} Response envelope { ok: true, data } or error envelope
+ */
+export async function handlePipelineTypeDetail(slug) {
+  console.log(`[PipelineTypeDetailEndpoint] GET /api/pipelines/${slug} called`);
+
+  // Validate slug parameter
+  if (!slug || typeof slug !== "string") {
+    return configBridge.createErrorResponse(
+      configBridge.Constants.ERROR_CODES.BAD_REQUEST,
+      "Invalid slug parameter"
+    );
+  }
+
+  // Enforce safe characters in slug to prevent path traversal and similar issues
+  const slugIsValid = /^[A-Za-z0-9_-]+$/.test(slug);
+  if (!slugIsValid) {
+    return configBridge.createErrorResponse(
+      configBridge.Constants.ERROR_CODES.BAD_REQUEST,
+      "Invalid slug parameter: only letters, numbers, hyphens, and underscores are allowed"
+    );
+  }
+  try {
+    // Resolve pipeline configuration using existing config system
+    let pipelineConfig;
+    try {
+      pipelineConfig = getPipelineConfig(slug);
+    } catch (error) {
+      return configBridge.createErrorResponse(
+        configBridge.Constants.ERROR_CODES.NOT_FOUND,
+        `Pipeline '${slug}' not found in registry`
+      );
+    }
+
+    const pipelineJsonPath = pipelineConfig.pipelineJsonPath;
+
+    // Check if pipeline.json exists
+    try {
+      await fs.access(pipelineJsonPath);
+    } catch (error) {
+      if (error.code === "ENOENT") {
+        return configBridge.createErrorResponse(
+          configBridge.Constants.ERROR_CODES.NOT_FOUND,
+          `pipeline.json not found for pipeline '${slug}'`,
+          pipelineJsonPath
+        );
+      }
+      throw error;
+    }
+
+    // Read and parse pipeline.json
+    let pipelineData;
+    try {
+      const contents = await fs.readFile(pipelineJsonPath, "utf8");
+      pipelineData = JSON.parse(contents);
+    } catch (error) {
+      if (error instanceof SyntaxError) {
+        return configBridge.createErrorResponse(
+          configBridge.Constants.ERROR_CODES.INVALID_JSON,
+          "Invalid JSON in pipeline.json",
+          pipelineJsonPath
+        );
+      }
+      throw error;
+    }
+
+    // Validate pipeline structure
+    if (
+      !pipelineData ||
+      typeof pipelineData !== "object" ||
+      !Array.isArray(pipelineData.tasks)
+    ) {
+      return configBridge.createErrorResponse(
+        configBridge.Constants.ERROR_CODES.INVALID_JSON,
+        "Invalid pipeline.json format: expected 'tasks' array",
+        pipelineJsonPath
+      );
+    }
+
+    // Transform tasks to API format
+    const tasks = pipelineData.tasks.map((taskId, index) => {
+      if (typeof taskId !== "string" || !taskId.trim()) {
+        throw new Error(`Invalid task ID at index ${index}: ${taskId}`);
+      }
+
+      return {
+        id: taskId,
+        title: taskId.charAt(0).toUpperCase() + taskId.slice(1),
+        status: "definition",
+      };
+    });
+
+    // Get pipeline metadata from config for name/description
+    const config = getConfig();
+    const pipelineMetadata = config.pipelines?.[slug] || {};
+
+    return {
+      ok: true,
+      data: {
+        slug,
+        name: pipelineMetadata.name || slug,
+        description: pipelineMetadata.description || "",
+        tasks,
+      },
+    };
+  } catch (err) {
+    console.error("handlePipelineTypeDetail error:", err);
+    return configBridge.createErrorResponse(
+      configBridge.Constants.ERROR_CODES.FS_ERROR,
+      "Failed to read pipeline configuration"
+    );
+  }
+}
+
+/**
+ * HTTP wrapper function for pipeline type detail requests.
+ * Calls handlePipelineTypeDetail(slug) and sends the response using sendJson().
+ *
+ * @param {http.IncomingMessage} req - HTTP request object
+ * @param {http.ServerResponse} res - HTTP response object
+ */
+export async function handlePipelineTypeDetailRequest(req, res) {
+  console.info(
+    "[PipelineTypeDetailEndpoint] handlePipelineTypeDetailRequest called"
+  );
+
+  try {
+    const slug = req.params.slug;
+    const result = await handlePipelineTypeDetail(slug);
+
+    if (result.ok) {
+      sendJson(res, 200, result);
+    } else {
+      // Map error codes to appropriate HTTP status codes
+      const statusCode =
+        result.code === configBridge.Constants.ERROR_CODES.NOT_FOUND
+          ? 404
+          : result.code === configBridge.Constants.ERROR_CODES.BAD_REQUEST
+          ? 400
+          : result.code === configBridge.Constants.ERROR_CODES.INVALID_JSON ||
+            result.code === configBridge.Constants.ERROR_CODES.FS_ERROR
+          ? 500
+          : 500;
+      sendJson(res, statusCode, result);
+    }
+  } catch (err) {
+    console.error("handlePipelineTypeDetailRequest unexpected error:", err);
+    sendJson(res, 500, {
+      ok: false,
+      code: "internal_error",
+      message: "Internal server error",
+    });
+  }
+}
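
To make the task transform concrete, here is a worked example of the success envelope, using hypothetical metadata; the title casing, the fixed "definition" status, and the name/description fallbacks all follow the code above.

// Given config metadata { name: "Blog Posts", description: "Long-form posts" }
// and a pipeline.json of { "tasks": ["research", "outline", "write"] },
// handlePipelineTypeDetail("blog-posts") resolves to:
{
  ok: true,
  data: {
    slug: "blog-posts",
    name: "Blog Posts",
    description: "Long-form posts",
    tasks: [
      { id: "research", title: "Research", status: "definition" },
      { id: "outline", title: "Outline", status: "definition" },
      { id: "write", title: "Write", status: "definition" },
    ],
  },
}

Error envelopes from configBridge.createErrorResponse keep the ok/code shape the wrapper inspects, and the HTTP wrapper maps them to 404, 400, or 500.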