@ryanfw/prompt-orchestration-pipeline 0.11.0 → 0.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. package/package.json +11 -1
  2. package/src/cli/analyze-task.js +51 -0
  3. package/src/cli/index.js +8 -0
  4. package/src/components/AddPipelineSidebar.jsx +144 -0
  5. package/src/components/AnalysisProgressTray.jsx +87 -0
  6. package/src/components/DAGGrid.jsx +157 -47
  7. package/src/components/JobTable.jsx +4 -3
  8. package/src/components/Layout.jsx +142 -139
  9. package/src/components/MarkdownRenderer.jsx +149 -0
  10. package/src/components/PipelineDAGGrid.jsx +404 -0
  11. package/src/components/PipelineTypeTaskSidebar.jsx +96 -0
  12. package/src/components/SchemaPreviewPanel.jsx +97 -0
  13. package/src/components/StageTimeline.jsx +36 -0
  14. package/src/components/TaskAnalysisDisplay.jsx +227 -0
  15. package/src/components/TaskCreationSidebar.jsx +447 -0
  16. package/src/components/TaskDetailSidebar.jsx +119 -117
  17. package/src/components/TaskFilePane.jsx +94 -39
  18. package/src/components/ui/RestartJobModal.jsx +26 -6
  19. package/src/components/ui/StopJobModal.jsx +183 -0
  20. package/src/components/ui/button.jsx +59 -27
  21. package/src/components/ui/sidebar.jsx +118 -0
  22. package/src/config/models.js +99 -67
  23. package/src/core/config.js +11 -4
  24. package/src/core/lifecycle-policy.js +62 -0
  25. package/src/core/pipeline-runner.js +312 -217
  26. package/src/core/status-writer.js +84 -0
  27. package/src/llm/index.js +129 -9
  28. package/src/pages/Code.jsx +8 -1
  29. package/src/pages/PipelineDetail.jsx +84 -2
  30. package/src/pages/PipelineList.jsx +214 -0
  31. package/src/pages/PipelineTypeDetail.jsx +234 -0
  32. package/src/pages/PromptPipelineDashboard.jsx +10 -11
  33. package/src/providers/deepseek.js +76 -16
  34. package/src/providers/openai.js +61 -34
  35. package/src/task-analysis/enrichers/analysis-writer.js +62 -0
  36. package/src/task-analysis/enrichers/schema-deducer.js +145 -0
  37. package/src/task-analysis/enrichers/schema-writer.js +74 -0
  38. package/src/task-analysis/extractors/artifacts.js +137 -0
  39. package/src/task-analysis/extractors/llm-calls.js +176 -0
  40. package/src/task-analysis/extractors/stages.js +51 -0
  41. package/src/task-analysis/index.js +103 -0
  42. package/src/task-analysis/parser.js +28 -0
  43. package/src/task-analysis/utils/ast.js +43 -0
  44. package/src/ui/client/adapters/job-adapter.js +60 -0
  45. package/src/ui/client/api.js +233 -8
  46. package/src/ui/client/hooks/useAnalysisProgress.js +145 -0
  47. package/src/ui/client/hooks/useJobList.js +14 -1
  48. package/src/ui/client/index.css +64 -0
  49. package/src/ui/client/main.jsx +4 -0
  50. package/src/ui/client/sse-fetch.js +120 -0
  51. package/src/ui/dist/app.js +262 -0
  52. package/src/ui/dist/assets/index-cjHV9mYW.js +82578 -0
  53. package/src/ui/dist/assets/index-cjHV9mYW.js.map +1 -0
  54. package/src/ui/dist/assets/style-CoM9SoQF.css +180 -0
  55. package/src/ui/dist/favicon.svg +12 -0
  56. package/src/ui/dist/index.html +2 -2
  57. package/src/ui/endpoints/create-pipeline-endpoint.js +194 -0
  58. package/src/ui/endpoints/file-endpoints.js +330 -0
  59. package/src/ui/endpoints/job-control-endpoints.js +1001 -0
  60. package/src/ui/endpoints/job-endpoints.js +62 -0
  61. package/src/ui/endpoints/pipeline-analysis-endpoint.js +246 -0
  62. package/src/ui/endpoints/pipeline-type-detail-endpoint.js +181 -0
  63. package/src/ui/endpoints/pipelines-endpoint.js +133 -0
  64. package/src/ui/endpoints/schema-file-endpoint.js +105 -0
  65. package/src/ui/endpoints/sse-endpoints.js +223 -0
  66. package/src/ui/endpoints/state-endpoint.js +85 -0
  67. package/src/ui/endpoints/task-analysis-endpoint.js +104 -0
  68. package/src/ui/endpoints/task-creation-endpoint.js +114 -0
  69. package/src/ui/endpoints/task-save-endpoint.js +101 -0
  70. package/src/ui/endpoints/upload-endpoints.js +406 -0
  71. package/src/ui/express-app.js +227 -0
  72. package/src/ui/lib/analysis-lock.js +67 -0
  73. package/src/ui/lib/sse.js +30 -0
  74. package/src/ui/server.js +42 -1880
  75. package/src/ui/sse-broadcast.js +93 -0
  76. package/src/ui/utils/http-utils.js +139 -0
  77. package/src/ui/utils/mime-types.js +196 -0
  78. package/src/ui/utils/slug.js +31 -0
  79. package/src/ui/vite.config.js +22 -0
  80. package/src/ui/watcher.js +28 -2
  81. package/src/utils/jobs.js +39 -0
  82. package/src/ui/dist/assets/index-DeDzq-Kk.js +0 -23863
  83. package/src/ui/dist/assets/style-aBtD_Yrs.css +0 -62
package/src/ui/endpoints/job-endpoints.js
@@ -4,6 +4,8 @@
  * Exports:
  * - handleJobList() -> { ok: true, data: [...] } | error envelope
  * - handleJobDetail(jobId) -> { ok: true, data: {...} } | error envelope
+ * - handleJobListRequest(req, res) -> HTTP response wrapper
+ * - handleJobDetailRequest(req, res, jobId) -> HTTP response wrapper
  * - getEndpointStats(jobListResponses, jobDetailResponses) -> stats object
  *
  * These functions return structured results (not HTTP responses) so the server
@@ -19,6 +21,7 @@ import {
   transformJobListForAPI,
 } from "../transformers/list-transformer.js";
 import * as configBridge from "../config-bridge.js";
+import { sendJson } from "../utils/http-utils.js";
 import fs from "node:fs/promises";
 import path from "node:path";
 import { getJobPipelinePath } from "../../config/paths.js";
@@ -257,6 +260,65 @@ async function handleJobDetailById(jobId) {
   }
 }

+/**
+ * HTTP wrapper function for job list requests.
+ * Calls handleJobList() and sends the response using sendJson().
+ */
+export async function handleJobListRequest(req, res) {
+  console.info("[JobEndpoints] handleJobListRequest called");
+
+  try {
+    const result = await handleJobList();
+
+    if (result.ok) {
+      sendJson(res, 200, result);
+    } else {
+      // Map error codes to appropriate HTTP status codes
+      const statusCode = result.code === "fs_error" ? 500 : 400;
+      sendJson(res, statusCode, result);
+    }
+  } catch (err) {
+    console.error("handleJobListRequest unexpected error:", err);
+    sendJson(res, 500, {
+      ok: false,
+      code: "internal_error",
+      message: "Internal server error",
+    });
+  }
+}
+
+/**
+ * HTTP wrapper function for job detail requests.
+ * Calls handleJobDetail(jobId) and sends the response using sendJson().
+ */
+export async function handleJobDetailRequest(req, res, jobId) {
+  console.log(`[JobEndpoints] handleJobDetailRequest called for job: ${jobId}`);
+
+  try {
+    const result = await handleJobDetail(jobId);
+
+    if (result.ok) {
+      sendJson(res, 200, result);
+    } else {
+      // Map error codes to appropriate HTTP status codes
+      let statusCode = 400;
+      if (result.code === "job_not_found") {
+        statusCode = 404;
+      } else if (result.code === "fs_error") {
+        statusCode = 500;
+      }
+      sendJson(res, statusCode, result);
+    }
+  } catch (err) {
+    console.error("handleJobDetailRequest unexpected error:", err);
+    sendJson(res, 500, {
+      ok: false,
+      code: "internal_error",
+      message: "Internal server error",
+    });
+  }
+}
+
 /**
  * Compute endpoint statistics for test assertions.
  * jobListResponses/jobDetailResponses are arrays of response envelopes.
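
The two new wrappers above keep the logic-only handlers untouched and only add HTTP plumbing via the new sendJson helper. A minimal sketch of how a plain Node server might mount them; the route paths and the createServer wiring here are assumptions for illustration, not taken from this diff:

// sketch-server.js (illustrative only; routes and import path are assumed)
import http from "node:http";
import {
  handleJobListRequest,
  handleJobDetailRequest,
} from "./src/ui/endpoints/job-endpoints.js";

const server = http.createServer(async (req, res) => {
  const url = new URL(req.url, "http://localhost");
  const detailMatch = url.pathname.match(/^\/api\/jobs\/([^/]+)$/);

  if (url.pathname === "/api/jobs") {
    // sends { ok: true, data: [...] } or an error envelope with a mapped status
    await handleJobListRequest(req, res);
  } else if (detailMatch) {
    // job_not_found -> 404, fs_error -> 500, other errors -> 400
    await handleJobDetailRequest(req, res, detailMatch[1]);
  } else {
    res.writeHead(404).end();
  }
});

server.listen(3000);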
package/src/ui/endpoints/pipeline-analysis-endpoint.js
@@ -0,0 +1,246 @@
+import { promises as fs } from "node:fs";
+import path from "node:path";
+import { streamSSE } from "../lib/sse.js";
+import { acquireLock, releaseLock } from "../lib/analysis-lock.js";
+import { getPipelineConfig } from "../../core/config.js";
+import { analyzeTask } from "../../task-analysis/index.js";
+import { writeAnalysisFile } from "../../task-analysis/enrichers/analysis-writer.js";
+import { deduceArtifactSchema } from "../../task-analysis/enrichers/schema-deducer.js";
+import { writeSchemaFiles } from "../../task-analysis/enrichers/schema-writer.js";
+
+/**
+ * Handle pipeline analysis endpoint.
+ * Analyzes all tasks in a pipeline and deduces schemas for artifacts.
+ *
+ * @param {object} req - Express request object
+ * @param {object} res - Express response object
+ */
+export async function handlePipelineAnalysis(req, res) {
+  const slug = req.params.slug;
+  const startTime = Date.now();
+
+  // Validate slug format
+  if (!slug || typeof slug !== "string") {
+    return res.status(400).json({
+      ok: false,
+      code: "invalid_slug",
+      message: "Missing or invalid slug parameter",
+    });
+  }
+
+  if (!/^[A-Za-z0-9_-]+$/.test(slug)) {
+    return res.status(400).json({
+      ok: false,
+      code: "invalid_slug",
+      message:
+        "Invalid slug format: only alphanumeric, hyphens, and underscores allowed",
+    });
+  }
+
+  // Try to acquire lock
+  const lockResult = acquireLock(slug);
+  if (!lockResult.acquired) {
+    return res.status(409).json({
+      ok: false,
+      code: "analysis_locked",
+      heldBy: lockResult.heldBy,
+    });
+  }
+
+  // Create SSE stream
+  const stream = streamSSE(res);
+  let lockReleased = false;
+
+  const releaseLockSafely = () => {
+    if (!lockReleased) {
+      releaseLock(slug);
+      lockReleased = true;
+    }
+  };
+
+  // Handle client disconnect
+  req.on("close", () => {
+    console.log(`[PipelineAnalysis] Client disconnected for ${slug}`);
+    releaseLockSafely();
+  });
+
+  try {
+    // Get pipeline configuration
+    let pipelineConfig;
+    try {
+      pipelineConfig = getPipelineConfig(slug);
+    } catch (error) {
+      stream.send("error", {
+        message: `Pipeline '${slug}' not found in registry`,
+      });
+      stream.end();
+      releaseLockSafely();
+      return;
+    }
+
+    const pipelineDir = path.dirname(pipelineConfig.pipelineJsonPath);
+
+    // Read pipeline.json
+    let pipelineData;
+    try {
+      const contents = await fs.readFile(
+        pipelineConfig.pipelineJsonPath,
+        "utf8"
+      );
+      pipelineData = JSON.parse(contents);
+    } catch (error) {
+      stream.send("error", {
+        message: `Failed to read pipeline.json: ${error.message}`,
+      });
+      stream.end();
+      releaseLockSafely();
+      return;
+    }
+
+    if (!Array.isArray(pipelineData.tasks)) {
+      stream.send("error", {
+        message: "Invalid pipeline.json: tasks array not found",
+      });
+      stream.end();
+      releaseLockSafely();
+      return;
+    }
+
+    const tasks = pipelineData.tasks;
+    const totalTasks = tasks.length;
+
+    // Pre-analyze all tasks to count total artifacts (only JSON files need schema deduction)
+    const taskAnalyses = [];
+    let totalArtifacts = 0;
+
+    for (const taskId of tasks) {
+      const taskFilePath = path.join(pipelineDir, "tasks", `${taskId}.js`);
+      try {
+        const taskCode = await fs.readFile(taskFilePath, "utf8");
+        const analysis = analyzeTask(taskCode, taskFilePath);
+        taskAnalyses.push({ taskId, taskCode, analysis });
+        // Only count JSON artifacts for schema deduction
+        totalArtifacts += analysis.artifacts.writes.filter((a) =>
+          a.fileName.endsWith(".json")
+        ).length;
+      } catch (error) {
+        stream.send("error", {
+          message: `Failed to analyze task '${taskId}': ${error.message}`,
+          taskId,
+        });
+        stream.end();
+        releaseLockSafely();
+        return;
+      }
+    }
+
+    // Send started event
+    stream.send("started", {
+      pipelineSlug: slug,
+      totalTasks,
+      totalArtifacts,
+    });
+
+    let completedTasks = 0;
+    let completedArtifacts = 0;
+
+    // Process each task
+    for (let taskIndex = 0; taskIndex < taskAnalyses.length; taskIndex++) {
+      const { taskId, taskCode, analysis } = taskAnalyses[taskIndex];
+
+      stream.send("task:start", {
+        taskId,
+        taskIndex,
+        totalTasks,
+      });
+
+      // Write analysis file
+      try {
+        await writeAnalysisFile(pipelineDir, taskId, analysis);
+      } catch (error) {
+        stream.send("error", {
+          message: `Failed to write analysis for task '${taskId}': ${error.message}`,
+          taskId,
+        });
+        stream.end();
+        releaseLockSafely();
+        return;
+      }
+
+      // Process each artifact write
+      const artifacts = analysis.artifacts.writes;
+      let jsonArtifactIndex = 0;
+
+      for (
+        let artifactIndex = 0;
+        artifactIndex < artifacts.length;
+        artifactIndex++
+      ) {
+        const artifact = artifacts[artifactIndex];
+
+        // Skip non-JSON artifacts (only JSON files need schema deduction)
+        if (!artifact.fileName.endsWith(".json")) {
+          continue;
+        }
+
+        stream.send("artifact:start", {
+          taskId,
+          artifactName: artifact.fileName,
+          artifactIndex: jsonArtifactIndex,
+          totalArtifacts,
+        });
+
+        try {
+          const deducedSchema = await deduceArtifactSchema(taskCode, artifact);
+          await writeSchemaFiles(pipelineDir, artifact.fileName, deducedSchema);
+        } catch (error) {
+          stream.send("error", {
+            message: `Failed to deduce schema for artifact '${artifact.fileName}': ${error.message}`,
+            taskId,
+            artifactName: artifact.fileName,
+          });
+          stream.end();
+          releaseLockSafely();
+          return;
+        }
+
+        stream.send("artifact:complete", {
+          taskId,
+          artifactName: artifact.fileName,
+          artifactIndex: jsonArtifactIndex,
+          totalArtifacts,
+        });
+
+        completedArtifacts++;
+        jsonArtifactIndex++;
+      }
+
+      stream.send("task:complete", {
+        taskId,
+        taskIndex,
+        totalTasks,
+      });
+
+      completedTasks++;
+    }
+
+    // Send complete event
+    const durationMs = Date.now() - startTime;
+    stream.send("complete", {
+      pipelineSlug: slug,
+      tasksAnalyzed: completedTasks,
+      artifactsProcessed: completedArtifacts,
+      durationMs,
+    });
+
+    stream.end();
+    releaseLockSafely();
+  } catch (error) {
+    console.error(`[PipelineAnalysis] Unexpected error:`, error);
+    stream.send("error", {
+      message: `Unexpected error: ${error.message}`,
+    });
+    stream.end();
+    releaseLockSafely();
+  }
+}
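
This handler streams progress over SSE rather than returning a single JSON body. A minimal browser-side sketch of consuming it: the event names (started, task:start, artifact:start, artifact:complete, task:complete, complete, error) come from the handler above, but the URL and the "my-pipeline" slug are assumptions, and the package's own UI uses its bundled sse-fetch/useAnalysisProgress helpers instead.

// sketch-analysis-client.js (illustrative; the endpoint URL is assumed)
const slug = "my-pipeline"; // hypothetical pipeline slug
const source = new EventSource(`/api/pipelines/${slug}/analyze`);

source.addEventListener("started", (e) => {
  const { totalTasks, totalArtifacts } = JSON.parse(e.data);
  console.log(`analyzing ${totalTasks} tasks, ${totalArtifacts} JSON artifacts`);
});

source.addEventListener("artifact:complete", (e) => {
  const { artifactName, artifactIndex, totalArtifacts } = JSON.parse(e.data);
  console.log(`schema written for ${artifactName} (${artifactIndex + 1}/${totalArtifacts})`);
});

source.addEventListener("complete", (e) => {
  console.log("analysis finished:", JSON.parse(e.data));
  source.close();
});

source.addEventListener("error", (e) => {
  // Server-sent "error" events carry a JSON payload; transport errors
  // arrive here too without data, so guard the parse.
  if (e.data) console.error("analysis failed:", JSON.parse(e.data).message);
  source.close();
});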
package/src/ui/endpoints/pipeline-type-detail-endpoint.js
@@ -0,0 +1,181 @@
+/**
+ * Pipeline type detail endpoint (logic-only)
+ *
+ * Exports:
+ * - handlePipelineTypeDetail(slug) -> Core logic function
+ * - handlePipelineTypeDetailRequest(req, res) -> HTTP response wrapper
+ *
+ * This function returns a read-only pipeline definition with tasks ordered
+ * as specified in pipeline.json for rendering a static DAG visualization.
+ */
+
+import { getPipelineConfig, getConfig } from "../../core/config.js";
+import { sendJson } from "../utils/http-utils.js";
+import * as configBridge from "../config-bridge.js";
+import { promises as fs } from "node:fs";
+import path from "node:path";
+
+/**
+ * Return pipeline type detail suitable for the API.
+ *
+ * Behavior:
+ * - Use getPipelineConfig(slug) to resolve pipeline.json path
+ * - Read and parse pipeline.json from the resolved path
+ * - Validate that parsed data contains a tasks array
+ * - Return tasks as { id, title, status: 'definition' } in order
+ * - Handle all error cases with explicit error responses
+ *
+ * @param {string} slug - Pipeline slug identifier
+ * @returns {Object} Response envelope { ok: true, data } or error envelope
+ */
+export async function handlePipelineTypeDetail(slug) {
+  console.log(`[PipelineTypeDetailEndpoint] GET /api/pipelines/${slug} called`);
+
+  // Validate slug parameter
+  if (!slug || typeof slug !== "string") {
+    return configBridge.createErrorResponse(
+      configBridge.Constants.ERROR_CODES.BAD_REQUEST,
+      "Invalid slug parameter"
+    );
+  }
+
+  // Enforce safe characters in slug to prevent path traversal and similar issues
+  const slugIsValid = /^[A-Za-z0-9_-]+$/.test(slug);
+  if (!slugIsValid) {
+    return configBridge.createErrorResponse(
+      configBridge.Constants.ERROR_CODES.BAD_REQUEST,
+      "Invalid slug parameter: only letters, numbers, hyphens, and underscores are allowed"
+    );
+  }
+  try {
+    // Resolve pipeline configuration using existing config system
+    let pipelineConfig;
+    try {
+      pipelineConfig = getPipelineConfig(slug);
+    } catch (error) {
+      return configBridge.createErrorResponse(
+        configBridge.Constants.ERROR_CODES.NOT_FOUND,
+        `Pipeline '${slug}' not found in registry`
+      );
+    }
+
+    const pipelineJsonPath = pipelineConfig.pipelineJsonPath;
+
+    // Check if pipeline.json exists
+    try {
+      await fs.access(pipelineJsonPath);
+    } catch (error) {
+      if (error.code === "ENOENT") {
+        return configBridge.createErrorResponse(
+          configBridge.Constants.ERROR_CODES.NOT_FOUND,
+          `pipeline.json not found for pipeline '${slug}'`,
+          pipelineJsonPath
+        );
+      }
+      throw error;
+    }
+
+    // Read and parse pipeline.json
+    let pipelineData;
+    try {
+      const contents = await fs.readFile(pipelineJsonPath, "utf8");
+      pipelineData = JSON.parse(contents);
+    } catch (error) {
+      if (error instanceof SyntaxError) {
+        return configBridge.createErrorResponse(
+          configBridge.Constants.ERROR_CODES.INVALID_JSON,
+          "Invalid JSON in pipeline.json",
+          pipelineJsonPath
+        );
+      }
+      throw error;
+    }
+
+    // Validate pipeline structure
+    if (
+      !pipelineData ||
+      typeof pipelineData !== "object" ||
+      !Array.isArray(pipelineData.tasks)
+    ) {
+      return configBridge.createErrorResponse(
+        configBridge.Constants.ERROR_CODES.INVALID_JSON,
+        "Invalid pipeline.json format: expected 'tasks' array",
+        pipelineJsonPath
+      );
+    }
+
+    // Transform tasks to API format
+    const tasks = pipelineData.tasks.map((taskId, index) => {
+      if (typeof taskId !== "string" || !taskId.trim()) {
+        throw new Error(`Invalid task ID at index ${index}: ${taskId}`);
+      }
+
+      return {
+        id: taskId,
+        title: taskId.charAt(0).toUpperCase() + taskId.slice(1),
+        status: "definition",
+      };
+    });
+
+    // Get pipeline metadata from config for name/description
+    const config = getConfig();
+    const pipelineMetadata = config.pipelines?.[slug] || {};
+
+    return {
+      ok: true,
+      data: {
+        slug,
+        name: pipelineMetadata.name || slug,
+        description: pipelineMetadata.description || "",
+        tasks,
+      },
+    };
+  } catch (err) {
+    console.error("handlePipelineTypeDetail error:", err);
+    return configBridge.createErrorResponse(
+      configBridge.Constants.ERROR_CODES.FS_ERROR,
+      "Failed to read pipeline configuration"
+    );
+  }
+}
+
+/**
+ * HTTP wrapper function for pipeline type detail requests.
+ * Calls handlePipelineTypeDetail(slug) and sends the response using sendJson().
+ *
+ * @param {http.IncomingMessage} req - HTTP request object
+ * @param {http.ServerResponse} res - HTTP response object
+ */
+export async function handlePipelineTypeDetailRequest(req, res) {
+  console.info(
+    "[PipelineTypeDetailEndpoint] handlePipelineTypeDetailRequest called"
+  );
+
+  try {
+    const slug = req.params.slug;
+    const result = await handlePipelineTypeDetail(slug);
+
+    if (result.ok) {
+      sendJson(res, 200, result);
+    } else {
+      // Map error codes to appropriate HTTP status codes
+      const statusCode =
+        result.code === configBridge.Constants.ERROR_CODES.NOT_FOUND
+          ? 404
+          : result.code === configBridge.Constants.ERROR_CODES.BAD_REQUEST
+            ? 400
+            : result.code === configBridge.Constants.ERROR_CODES.INVALID_JSON ||
+                result.code === configBridge.Constants.ERROR_CODES.FS_ERROR
+              ? 500
+              : 500;
+      sendJson(res, statusCode, result);
+    }
+  } catch (err) {
+    console.error("handlePipelineTypeDetailRequest unexpected error:", err);
+    sendJson(res, 500, {
+      ok: false,
+      code: "internal_error",
+      message: "Internal server error",
+    });
+  }
+}
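
For a sense of what this endpoint returns, a short fetch sketch in an ES module context; the /api/pipelines/:slug route comes from the handler's own log line, while "content-generation" is a hypothetical slug used only for illustration.

// sketch-fetch-pipeline.js (illustrative client)
const res = await fetch("/api/pipelines/content-generation");
const body = await res.json();

if (body.ok) {
  // body.data: { slug, name, description, tasks: [{ id, title, status: "definition" }, ...] }
  for (const task of body.data.tasks) {
    console.log(`${task.title} (${task.id}) - ${task.status}`);
  }
} else {
  // not_found -> 404, bad_request -> 400, invalid_json / fs_error -> 500
  console.error(`${res.status}: ${body.message}`);
}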
package/src/ui/endpoints/pipelines-endpoint.js
@@ -0,0 +1,133 @@
+/**
+ * Pipelines endpoint (logic-only)
+ *
+ * Exports:
+ * - handlePipelinesRequest() -> HTTP response wrapper
+ *
+ * This function returns structured pipeline metadata from the registry
+ * so the frontend can display available pipelines.
+ */
+
+import { getConfig } from "../../core/config.js";
+import { sendJson } from "../utils/http-utils.js";
+import * as configBridge from "../config-bridge.js";
+import { promises as fs } from "node:fs";
+import path from "node:path";
+
+/**
+ * Return pipeline metadata suitable for the API.
+ *
+ * Behavior:
+ * - Read pipeline registry from config system
+ * - Return slug, name, description for each pipeline
+ * - Handle empty registry (return 200 with empty array)
+ * - Handle malformed JSON (return 500 with specific error)
+ * - Handle missing registry file (return 200 with empty array)
+ */
+export async function handlePipelinesRequest() {
+  console.log("[PipelinesEndpoint] GET /api/pipelines called");
+
+  try {
+    const config = getConfig();
+    const rootDir = config.paths?.root;
+
+    if (!rootDir) {
+      return configBridge.createErrorResponse(
+        configBridge.Constants.ERROR_CODES.BAD_REQUEST,
+        "PO_ROOT not configured"
+      );
+    }
+
+    const registryPath = path.join(rootDir, "pipeline-config", "registry.json");
+
+    // Check if registry file exists
+    try {
+      await fs.access(registryPath);
+    } catch (error) {
+      if (error.code === "ENOENT") {
+        // Missing registry file - return empty array as specified
+        return { ok: true, data: { pipelines: [] } };
+      }
+      throw error;
+    }
+
+    // Read and parse registry file
+    let registryData;
+    try {
+      const contents = await fs.readFile(registryPath, "utf8");
+      registryData = JSON.parse(contents);
+    } catch (error) {
+      if (error instanceof SyntaxError) {
+        return configBridge.createErrorResponse(
+          configBridge.Constants.ERROR_CODES.INVALID_JSON,
+          "Invalid JSON in pipeline registry",
+          registryPath
+        );
+      }
+      throw error;
+    }
+
+    // Validate registry structure
+    if (
+      !registryData ||
+      typeof registryData !== "object" ||
+      !registryData.pipelines ||
+      typeof registryData.pipelines !== "object"
+    ) {
+      return configBridge.createErrorResponse(
+        configBridge.Constants.ERROR_CODES.INVALID_JSON,
+        "Invalid pipeline registry format: expected 'pipelines' object",
+        registryPath
+      );
+    }
+
+    // Transform pipeline entries to API format
+    const pipelines = [];
+    for (const [slug, entry] of Object.entries(registryData.pipelines)) {
+      pipelines.push({
+        slug,
+        name: entry?.name || slug,
+        description: entry?.description || "",
+      });
+    }
+
+    return { ok: true, data: { pipelines } };
+  } catch (err) {
+    console.error("handlePipelinesRequest error:", err);
+    return configBridge.createErrorResponse(
+      configBridge.Constants.ERROR_CODES.FS_ERROR,
+      "Failed to read pipeline registry"
+    );
+  }
+}
+
+/**
+ * HTTP wrapper function for pipelines requests.
+ * Calls handlePipelinesRequest() and sends the response using sendJson().
+ */
+export async function handlePipelinesHttpRequest(req, res) {
+  console.info("[PipelinesEndpoint] handlePipelinesHttpRequest called");
+
+  try {
+    const result = await handlePipelinesRequest();
+
+    if (result.ok) {
+      sendJson(res, 200, result);
+    } else {
+      // Map error codes to appropriate HTTP status codes
+      const statusCode =
+        result.code === configBridge.Constants.ERROR_CODES.INVALID_JSON ||
+        result.code === configBridge.Constants.ERROR_CODES.FS_ERROR
+          ? 500
+          : 400;
+      sendJson(res, statusCode, result);
+    }
+  } catch (err) {
+    console.error("handlePipelinesHttpRequest unexpected error:", err);
+    sendJson(res, 500, {
+      ok: false,
+      code: "internal_error",
+      message: "Internal server error",
+    });
+  }
+}
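
The handler above reads <PO_ROOT>/pipeline-config/registry.json and expects a top-level "pipelines" object keyed by slug; a missing file yields an empty list. A sketch of a registry shape it would accept and a call against it, assuming the config system has already been initialized with a root path; the slugs and descriptions here are invented for illustration.

// sketch-registry.js (illustrative; registry location and shape mirror the
// handler above, but the entries are hypothetical)
//
// <PO_ROOT>/pipeline-config/registry.json:
// {
//   "pipelines": {
//     "content-generation": { "name": "Content Generation", "description": "Drafts articles" },
//     "data-extraction":    { "name": "Data Extraction" }
//   }
// }
import { handlePipelinesRequest } from "./src/ui/endpoints/pipelines-endpoint.js";

const result = await handlePipelinesRequest();
// ok: true -> data.pipelines: [
//   { slug: "content-generation", name: "Content Generation", description: "Drafts articles" },
//   { slug: "data-extraction", name: "Data Extraction", description: "" }
// ]
console.log(result.ok ? result.data.pipelines : result);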