@ryanfw/prompt-orchestration-pipeline 0.12.0 → 0.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/package.json +10 -1
  2. package/src/cli/analyze-task.js +51 -0
  3. package/src/cli/index.js +8 -0
  4. package/src/components/AddPipelineSidebar.jsx +144 -0
  5. package/src/components/AnalysisProgressTray.jsx +87 -0
  6. package/src/components/JobTable.jsx +4 -3
  7. package/src/components/Layout.jsx +142 -139
  8. package/src/components/MarkdownRenderer.jsx +149 -0
  9. package/src/components/PipelineDAGGrid.jsx +404 -0
  10. package/src/components/PipelineTypeTaskSidebar.jsx +96 -0
  11. package/src/components/SchemaPreviewPanel.jsx +97 -0
  12. package/src/components/StageTimeline.jsx +36 -0
  13. package/src/components/TaskAnalysisDisplay.jsx +227 -0
  14. package/src/components/TaskCreationSidebar.jsx +447 -0
  15. package/src/components/TaskDetailSidebar.jsx +119 -117
  16. package/src/components/TaskFilePane.jsx +94 -39
  17. package/src/components/ui/button.jsx +59 -27
  18. package/src/components/ui/sidebar.jsx +118 -0
  19. package/src/config/models.js +99 -67
  20. package/src/core/config.js +4 -1
  21. package/src/llm/index.js +129 -9
  22. package/src/pages/PipelineDetail.jsx +6 -6
  23. package/src/pages/PipelineList.jsx +214 -0
  24. package/src/pages/PipelineTypeDetail.jsx +234 -0
  25. package/src/providers/deepseek.js +76 -16
  26. package/src/providers/openai.js +61 -34
  27. package/src/task-analysis/enrichers/analysis-writer.js +62 -0
  28. package/src/task-analysis/enrichers/schema-deducer.js +145 -0
  29. package/src/task-analysis/enrichers/schema-writer.js +74 -0
  30. package/src/task-analysis/extractors/artifacts.js +137 -0
  31. package/src/task-analysis/extractors/llm-calls.js +176 -0
  32. package/src/task-analysis/extractors/stages.js +51 -0
  33. package/src/task-analysis/index.js +103 -0
  34. package/src/task-analysis/parser.js +28 -0
  35. package/src/task-analysis/utils/ast.js +43 -0
  36. package/src/ui/client/hooks/useAnalysisProgress.js +145 -0
  37. package/src/ui/client/index.css +64 -0
  38. package/src/ui/client/main.jsx +4 -0
  39. package/src/ui/client/sse-fetch.js +120 -0
  40. package/src/ui/dist/assets/index-cjHV9mYW.js +82578 -0
  41. package/src/ui/dist/assets/index-cjHV9mYW.js.map +1 -0
  42. package/src/ui/dist/assets/style-CoM9SoQF.css +180 -0
  43. package/src/ui/dist/index.html +2 -2
  44. package/src/ui/endpoints/create-pipeline-endpoint.js +194 -0
  45. package/src/ui/endpoints/pipeline-analysis-endpoint.js +246 -0
  46. package/src/ui/endpoints/pipeline-type-detail-endpoint.js +181 -0
  47. package/src/ui/endpoints/pipelines-endpoint.js +133 -0
  48. package/src/ui/endpoints/schema-file-endpoint.js +105 -0
  49. package/src/ui/endpoints/task-analysis-endpoint.js +104 -0
  50. package/src/ui/endpoints/task-creation-endpoint.js +114 -0
  51. package/src/ui/endpoints/task-save-endpoint.js +101 -0
  52. package/src/ui/express-app.js +45 -0
  53. package/src/ui/lib/analysis-lock.js +67 -0
  54. package/src/ui/lib/sse.js +30 -0
  55. package/src/ui/server.js +4 -0
  56. package/src/ui/utils/slug.js +31 -0
  57. package/src/ui/watcher.js +28 -2
  58. package/src/ui/dist/assets/index-B320avRx.js +0 -26613
  59. package/src/ui/dist/assets/index-B320avRx.js.map +0 -1
  60. package/src/ui/dist/assets/style-BYCoLBnK.css +0 -62
package/src/ui/endpoints/pipelines-endpoint.js
@@ -0,0 +1,133 @@
+ /**
+ * Pipelines endpoint (logic-only)
+ *
+ * Exports:
+ * - handlePipelinesRequest() -> structured { ok, ... } result
+ * - handlePipelinesHttpRequest(req, res) -> HTTP wrapper that sends it with sendJson()
+ *
+ * Returns structured pipeline metadata from the registry so the frontend can display available pipelines.
+ */
+
+ import { getConfig } from "../../core/config.js";
+ import { sendJson } from "../utils/http-utils.js";
+ import * as configBridge from "../config-bridge.js";
+ import { promises as fs } from "node:fs";
+ import path from "node:path";
+
+ /**
+ * Return pipeline metadata suitable for the API.
+ *
+ * Behavior:
+ * - Read pipeline registry from config system
+ * - Return slug, name, description for each pipeline
+ * - Handle empty registry (return 200 with empty array)
+ * - Handle malformed JSON (return 500 with specific error)
+ * - Handle missing registry file (return 200 with empty array)
+ */
+ export async function handlePipelinesRequest() {
+   console.log("[PipelinesEndpoint] GET /api/pipelines called");
+
+   try {
+     const config = getConfig();
+     const rootDir = config.paths?.root;
+
+     if (!rootDir) {
+       return configBridge.createErrorResponse(
+         configBridge.Constants.ERROR_CODES.BAD_REQUEST,
+         "PO_ROOT not configured"
+       );
+     }
+
+     const registryPath = path.join(rootDir, "pipeline-config", "registry.json");
+
+     // Check if registry file exists
+     try {
+       await fs.access(registryPath);
+     } catch (error) {
+       if (error.code === "ENOENT") {
+         // Missing registry file - return empty array as specified
+         return { ok: true, data: { pipelines: [] } };
+       }
+       throw error;
+     }
+
+     // Read and parse registry file
+     let registryData;
+     try {
+       const contents = await fs.readFile(registryPath, "utf8");
+       registryData = JSON.parse(contents);
+     } catch (error) {
+       if (error instanceof SyntaxError) {
+         return configBridge.createErrorResponse(
+           configBridge.Constants.ERROR_CODES.INVALID_JSON,
+           "Invalid JSON in pipeline registry",
+           registryPath
+         );
+       }
+       throw error;
+     }
+
+     // Validate registry structure
+     if (
+       !registryData ||
+       typeof registryData !== "object" ||
+       !registryData.pipelines ||
+       typeof registryData.pipelines !== "object"
+     ) {
+       return configBridge.createErrorResponse(
+         configBridge.Constants.ERROR_CODES.INVALID_JSON,
+         "Invalid pipeline registry format: expected 'pipelines' object",
+         registryPath
+       );
+     }
+
+     // Transform pipeline entries to API format
+     const pipelines = [];
+     for (const [slug, entry] of Object.entries(registryData.pipelines)) {
+       pipelines.push({
+         slug,
+         name: entry?.name || slug,
+         description: entry?.description || "",
+       });
+     }
+
+     return { ok: true, data: { pipelines } };
+   } catch (err) {
+     console.error("handlePipelinesRequest error:", err);
+     return configBridge.createErrorResponse(
+       configBridge.Constants.ERROR_CODES.FS_ERROR,
+       "Failed to read pipeline registry"
+     );
+   }
+ }
+
+ /**
+ * HTTP wrapper function for pipelines requests.
+ * Calls handlePipelinesRequest() and sends the response using sendJson().
+ */
+ export async function handlePipelinesHttpRequest(req, res) {
+   console.info("[PipelinesEndpoint] handlePipelinesHttpRequest called");
+
+   try {
+     const result = await handlePipelinesRequest();
+
+     if (result.ok) {
+       sendJson(res, 200, result);
+     } else {
+       // Map error codes to appropriate HTTP status codes
+       const statusCode =
+         result.code === configBridge.Constants.ERROR_CODES.INVALID_JSON ||
+         result.code === configBridge.Constants.ERROR_CODES.FS_ERROR
+           ? 500
+           : 400;
+       sendJson(res, statusCode, result);
+     }
+   } catch (err) {
+     console.error("handlePipelinesHttpRequest unexpected error:", err);
+     sendJson(res, 500, {
+       ok: false,
+       code: "internal_error",
+       message: "Internal server error",
+     });
+   }
+ }
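The handler reads `<PO_ROOT>/pipeline-config/registry.json` and expects a `pipelines` object keyed by slug. The registry contents and client call below are a sketch of how the new GET /api/pipelines route would be consumed; the slug and names are invented for illustration, only the response shape comes from the handler above.

```js
// Hypothetical pipeline-config/registry.json:
//   { "pipelines": { "article-gen": { "name": "Article Generator", "description": "..." } } }

// Client-side consumption sketch:
const res = await fetch("/api/pipelines");
const body = await res.json();
if (body.ok) {
  for (const { slug, name, description } of body.data.pipelines) {
    console.log(`${slug}: ${name} (${description})`);
  }
}
```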
package/src/ui/endpoints/schema-file-endpoint.js
@@ -0,0 +1,105 @@
+ /**
+ * Schema file endpoint
+ *
+ * Exports:
+ * - handleSchemaFileRequest(req, res) -> HTTP handler function
+ *
+ * Serves schema and sample JSON files for pipeline tasks.
+ */
+
+ import { getPipelineConfig } from "../../core/config.js";
+ import { sendJson } from "../utils/http-utils.js";
+ import { promises as fs } from "node:fs";
+ import path from "node:path";
+
+ /**
+ * HTTP handler for schema file requests.
+ *
+ * @param {http.IncomingMessage} req - HTTP request object
+ * @param {http.ServerResponse} res - HTTP response object
+ */
+ export async function handleSchemaFileRequest(req, res) {
+   const { slug, fileName } = req.params;
+   const { type } = req.query;
+
+   // Validate slug parameter
+   if (!slug || typeof slug !== "string" || !/^[A-Za-z0-9_-]+$/.test(slug)) {
+     return sendJson(res, 400, {
+       ok: false,
+       code: "invalid_params",
+       message: "Invalid slug parameter",
+     });
+   }
+
+   // Validate fileName parameter (no path traversal)
+   if (
+     !fileName ||
+     typeof fileName !== "string" ||
+     !/^[A-Za-z0-9_.-]+$/.test(fileName)
+   ) {
+     return sendJson(res, 400, {
+       ok: false,
+       code: "invalid_params",
+       message: "Invalid fileName parameter",
+     });
+   }
+
+   // Validate type parameter
+   if (type !== "schema" && type !== "sample") {
+     return sendJson(res, 400, {
+       ok: false,
+       code: "invalid_params",
+       message: "Invalid type parameter - must be 'schema' or 'sample'",
+     });
+   }
+
+   try {
+     // Get pipeline configuration
+     let pipelineConfig;
+     try {
+       pipelineConfig = getPipelineConfig(slug);
+     } catch (error) {
+       return sendJson(res, 404, {
+         ok: false,
+         code: "not_found",
+         message: `Pipeline '${slug}' not found in registry`,
+       });
+     }
+
+     const pipelineDir = path.dirname(pipelineConfig.pipelineJsonPath);
+     const baseName = path.parse(fileName).name;
+     const schemaFilePath = path.join(
+       pipelineDir,
+       "schemas",
+       `${baseName}.${type}.json`
+     );
+
+     // Read schema file
+     let fileContents;
+     try {
+       fileContents = await fs.readFile(schemaFilePath, "utf8");
+     } catch (error) {
+       if (error.code === "ENOENT") {
+         return sendJson(res, 404, {
+           ok: false,
+           code: "not_found",
+           message: "Schema file not found",
+         });
+       }
+       throw error;
+     }
+
+     // Return raw file contents (not parsed)
+     return sendJson(res, 200, {
+       ok: true,
+       data: fileContents,
+     });
+   } catch (error) {
+     console.error("handleSchemaFileRequest unexpected error:", error);
+     return sendJson(res, 500, {
+       ok: false,
+       code: "internal_error",
+       message: "Internal server error",
+     });
+   }
+ }
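A minimal sketch of how the UI might fetch a schema for a task file. The pipeline slug `article-gen` and task file `extract-entities.js` are illustrative; the handler resolves them to `<pipelineDir>/schemas/extract-entities.schema.json` and returns the raw text.

```js
// The `type` query switches between <base>.schema.json and <base>.sample.json.
const res = await fetch(
  "/api/pipelines/article-gen/schemas/extract-entities.js?type=schema"
);
const body = await res.json();
// body.data is the raw file text, not parsed JSON, so the caller parses it itself:
const schema = body.ok ? JSON.parse(body.data) : null;
```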
package/src/ui/endpoints/task-analysis-endpoint.js
@@ -0,0 +1,104 @@
+ /**
+ * Task analysis endpoint
+ *
+ * Exports:
+ * - handleTaskAnalysisRequest(req, res) -> HTTP handler function
+ *
+ * Returns task analysis data if available, or null if no analysis file exists.
+ */
+
+ import { getPipelineConfig } from "../../core/config.js";
+ import { sendJson } from "../utils/http-utils.js";
+ import { promises as fs } from "node:fs";
+ import path from "node:path";
+
+ /**
+ * HTTP handler for task analysis requests.
+ *
+ * @param {http.IncomingMessage} req - HTTP request object
+ * @param {http.ServerResponse} res - HTTP response object
+ */
+ export async function handleTaskAnalysisRequest(req, res) {
+   const { slug, taskId } = req.params;
+
+   // Validate slug parameter
+   if (!slug || typeof slug !== "string" || !/^[A-Za-z0-9_-]+$/.test(slug)) {
+     return sendJson(res, 400, {
+       ok: false,
+       code: "invalid_params",
+       message: "Invalid slug parameter",
+     });
+   }
+
+   // Validate taskId parameter
+   if (
+     !taskId ||
+     typeof taskId !== "string" ||
+     !/^[A-Za-z0-9_-]+$/.test(taskId)
+   ) {
+     return sendJson(res, 400, {
+       ok: false,
+       code: "invalid_params",
+       message: "Invalid taskId parameter",
+     });
+   }
+
+   try {
+     // Get pipeline configuration
+     let pipelineConfig;
+     try {
+       pipelineConfig = getPipelineConfig(slug);
+     } catch (error) {
+       return sendJson(res, 404, {
+         ok: false,
+         code: "not_found",
+         message: `Pipeline '${slug}' not found in registry`,
+       });
+     }
+
+     const pipelineDir = path.dirname(pipelineConfig.pipelineJsonPath);
+     const analysisPath = path.join(
+       pipelineDir,
+       "analysis",
+       `${taskId}.analysis.json`
+     );
+
+     // Attempt to read and parse analysis file
+     let analysisData;
+     try {
+       const contents = await fs.readFile(analysisPath, "utf8");
+       analysisData = JSON.parse(contents);
+     } catch (error) {
+       if (error.code === "ENOENT") {
+         // Analysis file doesn't exist - this is not an error
+         return sendJson(res, 200, {
+           ok: true,
+           data: null,
+         });
+       }
+
+       if (error instanceof SyntaxError) {
+         return sendJson(res, 500, {
+           ok: false,
+           code: "invalid_json",
+           message: "Invalid JSON in analysis file",
+         });
+       }
+
+       throw error;
+     }
+
+     // Return analysis data
+     return sendJson(res, 200, {
+       ok: true,
+       data: analysisData,
+     });
+   } catch (error) {
+     console.error("handleTaskAnalysisRequest unexpected error:", error);
+     return sendJson(res, 500, {
+       ok: false,
+       code: "internal_error",
+       message: "Internal server error",
+     });
+   }
+ }
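Sketch of the three outcomes a caller has to distinguish, using an illustrative slug and task ID; the response shapes are taken from the handler above.

```js
const res = await fetch("/api/pipelines/article-gen/tasks/extract-entities/analysis");
const body = await res.json();
if (body.ok && body.data === null) {
  // No <taskId>.analysis.json on disk yet: "not analyzed", not an error.
} else if (body.ok) {
  // body.data is the parsed analysis object.
} else {
  // invalid_json or internal_error, reported with a 500 status.
}
```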
package/src/ui/endpoints/task-creation-endpoint.js
@@ -0,0 +1,114 @@
+ import fs from "node:fs";
+ import { streamSSE } from "../lib/sse.js";
+ import { createHighLevelLLM } from "../../llm/index.js";
+
+ export async function handleTaskPlan(req, res) {
+   console.log("[task-creation-endpoint] Request received");
+
+   const { messages, pipelineSlug } = req.body;
+
+   console.log("[task-creation-endpoint] Request details:", {
+     hasMessages: !!messages,
+     messageCount: Array.isArray(messages) ? messages.length : 0,
+     pipelineSlug,
+     bodyKeys: Object.keys(req.body),
+   });
+
+   // Validate input
+   if (!Array.isArray(messages)) {
+     console.error(
+       "[task-creation-endpoint] Validation failed: messages is not an array"
+     );
+     res.status(400).json({ error: "messages must be an array" });
+     return;
+   }
+
+   console.log(
+     "[task-creation-endpoint] Loading guidelines from docs/pipeline-task-guidelines.md..."
+   );
+
+   // Load guidelines - let it throw if missing
+   const guidelinesPath = "docs/pipeline-task-guidelines.md";
+   const guidelines = fs.readFileSync(guidelinesPath, "utf-8");
+
+   console.log(
+     "[task-creation-endpoint] Guidelines loaded, length:",
+     guidelines.length
+   );
+
+   // Build LLM messages array
+   const systemPrompt = `You are a pipeline task assistant. Help users create task definitions following these guidelines:
+
+ ${guidelines}
+
+ Provide complete, working code. Use markdown code blocks.
+
+ When you have completed a task definition that the user wants to create, wrap it in this format:
+ [TASK_PROPOSAL]
+ FILENAME: <filename.js>
+ TASKNAME: <task-name>
+ CODE:
+ \`\`\`javascript
+ <the complete task code here>
+ \`\`\`
+ [/TASK_PROPOSAL]`;
+
+   const llmMessages = [{ role: "system", content: systemPrompt }, ...messages];
+
+   console.log("[task-creation-endpoint] LLM messages array created:", {
+     totalMessages: llmMessages.length,
+     systemPromptLength: systemPrompt.length,
+   });
+
+   // Create SSE stream
+   console.log("[task-creation-endpoint] Creating SSE stream...");
+   const sse = streamSSE(res);
+
+   try {
+     console.log("[task-creation-endpoint] Creating LLM instance...");
+     // Get LLM instance (uses default provider from config)
+     const llm = createHighLevelLLM();
+
+     console.log("[task-creation-endpoint] Calling LLM chat with streaming...");
+     // Call LLM with streaming enabled
+     const response = await llm.chat({
+       messages: llmMessages,
+       responseFormat: "text",
+       stream: true,
+     });
+
+     console.log("[task-creation-endpoint] LLM response received:", {
+       isStream: typeof response[Symbol.asyncIterator] !== "undefined",
+     });
+
+     // Stream is an async generator
+     let chunkCount = 0;
+     for await (const chunk of response) {
+       if (chunk?.content) {
+         sse.send("chunk", { content: chunk.content });
+         chunkCount++;
+       }
+     }
+
+     console.log("[task-creation-endpoint] Sent", chunkCount, "chunks via SSE");
+
+     // Send done event
+     console.log("[task-creation-endpoint] Sending 'done' event...");
+     sse.send("done", {});
+     console.log("[task-creation-endpoint] Ending SSE stream...");
+     sse.end();
+     console.log("[task-creation-endpoint] Request completed successfully");
+   } catch (error) {
+     console.error("[task-creation-endpoint] Error occurred:", {
+       message: error.message,
+       stack: error.stack,
+       name: error.name,
+     });
+     // Send error event
+     sse.send("error", { message: error.message });
+     console.log(
+       "[task-creation-endpoint] Error sent via SSE, ending stream..."
+     );
+     sse.end();
+   }
+ }
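The system prompt asks the model to wrap a finished task in a `[TASK_PROPOSAL]` block. Below is a hypothetical client-side extractor for that format; it is not part of the package, and the regex is only an assumption about how the accumulated SSE text could be parsed once the `done` event arrives.

```js
// Pull FILENAME, TASKNAME, and the fenced code out of an accumulated transcript.
// \x60 is a backtick, so \x60{3} matches the ```javascript fence requested in the prompt.
function extractTaskProposal(text) {
  const re =
    /\[TASK_PROPOSAL\][\s\S]*?FILENAME:\s*(\S+)[\s\S]*?TASKNAME:\s*(\S+)[\s\S]*?CODE:\s*\x60{3}javascript\n([\s\S]*?)\x60{3}[\s\S]*?\[\/TASK_PROPOSAL\]/;
  const m = text.match(re);
  return m ? { filename: m[1], taskName: m[2], code: m[3] } : null;
}
```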
package/src/ui/endpoints/task-save-endpoint.js
@@ -0,0 +1,101 @@
+ import path from "node:path";
+ import { promises as fs } from "node:fs";
+ import { getConfig } from "../../core/config.js";
+ import { sendJson } from "../utils/http-utils.js";
+
+ /**
+ * Handle task creation requests
+ *
+ * POST /api/tasks/create
+ * Body: { pipelineSlug, filename, taskName, code }
+ *
+ * Creates a new task file and updates the pipeline's task registry index.js
+ */
+ export async function handleTaskSave(req, res) {
+   try {
+     const { pipelineSlug, filename, taskName, code } = req.body;
+
+     if (!pipelineSlug) {
+       return sendJson(res, 400, { error: "pipelineSlug is required" });
+     }
+     // Validate filename ends with .js
+     if (!filename || !filename.endsWith(".js")) {
+       return sendJson(res, 400, { error: "Filename must end with .js" });
+     }
+
+     // Validate taskName is kebab-case
+     const kebabCaseRegex = /^[a-z][a-z0-9-]*$/;
+     if (!taskName || !kebabCaseRegex.test(taskName)) {
+       return sendJson(res, 400, { error: "TaskName must be kebab-case" });
+     }
+
+     // Get configuration and root directory
+     const config = getConfig();
+     const rootDir = config.paths.root;
+
+     // Read registry.json to find pipeline's taskRegistryPath
+     const registryPath = path.join(rootDir, "pipeline-config", "registry.json");
+     const registryData = JSON.parse(await fs.readFile(registryPath, "utf8"));
+
+     // Look up pipeline in registry
+     const pipelineEntry = registryData.pipelines[pipelineSlug];
+     if (!pipelineEntry) {
+       return sendJson(res, 404, { error: "Pipeline not found" });
+     }
+
+     // Get task registry path (relative to root)
+     const taskRegistryPath = path.join(rootDir, pipelineEntry.taskRegistryPath);
+     const tasksDir = path.dirname(taskRegistryPath);
+
+     // Write task file (prevent path traversal by validating resolved path)
+     const taskFilePath = path.resolve(tasksDir, filename);
+     if (!taskFilePath.startsWith(tasksDir)) {
+       return sendJson(res, 400, { error: "Invalid filename" });
+     }
+     await fs.writeFile(taskFilePath, code, "utf8");
+
+     // Update index.js to export new task
+     const indexPath = taskRegistryPath;
+     let indexContent = await fs.readFile(indexPath, "utf8");
+
+     // Check if task name already exists in the index
+     const taskNamePattern = new RegExp(`^\\s*${taskName}\\s*:`, "m");
+     if (taskNamePattern.test(indexContent)) {
+       return sendJson(res, 400, {
+         error: `Task "${taskName}" already exists in the registry`,
+       });
+     }
+
+     // Find the line containing "export default {"
+     const exportLine = "export default {";
+     const exportLineIndex = indexContent.indexOf(exportLine);
+
+     if (exportLineIndex === -1) {
+       return sendJson(res, 500, {
+         error: "Failed to find export default line in index.js",
+       });
+     }
+
+     // Insert new task entry after the export line
+     const insertPosition = indexContent.indexOf("\n", exportLineIndex) + 1;
+     const newEntry = ` ${taskName}: "./${filename}",\n`;
+
+     indexContent =
+       indexContent.slice(0, insertPosition) +
+       newEntry +
+       indexContent.slice(insertPosition);
+
+     // Write updated index.js
+     await fs.writeFile(indexPath, indexContent, "utf8");
+
+     return sendJson(res, 200, {
+       ok: true,
+       path: taskFilePath,
+     });
+   } catch (error) {
+     console.error("Error creating task:", error);
+     return sendJson(res, 500, {
+       error: error.message || "Failed to create task",
+     });
+   }
+ }
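An illustrative request against the new route (all values invented); the handler both writes the task file and inserts an entry into the pipeline's task registry index.js.

```js
await fetch("/api/tasks/create", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    pipelineSlug: "article-gen",
    filename: "summarize.js",
    taskName: "summarize",
    code: "export default async function summarize(context) { /* ... */ }",
  }),
});
// On success the handler writes <tasksDir>/summarize.js and inserts
//   summarize: "./summarize.js",
// on the line right after "export default {" in the registry index.js.
```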
package/src/ui/express-app.js
@@ -18,6 +18,14 @@ import {
  handleTaskFileListRequest,
  handleTaskFileRequest,
  } from "./endpoints/file-endpoints.js";
+ import { handlePipelinesHttpRequest } from "./endpoints/pipelines-endpoint.js";
+ import { handleCreatePipeline } from "./endpoints/create-pipeline-endpoint.js";
+ import { handlePipelineTypeDetailRequest } from "./endpoints/pipeline-type-detail-endpoint.js";
+ import { handlePipelineAnalysis } from "./endpoints/pipeline-analysis-endpoint.js";
+ import { handleTaskAnalysisRequest } from "./endpoints/task-analysis-endpoint.js";
+ import { handleSchemaFileRequest } from "./endpoints/schema-file-endpoint.js";
+ import { handleTaskPlan } from "./endpoints/task-creation-endpoint.js";
+ import { handleTaskSave } from "./endpoints/task-save-endpoint.js";
  import { sendJson } from "./utils/http-utils.js";
  import { PROVIDER_FUNCTIONS } from "../config/models.js";
 
@@ -35,6 +43,12 @@ const __dirname = path.dirname(__filename);
  export function buildExpressApp({ dataDir, viteServer }) {
  const app = express();
 
+   // Parse JSON request bodies
+   app.use(express.json());
+
+   // Parse URL-encoded request bodies (for form submissions)
+   app.use(express.urlencoded({ extended: true }));
+
  // API guard middleware mounted on /api
  app.use("/api", (req, res, next) => {
  // Set CORS headers
@@ -103,6 +117,31 @@ export function buildExpressApp({ dataDir, viteServer }) {
  }
  });
 
+   // GET /api/pipelines
+   app.get("/api/pipelines", async (req, res) => {
+     await handlePipelinesHttpRequest(req, res);
+   });
+
+   // POST /api/pipelines
+   app.post("/api/pipelines", handleCreatePipeline);
+
+   // GET /api/pipelines/:slug
+   app.get("/api/pipelines/:slug", async (req, res) => {
+     await handlePipelineTypeDetailRequest(req, res);
+   });
+
+   // POST /api/pipelines/:slug/analyze
+   app.post("/api/pipelines/:slug/analyze", handlePipelineAnalysis);
+
+   // GET /api/pipelines/:slug/tasks/:taskId/analysis
+   app.get(
+     "/api/pipelines/:slug/tasks/:taskId/analysis",
+     handleTaskAnalysisRequest
+   );
+
+   // GET /api/pipelines/:slug/schemas/:fileName
+   app.get("/api/pipelines/:slug/schemas/:fileName", handleSchemaFileRequest);
+
  // GET /api/jobs
  app.get("/api/jobs", async (req, res) => {
  await handleJobListRequest(req, res);
@@ -163,6 +202,12 @@ export function buildExpressApp({ dataDir, viteServer }) {
  });
  });
 
+   // POST /api/ai/task-plan
+   app.post("/api/ai/task-plan", handleTaskPlan);
+
+   // POST /api/tasks/create
+   app.post("/api/tasks/create", handleTaskSave);
+
  // Dev middleware (mount after all API routes)
  if (viteServer && viteServer.middlewares) {
  app.use(viteServer.middlewares);
package/src/ui/lib/analysis-lock.js
@@ -0,0 +1,67 @@
+ /**
+ * In-memory lock for pipeline analysis operations.
+ * Ensures only one pipeline can be analyzed at a time.
+ */
+
+ let currentLock = null;
+
+ /**
+ * Attempt to acquire the analysis lock for a pipeline.
+ * @param {string} pipelineSlug - The pipeline identifier
+ * @returns {{ acquired: true } | { acquired: false, heldBy: string }}
+ */
+ export function acquireLock(pipelineSlug) {
+   if (!pipelineSlug || typeof pipelineSlug !== "string") {
+     throw new Error(
+       `Invalid pipelineSlug: expected non-empty string, got ${typeof pipelineSlug}`
+     );
+   }
+
+   if (currentLock === null) {
+     currentLock = {
+       pipelineSlug,
+       startedAt: new Date(),
+     };
+     return { acquired: true };
+   }
+
+   return {
+     acquired: false,
+     heldBy: currentLock.pipelineSlug,
+   };
+ }
+
+ /**
+ * Release the analysis lock for a pipeline.
+ * @param {string} pipelineSlug - The pipeline identifier that holds the lock
+ * @throws {Error} If the lock is not held by this pipeline
+ */
+ export function releaseLock(pipelineSlug) {
+   if (!pipelineSlug || typeof pipelineSlug !== "string") {
+     throw new Error(
+       `Invalid pipelineSlug: expected non-empty string, got ${typeof pipelineSlug}`
+     );
+   }
+
+   if (currentLock === null) {
+     throw new Error(
+       `Cannot release lock for '${pipelineSlug}': no lock is currently held`
+     );
+   }
+
+   if (currentLock.pipelineSlug !== pipelineSlug) {
+     throw new Error(
+       `Cannot release lock for '${pipelineSlug}': lock is held by '${currentLock.pipelineSlug}'`
+     );
+   }
+
+   currentLock = null;
+ }
+
+ /**
+ * Get the current lock status.
+ * @returns {{ pipelineSlug: string, startedAt: Date } | null}
+ */
+ export function getLockStatus() {
+   return currentLock;
+ }
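Typical acquire/release usage around an analysis run, sketched from the exported API above; the calling code (pipeline-analysis-endpoint.js) is not shown in this diff, and the slug is illustrative.

```js
import { acquireLock, releaseLock } from "./lib/analysis-lock.js";

const lock = acquireLock("article-gen");
if (!lock.acquired) {
  // Another analysis is already running; lock.heldBy names the pipeline that holds it.
} else {
  try {
    // ... run the pipeline analysis ...
  } finally {
    releaseLock("article-gen"); // throws if this slug does not hold the lock
  }
}
```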