@ryanfw/prompt-orchestration-pipeline 0.11.0 → 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +11 -1
- package/src/cli/analyze-task.js +51 -0
- package/src/cli/index.js +8 -0
- package/src/components/AddPipelineSidebar.jsx +144 -0
- package/src/components/AnalysisProgressTray.jsx +87 -0
- package/src/components/DAGGrid.jsx +157 -47
- package/src/components/JobTable.jsx +4 -3
- package/src/components/Layout.jsx +142 -139
- package/src/components/MarkdownRenderer.jsx +149 -0
- package/src/components/PipelineDAGGrid.jsx +404 -0
- package/src/components/PipelineTypeTaskSidebar.jsx +96 -0
- package/src/components/SchemaPreviewPanel.jsx +97 -0
- package/src/components/StageTimeline.jsx +36 -0
- package/src/components/TaskAnalysisDisplay.jsx +227 -0
- package/src/components/TaskCreationSidebar.jsx +447 -0
- package/src/components/TaskDetailSidebar.jsx +119 -117
- package/src/components/TaskFilePane.jsx +94 -39
- package/src/components/ui/RestartJobModal.jsx +26 -6
- package/src/components/ui/StopJobModal.jsx +183 -0
- package/src/components/ui/button.jsx +59 -27
- package/src/components/ui/sidebar.jsx +118 -0
- package/src/config/models.js +99 -67
- package/src/core/config.js +11 -4
- package/src/core/lifecycle-policy.js +62 -0
- package/src/core/pipeline-runner.js +312 -217
- package/src/core/status-writer.js +84 -0
- package/src/llm/index.js +129 -9
- package/src/pages/Code.jsx +8 -1
- package/src/pages/PipelineDetail.jsx +84 -2
- package/src/pages/PipelineList.jsx +214 -0
- package/src/pages/PipelineTypeDetail.jsx +234 -0
- package/src/pages/PromptPipelineDashboard.jsx +10 -11
- package/src/providers/deepseek.js +76 -16
- package/src/providers/openai.js +61 -34
- package/src/task-analysis/enrichers/analysis-writer.js +62 -0
- package/src/task-analysis/enrichers/schema-deducer.js +145 -0
- package/src/task-analysis/enrichers/schema-writer.js +74 -0
- package/src/task-analysis/extractors/artifacts.js +137 -0
- package/src/task-analysis/extractors/llm-calls.js +176 -0
- package/src/task-analysis/extractors/stages.js +51 -0
- package/src/task-analysis/index.js +103 -0
- package/src/task-analysis/parser.js +28 -0
- package/src/task-analysis/utils/ast.js +43 -0
- package/src/ui/client/adapters/job-adapter.js +60 -0
- package/src/ui/client/api.js +233 -8
- package/src/ui/client/hooks/useAnalysisProgress.js +145 -0
- package/src/ui/client/hooks/useJobList.js +14 -1
- package/src/ui/client/index.css +64 -0
- package/src/ui/client/main.jsx +4 -0
- package/src/ui/client/sse-fetch.js +120 -0
- package/src/ui/dist/app.js +262 -0
- package/src/ui/dist/assets/index-cjHV9mYW.js +82578 -0
- package/src/ui/dist/assets/index-cjHV9mYW.js.map +1 -0
- package/src/ui/dist/assets/style-CoM9SoQF.css +180 -0
- package/src/ui/dist/favicon.svg +12 -0
- package/src/ui/dist/index.html +2 -2
- package/src/ui/endpoints/create-pipeline-endpoint.js +194 -0
- package/src/ui/endpoints/file-endpoints.js +330 -0
- package/src/ui/endpoints/job-control-endpoints.js +1001 -0
- package/src/ui/endpoints/job-endpoints.js +62 -0
- package/src/ui/endpoints/pipeline-analysis-endpoint.js +246 -0
- package/src/ui/endpoints/pipeline-type-detail-endpoint.js +181 -0
- package/src/ui/endpoints/pipelines-endpoint.js +133 -0
- package/src/ui/endpoints/schema-file-endpoint.js +105 -0
- package/src/ui/endpoints/sse-endpoints.js +223 -0
- package/src/ui/endpoints/state-endpoint.js +85 -0
- package/src/ui/endpoints/task-analysis-endpoint.js +104 -0
- package/src/ui/endpoints/task-creation-endpoint.js +114 -0
- package/src/ui/endpoints/task-save-endpoint.js +101 -0
- package/src/ui/endpoints/upload-endpoints.js +406 -0
- package/src/ui/express-app.js +227 -0
- package/src/ui/lib/analysis-lock.js +67 -0
- package/src/ui/lib/sse.js +30 -0
- package/src/ui/server.js +42 -1880
- package/src/ui/sse-broadcast.js +93 -0
- package/src/ui/utils/http-utils.js +139 -0
- package/src/ui/utils/mime-types.js +196 -0
- package/src/ui/utils/slug.js +31 -0
- package/src/ui/vite.config.js +22 -0
- package/src/ui/watcher.js +28 -2
- package/src/utils/jobs.js +39 -0
- package/src/ui/dist/assets/index-DeDzq-Kk.js +0 -23863
- package/src/ui/dist/assets/style-aBtD_Yrs.css +0 -62
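
The headline change in this range is the breakup of the monolithic UI server: package/src/ui/server.js loses roughly 1,880 lines while src/ui/express-app.js and the new modules under src/ui/endpoints/ take over request handling. The handlers reproduced in the hunks below read req.params, req.query, and req.body, and one calls res.status().json(), which points to Express-style routing. The wiring sketch below is for orientation only: the real mounting code lives in express-app.js (not reproduced here), every route path shown is an assumption, and only the handler names come from this diff.

// Hypothetical wiring sketch -- not the package's actual express-app.js.
// Route paths are guesses; only the handler names appear in the hunks below.
import express from "express";
import { handleSchemaFileRequest } from "./endpoints/schema-file-endpoint.js";
import { handleTaskAnalysisRequest } from "./endpoints/task-analysis-endpoint.js";
import { handleTaskPlan } from "./endpoints/task-creation-endpoint.js";
import { handleSseEvents, handleApiState } from "./endpoints/sse-endpoints.js";

const app = express();
app.use(express.json());

// Assumed route shapes, matching the parameters each handler destructures.
app.get("/api/pipelines/:slug/schemas/:fileName", handleSchemaFileRequest);
app.get("/api/pipelines/:slug/tasks/:taskId/analysis", handleTaskAnalysisRequest);
app.post("/api/task-plan", handleTaskPlan);
app.get("/api/state", handleApiState);
app.get("/api/events", (req, res) =>
  handleSseEvents(req, res, new URL(req.url, "http://localhost").searchParams)
);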
package/src/ui/endpoints/schema-file-endpoint.js
@@ -0,0 +1,105 @@
+/**
+ * Schema file endpoint
+ *
+ * Exports:
+ * - handleSchemaFileRequest(req, res) -> HTTP handler function
+ *
+ * Serves schema and sample JSON files for pipeline tasks.
+ */
+
+import { getPipelineConfig } from "../../core/config.js";
+import { sendJson } from "../utils/http-utils.js";
+import { promises as fs } from "node:fs";
+import path from "node:path";
+
+/**
+ * HTTP handler for schema file requests.
+ *
+ * @param {http.IncomingMessage} req - HTTP request object
+ * @param {http.ServerResponse} res - HTTP response object
+ */
+export async function handleSchemaFileRequest(req, res) {
+  const { slug, fileName } = req.params;
+  const { type } = req.query;
+
+  // Validate slug parameter
+  if (!slug || typeof slug !== "string" || !/^[A-Za-z0-9_-]+$/.test(slug)) {
+    return sendJson(res, 400, {
+      ok: false,
+      code: "invalid_params",
+      message: "Invalid slug parameter",
+    });
+  }
+
+  // Validate fileName parameter (no path traversal)
+  if (
+    !fileName ||
+    typeof fileName !== "string" ||
+    !/^[A-Za-z0-9_.-]+$/.test(fileName)
+  ) {
+    return sendJson(res, 400, {
+      ok: false,
+      code: "invalid_params",
+      message: "Invalid fileName parameter",
+    });
+  }
+
+  // Validate type parameter
+  if (type !== "schema" && type !== "sample") {
+    return sendJson(res, 400, {
+      ok: false,
+      code: "invalid_params",
+      message: "Invalid type parameter - must be 'schema' or 'sample'",
+    });
+  }
+
+  try {
+    // Get pipeline configuration
+    let pipelineConfig;
+    try {
+      pipelineConfig = getPipelineConfig(slug);
+    } catch (error) {
+      return sendJson(res, 404, {
+        ok: false,
+        code: "not_found",
+        message: `Pipeline '${slug}' not found in registry`,
+      });
+    }
+
+    const pipelineDir = path.dirname(pipelineConfig.pipelineJsonPath);
+    const baseName = path.parse(fileName).name;
+    const schemaFilePath = path.join(
+      pipelineDir,
+      "schemas",
+      `${baseName}.${type}.json`
+    );
+
+    // Read schema file
+    let fileContents;
+    try {
+      fileContents = await fs.readFile(schemaFilePath, "utf8");
+    } catch (error) {
+      if (error.code === "ENOENT") {
+        return sendJson(res, 404, {
+          ok: false,
+          code: "not_found",
+          message: "Schema file not found",
+        });
+      }
+      throw error;
+    }
+
+    // Return raw file contents (not parsed)
+    return sendJson(res, 200, {
+      ok: true,
+      data: fileContents,
+    });
+  } catch (error) {
+    console.error("handleSchemaFileRequest unexpected error:", error);
+    return sendJson(res, 500, {
+      ok: false,
+      code: "internal_error",
+      message: "Internal server error",
+    });
+  }
+}
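
A sketch of how a client might call this handler. The concrete route is defined in express-app.js and is assumed here; only the type query parameter ("schema" or "sample") and the { ok, data } envelope, where data holds the raw unparsed file text, come from the handler above.

// Assumed URL shape; envelope and query parameter taken from the handler above.
async function fetchTaskSchema(slug, fileName, type = "schema") {
  const res = await fetch(
    `/api/pipelines/${slug}/schemas/${encodeURIComponent(fileName)}?type=${type}`
  );
  const body = await res.json();
  if (!body.ok) throw new Error(`${body.code}: ${body.message}`);
  // data is the raw schema/sample JSON text, so parse it on the client
  return JSON.parse(body.data);
}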
package/src/ui/endpoints/sse-endpoints.js
@@ -0,0 +1,223 @@
+/**
+ * SSE (Server-Sent Events) and state management endpoints
+ */
+
+import { sseRegistry } from "../sse.js";
+import { sendJson } from "../utils/http-utils.js";
+
+/**
+ * Decorate a change object with jobId and lifecycle information
+ */
+function decorateChangeWithJobId(change) {
+  if (!change || typeof change !== "object") return change;
+  const normalizedPath = String(change.path || "").replace(/\\/g, "/");
+  const match = normalizedPath.match(
+    /pipeline-data\/(current|complete|pending|rejected)\/([^/]+)/
+  );
+  if (!match) {
+    return change;
+  }
+  return {
+    ...change,
+    lifecycle: match[1],
+    jobId: match[2],
+  };
+}
+
+/**
+ * Prioritize job status changes from a list of changes
+ */
+function prioritizeJobStatusChange(changes = []) {
+  const normalized = changes.map((change) => decorateChangeWithJobId(change));
+  const statusChange = normalized.find(
+    (change) =>
+      typeof change?.path === "string" &&
+      /tasks-status\.json$/.test(change.path)
+  );
+  return statusChange || normalized[0] || null;
+}
+
+/**
+ * Broadcast state update to all SSE clients
+ *
+ * NOTE: Per plan, SSE should emit compact, incremental events rather than
+ * streaming full application state. Use /api/state for full snapshot
+ * retrieval on client bootstrap. This function will emit only the most
+ * recent change when available (type: "state:change") and fall back to a
+ * lightweight summary event if no recent change is present.
+ */
+function broadcastStateUpdate(currentState) {
+  try {
+    const recentChanges = (currentState && currentState.recentChanges) || [];
+    const latest = prioritizeJobStatusChange(recentChanges);
+    console.debug("[Server] Broadcasting state update:", {
+      latest,
+      currentState,
+    });
+    if (latest) {
+      // Emit only the most recent change as a compact, typed event
+      const eventData = { type: "state:change", data: latest };
+      console.debug("[Server] Broadcasting event:", eventData);
+      sseRegistry.broadcast(eventData);
+    } else {
+      // Fallback: emit a minimal summary so clients can observe a state "tick"
+      const eventData = {
+        type: "state:summary",
+        data: {
+          changeCount:
+            currentState && currentState.changeCount
+              ? currentState.changeCount
+              : 0,
+        },
+      };
+      console.debug("[Server] Broadcasting summary event:", eventData);
+      sseRegistry.broadcast(eventData);
+    }
+  } catch (err) {
+    // Defensive: if something unexpected happens, fall back to a lightweight notification
+    try {
+      console.error("[Server] Error in broadcastStateUpdate:", err);
+      sseRegistry.broadcast({
+        type: "state:summary",
+        data: {
+          changeCount:
+            currentState && currentState.changeCount
+              ? currentState.changeCount
+              : 0,
+        },
+      });
+    } catch (fallbackErr) {
+      // Log error to aid debugging; this should never happen unless sseRegistry.broadcast is broken
+      console.error(
+        "Failed to broadcast fallback state summary in broadcastStateUpdate:",
+        fallbackErr
+      );
+    }
+  }
+}
+
+/**
+ * Handle SSE events endpoint (/api/events)
+ */
+function handleSseEvents(req, res, searchParams) {
+  // Parse jobId from query parameters for filtering
+  const jobId = searchParams.get("jobId");
+
+  // Set SSE headers
+  res.writeHead(200, {
+    "Content-Type": "text/event-stream",
+    "Cache-Control": "no-cache",
+    Connection: "keep-alive",
+    "Access-Control-Allow-Origin": "*",
+  });
+
+  // Flush headers immediately
+  res.flushHeaders();
+
+  // Initial full-state is no longer sent over the SSE stream.
+  // Clients should fetch the snapshot from GET /api/state during bootstrap
+  // and then rely on SSE incremental events (state:change/state:summary).
+  // Keep headers flushed; sseRegistry.addClient will optionally send an initial ping.
+  // (Previously sent full state here; removed to reduce SSE payloads.)
+
+  // Add to SSE registry with jobId metadata for filtering
+  sseRegistry.addClient(res, { jobId });
+
+  // Start heartbeat for this connection
+  const heartbeatInterval = setInterval(() => {
+    try {
+      res.write(
+        `event: heartbeat\ndata: ${JSON.stringify({ timestamp: Date.now() })}\n\n`
+      );
+    } catch (err) {
+      // Client disconnected, stop heartbeat
+      clearInterval(heartbeatInterval);
+    }
+  }, 30000);
+
+  // Remove client on disconnect
+  req.on("close", () => {
+    clearInterval(heartbeatInterval);
+    sseRegistry.removeClient(res);
+  });
+}
+
+/**
+ * Handle API state endpoint (/api/state)
+ */
+async function handleApiState(req, res) {
+  if (req.method !== "GET") {
+    sendJson(res, 200, {
+      success: false,
+      error: "Method not allowed",
+      allowed: ["GET"],
+    });
+    return;
+  }
+
+  // Prefer returning in-memory state when available (tests and runtime rely on state.getState()).
+  // If in-memory state is available, return it directly; otherwise fall back to
+  // building a filesystem-backed snapshot for client bootstrap.
+  try {
+    // Dynamically import state to avoid circular dependencies
+    const state = await import("../state.js");
+
+    try {
+      if (state && typeof state.getState === "function") {
+        const inMemory = state.getState();
+        if (inMemory) {
+          sendJson(res, 200, inMemory);
+          return;
+        }
+      }
+    } catch (innerErr) {
+      // If reading in-memory state throws for some reason, fall back to snapshot
+      console.warn("Warning: failed to retrieve in-memory state:", innerErr);
+    }
+
+    // Build a filesystem-backed snapshot for client bootstrap.
+    // Dynamically import the composer and dependencies to avoid circular import issues.
+    const [
+      { buildSnapshotFromFilesystem },
+      jobScannerModule,
+      jobReaderModule,
+      statusTransformerModule,
+      configBridgeModule,
+    ] = await Promise.all([
+      import("../state-snapshot.js"),
+      import("../job-scanner.js").catch(() => null),
+      import("../job-reader.js").catch(() => null),
+      import("../transformers/status-transformer.js").catch(() => null),
+      import("../config-bridge.js").catch(() => null),
+    ]);
+
+    const snapshot = await buildSnapshotFromFilesystem({
+      listAllJobs:
+        jobScannerModule && jobScannerModule.listAllJobs
+          ? jobScannerModule.listAllJobs
+          : undefined,
+      readJob:
+        jobReaderModule && jobReaderModule.readJob
+          ? jobReaderModule.readJob
+          : undefined,
+      transformMultipleJobs:
+        statusTransformerModule && statusTransformerModule.transformMultipleJobs
+          ? statusTransformerModule.transformMultipleJobs
+          : undefined,
+      now: () => new Date(),
+      paths: (configBridgeModule && configBridgeModule.PATHS) || undefined,
+    });
+
+    sendJson(res, 200, snapshot);
+  } catch (err) {
+    console.error("Failed to build /api/state snapshot:", err);
+    sendJson(res, 500, {
+      ok: false,
+      code: "snapshot_error",
+      message: "Failed to build state snapshot",
+      details: err && err.message ? err.message : String(err),
+    });
+  }
+}
+
+export { handleSseEvents, handleApiState, broadcastStateUpdate };
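
A client that follows the bootstrap-then-stream contract described in the comments above would fetch the full snapshot from GET /api/state once and then subscribe to /api/events for incremental state:change and state:summary events. The exact wire framing depends on sseRegistry.broadcast (src/ui/sse-broadcast.js, not shown in this excerpt), so the sketch below assumes broadcast payloads arrive as JSON on the default message event while heartbeats arrive as the named heartbeat event the handler writes; jobId, applyChange, and refetchIfStale are placeholders.

// Sketch only; the message-event framing for broadcast payloads is an assumption.
const jobId = "job-123"; // placeholder job id for per-job filtering
const snapshot = await fetch("/api/state").then((r) => r.json()); // bootstrap snapshot
const es = new EventSource(`/api/events?jobId=${encodeURIComponent(jobId)}`);

es.onmessage = (e) => {
  const event = JSON.parse(e.data);
  if (event.type === "state:change") {
    applyChange(event.data); // { path, lifecycle, jobId, ... }
  } else if (event.type === "state:summary") {
    refetchIfStale(event.data.changeCount);
  }
};

es.addEventListener("heartbeat", () => {
  // connection is alive; nothing to apply
});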
package/src/ui/endpoints/state-endpoint.js
@@ -0,0 +1,85 @@
+/**
+ * Handle GET /api/state endpoint
+ */
+import * as state from "../state.js";
+import { sendJson } from "../utils/http-utils.js";
+
+export async function handleApiState(req, res) {
+  if (req.method !== "GET") {
+    res.writeHead(200, { "Content-Type": "application/json" });
+    res.end(
+      JSON.stringify({
+        success: false,
+        error: "Method not allowed",
+        allowed: ["GET"],
+      })
+    );
+    return;
+  }
+
+  // Prefer returning in-memory state when available (tests and runtime rely on state.getState()).
+  // If in-memory state is available, return it directly; otherwise fall back to
+  // building a filesystem-backed snapshot for client bootstrap.
+  try {
+    try {
+      if (state && typeof state.getState === "function") {
+        const inMemory = state.getState();
+        if (inMemory) {
+          res.writeHead(200, { "Content-Type": "application/json" });
+          res.end(JSON.stringify(inMemory));
+          return;
+        }
+      }
+    } catch (innerErr) {
+      // If reading in-memory state throws for some reason, fall back to snapshot
+      console.warn("Warning: failed to retrieve in-memory state:", innerErr);
+    }
+
+    // Build a filesystem-backed snapshot for client bootstrap.
+    // Dynamically import the composer and dependencies to avoid circular import issues.
+    const [
+      { buildSnapshotFromFilesystem },
+      jobScannerModule,
+      jobReaderModule,
+      statusTransformerModule,
+      configBridgeModule,
+    ] = await Promise.all([
+      import("../state-snapshot.js"),
+      import("../job-scanner.js").catch(() => null),
+      import("../job-reader.js").catch(() => null),
+      import("../transformers/status-transformer.js").catch(() => null),
+      import("../config-bridge.js").catch(() => null),
+    ]);
+
+    const snapshot = await buildSnapshotFromFilesystem({
+      listAllJobs:
+        jobScannerModule && jobScannerModule.listAllJobs
+          ? jobScannerModule.listAllJobs
+          : undefined,
+      readJob:
+        jobReaderModule && jobReaderModule.readJob
+          ? jobReaderModule.readJob
+          : undefined,
+      transformMultipleJobs:
+        statusTransformerModule && statusTransformerModule.transformMultipleJobs
+          ? statusTransformerModule.transformMultipleJobs
+          : undefined,
+      now: () => new Date(),
+      paths: (configBridgeModule && configBridgeModule.PATHS) || undefined,
+    });
+
+    res.writeHead(200, { "Content-Type": "application/json" });
+    res.end(JSON.stringify(snapshot));
+  } catch (err) {
+    console.error("Failed to build /api/state snapshot:", err);
+    res.writeHead(500, { "Content-Type": "application/json" });
+    res.end(
+      JSON.stringify({
+        ok: false,
+        code: "snapshot_error",
+        message: "Failed to build state snapshot",
+        details: err && err.message ? err.message : String(err),
+      })
+    );
+  }
+}
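
Both copies of handleApiState build the fallback snapshot by injecting listAllJobs, readJob, and transformMultipleJobs into buildSnapshotFromFilesystem, so the composer can be exercised without touching the real pipeline-data tree. Below is a test-style sketch under that assumption; the stub signatures and the PATHS shape are guesses, since only this call site is visible in the diff.

// Hypothetical unit-test sketch: stub the injected readers instead of scanning disk.
import { buildSnapshotFromFilesystem } from "../state-snapshot.js";

const snapshot = await buildSnapshotFromFilesystem({
  listAllJobs: async () => ["job-1"],            // stubbed scanner (assumed signature)
  readJob: async (id) => ({ id, tasks: {} }),    // stubbed reader (assumed signature)
  transformMultipleJobs: (jobs) => jobs,         // identity transform
  now: () => new Date("2024-01-01T00:00:00Z"),   // frozen clock
  paths: { pipelineData: "/tmp/pipeline-data" }, // assumed PATHS shape
});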
package/src/ui/endpoints/task-analysis-endpoint.js
@@ -0,0 +1,104 @@
+/**
+ * Task analysis endpoint
+ *
+ * Exports:
+ * - handleTaskAnalysisRequest(req, res) -> HTTP handler function
+ *
+ * Returns task analysis data if available, or null if no analysis file exists.
+ */
+
+import { getPipelineConfig } from "../../core/config.js";
+import { sendJson } from "../utils/http-utils.js";
+import { promises as fs } from "node:fs";
+import path from "node:path";
+
+/**
+ * HTTP handler for task analysis requests.
+ *
+ * @param {http.IncomingMessage} req - HTTP request object
+ * @param {http.ServerResponse} res - HTTP response object
+ */
+export async function handleTaskAnalysisRequest(req, res) {
+  const { slug, taskId } = req.params;
+
+  // Validate slug parameter
+  if (!slug || typeof slug !== "string" || !/^[A-Za-z0-9_-]+$/.test(slug)) {
+    return sendJson(res, 400, {
+      ok: false,
+      code: "invalid_params",
+      message: "Invalid slug parameter",
+    });
+  }
+
+  // Validate taskId parameter
+  if (
+    !taskId ||
+    typeof taskId !== "string" ||
+    !/^[A-Za-z0-9_-]+$/.test(taskId)
+  ) {
+    return sendJson(res, 400, {
+      ok: false,
+      code: "invalid_params",
+      message: "Invalid taskId parameter",
+    });
+  }
+
+  try {
+    // Get pipeline configuration
+    let pipelineConfig;
+    try {
+      pipelineConfig = getPipelineConfig(slug);
+    } catch (error) {
+      return sendJson(res, 404, {
+        ok: false,
+        code: "not_found",
+        message: `Pipeline '${slug}' not found in registry`,
+      });
+    }
+
+    const pipelineDir = path.dirname(pipelineConfig.pipelineJsonPath);
+    const analysisPath = path.join(
+      pipelineDir,
+      "analysis",
+      `${taskId}.analysis.json`
+    );
+
+    // Attempt to read and parse analysis file
+    let analysisData;
+    try {
+      const contents = await fs.readFile(analysisPath, "utf8");
+      analysisData = JSON.parse(contents);
+    } catch (error) {
+      if (error.code === "ENOENT") {
+        // Analysis file doesn't exist - this is not an error
+        return sendJson(res, 200, {
+          ok: true,
+          data: null,
+        });
+      }
+
+      if (error instanceof SyntaxError) {
+        return sendJson(res, 500, {
+          ok: false,
+          code: "invalid_json",
+          message: "Invalid JSON in analysis file",
+        });
+      }
+
+      throw error;
+    }
+
+    // Return analysis data
+    return sendJson(res, 200, {
+      ok: true,
+      data: analysisData,
+    });
+  } catch (error) {
+    console.error("handleTaskAnalysisRequest unexpected error:", error);
+    return sendJson(res, 500, {
+      ok: false,
+      code: "internal_error",
+      message: "Internal server error",
+    });
+  }
+}
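
Note that a missing analysis file is reported as { ok: true, data: null } rather than a 404, so callers need a null check for the common "not analyzed yet" case instead of error handling. A hedged client sketch follows; the route shape is assumed, while the envelope comes from the handler above.

// Assumed URL shape; the { ok, data } envelope and the data === null convention
// for "no <taskId>.analysis.json yet" come from the handler above.
async function fetchTaskAnalysis(slug, taskId) {
  const res = await fetch(`/api/pipelines/${slug}/tasks/${taskId}/analysis`);
  const body = await res.json();
  if (!body.ok) throw new Error(`${body.code}: ${body.message}`);
  return body.data; // null when the task has not been analyzed yet
}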
package/src/ui/endpoints/task-creation-endpoint.js
@@ -0,0 +1,114 @@
+import fs from "node:fs";
+import { streamSSE } from "../lib/sse.js";
+import { createHighLevelLLM } from "../../llm/index.js";
+
+export async function handleTaskPlan(req, res) {
+  console.log("[task-creation-endpoint] Request received");
+
+  const { messages, pipelineSlug } = req.body;
+
+  console.log("[task-creation-endpoint] Request details:", {
+    hasMessages: !!messages,
+    messageCount: Array.isArray(messages) ? messages.length : 0,
+    pipelineSlug,
+    bodyKeys: Object.keys(req.body),
+  });
+
+  // Validate input
+  if (!Array.isArray(messages)) {
+    console.error(
+      "[task-creation-endpoint] Validation failed: messages is not an array"
+    );
+    res.status(400).json({ error: "messages must be an array" });
+    return;
+  }
+
+  console.log(
+    "[task-creation-endpoint] Loading guidelines from docs/pipeline-task-guidelines.md..."
+  );
+
+  // Load guidelines - let it throw if missing
+  const guidelinesPath = "docs/pipeline-task-guidelines.md";
+  const guidelines = fs.readFileSync(guidelinesPath, "utf-8");
+
+  console.log(
+    "[task-creation-endpoint] Guidelines loaded, length:",
+    guidelines.length
+  );
+
+  // Build LLM messages array
+  const systemPrompt = `You are a pipeline task assistant. Help users create task definitions following these guidelines:
+
+${guidelines}
+
+Provide complete, working code. Use markdown code blocks.
+
+When you have completed a task definition that the user wants to create, wrap it in this format:
+[TASK_PROPOSAL]
+FILENAME: <filename.js>
+TASKNAME: <task-name>
+CODE:
+\`\`\`javascript
+<the complete task code here>
+\`\`\`
+[/TASK_PROPOSAL]`;
+
+  const llmMessages = [{ role: "system", content: systemPrompt }, ...messages];
+
+  console.log("[task-creation-endpoint] LLM messages array created:", {
+    totalMessages: llmMessages.length,
+    systemPromptLength: systemPrompt.length,
+  });
+
+  // Create SSE stream
+  console.log("[task-creation-endpoint] Creating SSE stream...");
+  const sse = streamSSE(res);
+
+  try {
+    console.log("[task-creation-endpoint] Creating LLM instance...");
+    // Get LLM instance (uses default provider from config)
+    const llm = createHighLevelLLM();
+
+    console.log("[task-creation-endpoint] Calling LLM chat with streaming...");
+    // Call LLM with streaming enabled
+    const response = await llm.chat({
+      messages: llmMessages,
+      responseFormat: "text",
+      stream: true,
+    });
+
+    console.log("[task-creation-endpoint] LLM response received:", {
+      isStream: typeof response[Symbol.asyncIterator] !== "undefined",
+    });
+
+    // Stream is an async generator
+    let chunkCount = 0;
+    for await (const chunk of response) {
+      if (chunk?.content) {
+        sse.send("chunk", { content: chunk.content });
+        chunkCount++;
+      }
+    }
+
+    console.log("[task-creation-endpoint] Sent", chunkCount, "chunks via SSE");
+
+    // Send done event
+    console.log("[task-creation-endpoint] Sending 'done' event...");
+    sse.send("done", {});
+    console.log("[task-creation-endpoint] Ending SSE stream...");
+    sse.end();
+    console.log("[task-creation-endpoint] Request completed successfully");
+  } catch (error) {
+    console.error("[task-creation-endpoint] Error occurred:", {
+      message: error.message,
+      stack: error.stack,
+      name: error.name,
+    });
+    // Send error event
+    sse.send("error", { message: error.message });
+    console.log(
+      "[task-creation-endpoint] Error sent via SSE, ending stream..."
+    );
+    sse.end();
+  }
+}