@ryanfw/prompt-orchestration-pipeline 0.11.0 → 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +11 -1
- package/src/cli/analyze-task.js +51 -0
- package/src/cli/index.js +8 -0
- package/src/components/AddPipelineSidebar.jsx +144 -0
- package/src/components/AnalysisProgressTray.jsx +87 -0
- package/src/components/DAGGrid.jsx +157 -47
- package/src/components/JobTable.jsx +4 -3
- package/src/components/Layout.jsx +142 -139
- package/src/components/MarkdownRenderer.jsx +149 -0
- package/src/components/PipelineDAGGrid.jsx +404 -0
- package/src/components/PipelineTypeTaskSidebar.jsx +96 -0
- package/src/components/SchemaPreviewPanel.jsx +97 -0
- package/src/components/StageTimeline.jsx +36 -0
- package/src/components/TaskAnalysisDisplay.jsx +227 -0
- package/src/components/TaskCreationSidebar.jsx +447 -0
- package/src/components/TaskDetailSidebar.jsx +119 -117
- package/src/components/TaskFilePane.jsx +94 -39
- package/src/components/ui/RestartJobModal.jsx +26 -6
- package/src/components/ui/StopJobModal.jsx +183 -0
- package/src/components/ui/button.jsx +59 -27
- package/src/components/ui/sidebar.jsx +118 -0
- package/src/config/models.js +99 -67
- package/src/core/config.js +11 -4
- package/src/core/lifecycle-policy.js +62 -0
- package/src/core/pipeline-runner.js +312 -217
- package/src/core/status-writer.js +84 -0
- package/src/llm/index.js +129 -9
- package/src/pages/Code.jsx +8 -1
- package/src/pages/PipelineDetail.jsx +84 -2
- package/src/pages/PipelineList.jsx +214 -0
- package/src/pages/PipelineTypeDetail.jsx +234 -0
- package/src/pages/PromptPipelineDashboard.jsx +10 -11
- package/src/providers/deepseek.js +76 -16
- package/src/providers/openai.js +61 -34
- package/src/task-analysis/enrichers/analysis-writer.js +62 -0
- package/src/task-analysis/enrichers/schema-deducer.js +145 -0
- package/src/task-analysis/enrichers/schema-writer.js +74 -0
- package/src/task-analysis/extractors/artifacts.js +137 -0
- package/src/task-analysis/extractors/llm-calls.js +176 -0
- package/src/task-analysis/extractors/stages.js +51 -0
- package/src/task-analysis/index.js +103 -0
- package/src/task-analysis/parser.js +28 -0
- package/src/task-analysis/utils/ast.js +43 -0
- package/src/ui/client/adapters/job-adapter.js +60 -0
- package/src/ui/client/api.js +233 -8
- package/src/ui/client/hooks/useAnalysisProgress.js +145 -0
- package/src/ui/client/hooks/useJobList.js +14 -1
- package/src/ui/client/index.css +64 -0
- package/src/ui/client/main.jsx +4 -0
- package/src/ui/client/sse-fetch.js +120 -0
- package/src/ui/dist/app.js +262 -0
- package/src/ui/dist/assets/index-cjHV9mYW.js +82578 -0
- package/src/ui/dist/assets/index-cjHV9mYW.js.map +1 -0
- package/src/ui/dist/assets/style-CoM9SoQF.css +180 -0
- package/src/ui/dist/favicon.svg +12 -0
- package/src/ui/dist/index.html +2 -2
- package/src/ui/endpoints/create-pipeline-endpoint.js +194 -0
- package/src/ui/endpoints/file-endpoints.js +330 -0
- package/src/ui/endpoints/job-control-endpoints.js +1001 -0
- package/src/ui/endpoints/job-endpoints.js +62 -0
- package/src/ui/endpoints/pipeline-analysis-endpoint.js +246 -0
- package/src/ui/endpoints/pipeline-type-detail-endpoint.js +181 -0
- package/src/ui/endpoints/pipelines-endpoint.js +133 -0
- package/src/ui/endpoints/schema-file-endpoint.js +105 -0
- package/src/ui/endpoints/sse-endpoints.js +223 -0
- package/src/ui/endpoints/state-endpoint.js +85 -0
- package/src/ui/endpoints/task-analysis-endpoint.js +104 -0
- package/src/ui/endpoints/task-creation-endpoint.js +114 -0
- package/src/ui/endpoints/task-save-endpoint.js +101 -0
- package/src/ui/endpoints/upload-endpoints.js +406 -0
- package/src/ui/express-app.js +227 -0
- package/src/ui/lib/analysis-lock.js +67 -0
- package/src/ui/lib/sse.js +30 -0
- package/src/ui/server.js +42 -1880
- package/src/ui/sse-broadcast.js +93 -0
- package/src/ui/utils/http-utils.js +139 -0
- package/src/ui/utils/mime-types.js +196 -0
- package/src/ui/utils/slug.js +31 -0
- package/src/ui/vite.config.js +22 -0
- package/src/ui/watcher.js +28 -2
- package/src/utils/jobs.js +39 -0
- package/src/ui/dist/assets/index-DeDzq-Kk.js +0 -23863
- package/src/ui/dist/assets/style-aBtD_Yrs.css +0 -62
package/src/ui/server.js
CHANGED
|
@@ -4,29 +4,15 @@
|
|
|
4
4
|
*/
|
|
5
5
|
|
|
6
6
|
import http from "http";
|
|
7
|
-
import fs from "fs";
|
|
8
7
|
import path from "path";
|
|
9
8
|
import { fileURLToPath } from "url";
|
|
9
|
+
import { loadEnvironment } from "../core/environment.js";
|
|
10
10
|
import { start as startWatcher, stop as stopWatcher } from "./watcher.js";
|
|
11
11
|
import * as state from "./state.js";
|
|
12
|
-
// Import orchestrator-related functions only in non-test mode
|
|
13
|
-
let submitJobWithValidation;
|
|
14
12
|
import { sseRegistry } from "./sse.js";
|
|
15
|
-
import {
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
} from "../core/status-writer.js";
|
|
19
|
-
import { spawn } from "node:child_process";
|
|
20
|
-
import {
|
|
21
|
-
getPendingSeedPath,
|
|
22
|
-
resolvePipelinePaths,
|
|
23
|
-
getJobDirectoryPath,
|
|
24
|
-
getJobMetadataPath,
|
|
25
|
-
getJobPipelinePath,
|
|
26
|
-
} from "../config/paths.js";
|
|
27
|
-
import { handleJobList, handleJobDetail } from "./endpoints/job-endpoints.js";
|
|
28
|
-
import { generateJobId } from "../utils/id-generator.js";
|
|
29
|
-
import { extractSeedZip } from "./zip-utils.js";
|
|
13
|
+
import { resolvePipelinePaths } from "../config/paths.js";
|
|
14
|
+
import { broadcastStateUpdate } from "./sse-broadcast.js";
|
|
15
|
+
import { buildExpressApp } from "./express-app.js";
|
|
30
16
|
|
|
31
17
|
// Get __dirname equivalent in ES modules
|
|
32
18
|
const __filename = fileURLToPath(import.meta.url);
|
|
@@ -35,22 +21,6 @@ const __dirname = path.dirname(__filename);
|
|
|
35
21
|
// Vite dev server instance (populated in development mode)
|
|
36
22
|
let viteServer = null;
|
|
37
23
|
|
|
38
|
-
// In-memory restart guard to prevent duplicate concurrent restarts per job
|
|
39
|
-
const restartingJobs = new Set();
|
|
40
|
-
|
|
41
|
-
// Helper functions for restart guard
|
|
42
|
-
function isRestartInProgress(jobId) {
|
|
43
|
-
return restartingJobs.has(jobId);
|
|
44
|
-
}
|
|
45
|
-
|
|
46
|
-
function beginRestart(jobId) {
|
|
47
|
-
restartingJobs.add(jobId);
|
|
48
|
-
}
|
|
49
|
-
|
|
50
|
-
function endRestart(jobId) {
|
|
51
|
-
restartingJobs.delete(jobId);
|
|
52
|
-
}
|
|
53
|
-
|
|
54
24
|
// Configuration
|
|
55
25
|
const PORT = process.env.PORT || 4000;
|
|
56
26
|
const WATCHED_PATHS = (
|
|
@@ -64,408 +34,8 @@ const WATCHED_PATHS = (
|
|
|
64
34
|
const HEARTBEAT_INTERVAL = 30000; // 30 seconds
|
|
65
35
|
const DATA_DIR = process.env.PO_ROOT || process.cwd();
|
|
66
36
|
|
|
67
|
-
/**
|
|
68
|
-
* Resolve job lifecycle directory deterministically
|
|
69
|
-
* @param {string} dataDir - Base data directory
|
|
70
|
-
* @param {string} jobId - Job identifier
|
|
71
|
-
* @returns {Promise<string|null>} One of "current", "complete", "rejected", or null if job not found
|
|
72
|
-
*/
|
|
73
|
-
async function resolveJobLifecycle(dataDir, jobId) {
|
|
74
|
-
const currentJobDir = getJobDirectoryPath(dataDir, jobId, "current");
|
|
75
|
-
const completeJobDir = getJobDirectoryPath(dataDir, jobId, "complete");
|
|
76
|
-
const rejectedJobDir = getJobDirectoryPath(dataDir, jobId, "rejected");
|
|
77
|
-
|
|
78
|
-
// Check in order of preference: current > complete > rejected
|
|
79
|
-
if (await exists(currentJobDir)) {
|
|
80
|
-
return "current";
|
|
81
|
-
}
|
|
82
|
-
|
|
83
|
-
if (await exists(completeJobDir)) {
|
|
84
|
-
return "complete";
|
|
85
|
-
}
|
|
86
|
-
|
|
87
|
-
if (await exists(rejectedJobDir)) {
|
|
88
|
-
return "rejected";
|
|
89
|
-
}
|
|
90
|
-
|
|
91
|
-
// Job not found in any lifecycle
|
|
92
|
-
return null;
|
|
93
|
-
}
|
|
94
|
-
|
|
95
|
-
function hasValidPayload(seed) {
|
|
96
|
-
if (!seed || typeof seed !== "object") return false;
|
|
97
|
-
const hasData = seed.data && typeof seed.data === "object";
|
|
98
|
-
const hasPipelineParams =
|
|
99
|
-
typeof seed.pipeline === "string" &&
|
|
100
|
-
seed.params &&
|
|
101
|
-
typeof seed.params === "object";
|
|
102
|
-
return hasData || hasPipelineParams;
|
|
103
|
-
}
|
|
104
|
-
|
|
105
|
-
/**
|
|
106
|
-
* Handle seed upload directly without starting orchestrator (for test environment)
|
|
107
|
-
* @param {Object} seedObject - Seed object to upload
|
|
108
|
-
* @param {string} dataDir - Base data directory
|
|
109
|
-
* @param {Array} uploadArtifacts - Array of {filename, content} objects
|
|
110
|
-
* @returns {Promise<Object>} Result object
|
|
111
|
-
*/
|
|
112
|
-
async function handleSeedUploadDirect(
|
|
113
|
-
seedObject,
|
|
114
|
-
dataDir,
|
|
115
|
-
uploadArtifacts = []
|
|
116
|
-
) {
|
|
117
|
-
let partialFiles = [];
|
|
118
|
-
|
|
119
|
-
try {
|
|
120
|
-
// Basic validation
|
|
121
|
-
if (
|
|
122
|
-
!seedObject.name ||
|
|
123
|
-
typeof seedObject.name !== "string" ||
|
|
124
|
-
seedObject.name.trim() === ""
|
|
125
|
-
) {
|
|
126
|
-
return {
|
|
127
|
-
success: false,
|
|
128
|
-
message: "Required fields missing",
|
|
129
|
-
};
|
|
130
|
-
}
|
|
131
|
-
|
|
132
|
-
if (!hasValidPayload(seedObject)) {
|
|
133
|
-
return { success: false, message: "Required fields missing" };
|
|
134
|
-
}
|
|
135
|
-
|
|
136
|
-
// Validate name format using the same logic as seed validator
|
|
137
|
-
if (
|
|
138
|
-
!seedObject.name ||
|
|
139
|
-
typeof seedObject.name !== "string" ||
|
|
140
|
-
seedObject.name.trim() === ""
|
|
141
|
-
) {
|
|
142
|
-
return {
|
|
143
|
-
success: false,
|
|
144
|
-
message: "name field is required",
|
|
145
|
-
};
|
|
146
|
-
}
|
|
147
|
-
|
|
148
|
-
const trimmedName = seedObject.name.trim();
|
|
149
|
-
if (trimmedName.length > 120) {
|
|
150
|
-
return {
|
|
151
|
-
success: false,
|
|
152
|
-
message: "name must be 120 characters or less",
|
|
153
|
-
};
|
|
154
|
-
}
|
|
155
|
-
|
|
156
|
-
// Allow spaces and common punctuation for better UX
|
|
157
|
-
// Still disallow control characters and path traversal patterns
|
|
158
|
-
const dangerousPattern = /[\x00-\x1f\x7f-\x9f]/;
|
|
159
|
-
if (dangerousPattern.test(trimmedName)) {
|
|
160
|
-
return {
|
|
161
|
-
success: false,
|
|
162
|
-
message: "name must contain only printable characters",
|
|
163
|
-
};
|
|
164
|
-
}
|
|
165
|
-
|
|
166
|
-
// Update seedObject with validated trimmed name
|
|
167
|
-
seedObject.name = trimmedName;
|
|
168
|
-
|
|
169
|
-
// Generate a random job ID
|
|
170
|
-
const jobId = generateJobId();
|
|
171
|
-
|
|
172
|
-
// Get the paths
|
|
173
|
-
const paths = resolvePipelinePaths(dataDir);
|
|
174
|
-
const pendingPath = getPendingSeedPath(dataDir, jobId);
|
|
175
|
-
const currentJobDir = getJobDirectoryPath(dataDir, jobId, "current");
|
|
176
|
-
const jobMetadataPath = getJobMetadataPath(dataDir, jobId, "current");
|
|
177
|
-
const jobPipelinePath = getJobPipelinePath(dataDir, jobId, "current");
|
|
178
|
-
|
|
179
|
-
// Ensure directories exist
|
|
180
|
-
await fs.promises.mkdir(paths.pending, { recursive: true });
|
|
181
|
-
await fs.promises.mkdir(currentJobDir, { recursive: true });
|
|
182
|
-
|
|
183
|
-
// Create job metadata
|
|
184
|
-
const jobMetadata = {
|
|
185
|
-
id: jobId,
|
|
186
|
-
name: seedObject.name,
|
|
187
|
-
pipeline: seedObject.pipeline || "default",
|
|
188
|
-
createdAt: new Date().toISOString(),
|
|
189
|
-
status: "pending",
|
|
190
|
-
};
|
|
191
|
-
|
|
192
|
-
// Read pipeline configuration for snapshot
|
|
193
|
-
let pipelineSnapshot = null;
|
|
194
|
-
try {
|
|
195
|
-
const pipelineConfigPath = path.join(
|
|
196
|
-
dataDir,
|
|
197
|
-
"pipeline-config",
|
|
198
|
-
"pipeline.json"
|
|
199
|
-
);
|
|
200
|
-
const pipelineContent = await fs.promises.readFile(
|
|
201
|
-
pipelineConfigPath,
|
|
202
|
-
"utf8"
|
|
203
|
-
);
|
|
204
|
-
pipelineSnapshot = JSON.parse(pipelineContent);
|
|
205
|
-
} catch (error) {
|
|
206
|
-
// If pipeline config doesn't exist, create a minimal snapshot
|
|
207
|
-
pipelineSnapshot = {
|
|
208
|
-
tasks: [],
|
|
209
|
-
name: seedObject.pipeline || "default",
|
|
210
|
-
};
|
|
211
|
-
}
|
|
212
|
-
|
|
213
|
-
// Write files atomically
|
|
214
|
-
partialFiles.push(pendingPath);
|
|
215
|
-
await fs.promises.writeFile(
|
|
216
|
-
pendingPath,
|
|
217
|
-
JSON.stringify(seedObject, null, 2)
|
|
218
|
-
);
|
|
219
|
-
|
|
220
|
-
partialFiles.push(jobMetadataPath);
|
|
221
|
-
await fs.promises.writeFile(
|
|
222
|
-
jobMetadataPath,
|
|
223
|
-
JSON.stringify(jobMetadata, null, 2)
|
|
224
|
-
);
|
|
225
|
-
|
|
226
|
-
partialFiles.push(jobPipelinePath);
|
|
227
|
-
await fs.promises.writeFile(
|
|
228
|
-
jobPipelinePath,
|
|
229
|
-
JSON.stringify(pipelineSnapshot, null, 2)
|
|
230
|
-
);
|
|
231
|
-
|
|
232
|
-
// Initialize job artifacts if any provided
|
|
233
|
-
if (uploadArtifacts.length > 0) {
|
|
234
|
-
try {
|
|
235
|
-
await initializeJobArtifacts(currentJobDir, uploadArtifacts);
|
|
236
|
-
} catch (artifactError) {
|
|
237
|
-
// Don't fail the upload if artifact initialization fails, just log the error
|
|
238
|
-
console.error("Failed to initialize job artifacts:", artifactError);
|
|
239
|
-
}
|
|
240
|
-
}
|
|
241
|
-
|
|
242
|
-
return {
|
|
243
|
-
success: true,
|
|
244
|
-
jobId,
|
|
245
|
-
jobName: seedObject.name,
|
|
246
|
-
message: "Seed file uploaded successfully",
|
|
247
|
-
};
|
|
248
|
-
} catch (error) {
|
|
249
|
-
// Clean up any partial files on failure
|
|
250
|
-
for (const filePath of partialFiles) {
|
|
251
|
-
try {
|
|
252
|
-
await fs.promises.unlink(filePath);
|
|
253
|
-
} catch (cleanupError) {
|
|
254
|
-
// Ignore cleanup errors
|
|
255
|
-
}
|
|
256
|
-
}
|
|
257
|
-
|
|
258
|
-
return {
|
|
259
|
-
success: false,
|
|
260
|
-
message: error.message || "Internal server error",
|
|
261
|
-
};
|
|
262
|
-
}
|
|
263
|
-
}
|
|
264
|
-
|
|
265
|
-
// SSE clients management
|
|
266
37
|
let heartbeatTimer = null;
|
|
267
38
|
|
|
268
|
-
// Helper functions for consistent API responses
|
|
269
|
-
const sendJson = (res, code, obj) => {
|
|
270
|
-
res.writeHead(code, {
|
|
271
|
-
"content-type": "application/json",
|
|
272
|
-
connection: "close",
|
|
273
|
-
});
|
|
274
|
-
res.end(JSON.stringify(obj));
|
|
275
|
-
};
|
|
276
|
-
|
|
277
|
-
const exists = async (p) =>
|
|
278
|
-
fs.promises
|
|
279
|
-
.access(p)
|
|
280
|
-
.then(() => true)
|
|
281
|
-
.catch(() => false);
|
|
282
|
-
|
|
283
|
-
async function readRawBody(req, maxBytes = 2 * 1024 * 1024) {
|
|
284
|
-
// 2MB guard
|
|
285
|
-
const chunks = [];
|
|
286
|
-
let total = 0;
|
|
287
|
-
for await (const chunk of req) {
|
|
288
|
-
total += chunk.length;
|
|
289
|
-
if (total > maxBytes) throw new Error("Payload too large");
|
|
290
|
-
chunks.push(chunk);
|
|
291
|
-
}
|
|
292
|
-
return Buffer.concat(chunks);
|
|
293
|
-
}
|
|
294
|
-
|
|
295
|
-
function extractJsonFromMultipart(raw, contentType) {
|
|
296
|
-
const m = /boundary=([^;]+)/i.exec(contentType || "");
|
|
297
|
-
if (!m) throw new Error("Missing multipart boundary");
|
|
298
|
-
const boundary = `--${m[1]}`;
|
|
299
|
-
const parts = raw.toString("utf8").split(boundary);
|
|
300
|
-
const filePart = parts.find((p) => /name="file"/i.test(p));
|
|
301
|
-
if (!filePart) throw new Error("Missing file part");
|
|
302
|
-
const [, , body] = filePart.split(/\r\n\r\n/);
|
|
303
|
-
if (!body) throw new Error("Empty file part");
|
|
304
|
-
// strip trailing CRLF + terminating dashes
|
|
305
|
-
return body.replace(/\r\n--\s*$/, "").trim();
|
|
306
|
-
}
|
|
307
|
-
|
|
308
|
-
/**
|
|
309
|
-
* Broadcast state update to all SSE clients
|
|
310
|
-
*
|
|
311
|
-
* NOTE: Per plan, SSE should emit compact, incremental events rather than
|
|
312
|
-
* streaming the full application state. Use /api/state for full snapshot
|
|
313
|
-
* retrieval on client bootstrap. This function will emit only the most
|
|
314
|
-
* recent change when available (type: "state:change") and fall back to a
|
|
315
|
-
* lightweight summary event if no recent change is present.
|
|
316
|
-
*/
|
|
317
|
-
function decorateChangeWithJobId(change) {
|
|
318
|
-
if (!change || typeof change !== "object") return change;
|
|
319
|
-
const normalizedPath = String(change.path || "").replace(/\\/g, "/");
|
|
320
|
-
const match = normalizedPath.match(
|
|
321
|
-
/pipeline-data\/(current|complete|pending|rejected)\/([^/]+)/
|
|
322
|
-
);
|
|
323
|
-
if (!match) {
|
|
324
|
-
return change;
|
|
325
|
-
}
|
|
326
|
-
return {
|
|
327
|
-
...change,
|
|
328
|
-
lifecycle: match[1],
|
|
329
|
-
jobId: match[2],
|
|
330
|
-
};
|
|
331
|
-
}
|
|
332
|
-
|
|
333
|
-
function prioritizeJobStatusChange(changes = []) {
|
|
334
|
-
const normalized = changes.map((change) => decorateChangeWithJobId(change));
|
|
335
|
-
const statusChange = normalized.find(
|
|
336
|
-
(change) =>
|
|
337
|
-
typeof change?.path === "string" &&
|
|
338
|
-
/tasks-status\.json$/.test(change.path)
|
|
339
|
-
);
|
|
340
|
-
return statusChange || normalized[0] || null;
|
|
341
|
-
}
|
|
342
|
-
|
|
343
|
-
/**
|
|
344
|
-
* Normalize seed upload from various input formats
|
|
345
|
-
* @param {http.IncomingMessage} req - HTTP request
|
|
346
|
-
* @param {string} contentTypeHeader - Content-Type header
|
|
347
|
-
* @returns {Promise<{seedObject: Object, uploadArtifacts: Array<{filename: string, content: Buffer}>}>}
|
|
348
|
-
*/
|
|
349
|
-
async function normalizeSeedUpload({ req, contentTypeHeader }) {
|
|
350
|
-
// Handle application/json uploads
|
|
351
|
-
if (contentTypeHeader.includes("application/json")) {
|
|
352
|
-
const buffer = await readRawBody(req);
|
|
353
|
-
try {
|
|
354
|
-
const seedObject = JSON.parse(buffer.toString("utf8") || "{}");
|
|
355
|
-
return {
|
|
356
|
-
seedObject,
|
|
357
|
-
uploadArtifacts: [{ filename: "seed.json", content: buffer }],
|
|
358
|
-
};
|
|
359
|
-
} catch (error) {
|
|
360
|
-
throw new Error("Invalid JSON");
|
|
361
|
-
}
|
|
362
|
-
}
|
|
363
|
-
|
|
364
|
-
// Handle multipart form data uploads
|
|
365
|
-
const formData = await parseMultipartFormData(req);
|
|
366
|
-
if (!formData.contentBuffer) {
|
|
367
|
-
throw new Error("No file content found");
|
|
368
|
-
}
|
|
369
|
-
|
|
370
|
-
// Check if this is a zip file
|
|
371
|
-
const isZipFile =
|
|
372
|
-
formData.contentType === "application/zip" ||
|
|
373
|
-
formData.filename?.toLowerCase().endsWith(".zip");
|
|
374
|
-
|
|
375
|
-
if (isZipFile) {
|
|
376
|
-
console.log("[UPLOAD] Detected zip upload", {
|
|
377
|
-
filename: formData.filename,
|
|
378
|
-
contentType: formData.contentType,
|
|
379
|
-
bufferSize: formData.contentBuffer.length,
|
|
380
|
-
});
|
|
381
|
-
|
|
382
|
-
// Handle zip upload
|
|
383
|
-
try {
|
|
384
|
-
const { seedObject, artifacts } = await extractSeedZip(
|
|
385
|
-
formData.contentBuffer
|
|
386
|
-
);
|
|
387
|
-
console.log("[UPLOAD] Zip extraction completed", {
|
|
388
|
-
artifactCount: artifacts.length,
|
|
389
|
-
artifactNames: artifacts.map((a) => a.filename),
|
|
390
|
-
seedKeys: Object.keys(seedObject),
|
|
391
|
-
});
|
|
392
|
-
return {
|
|
393
|
-
seedObject,
|
|
394
|
-
uploadArtifacts: artifacts,
|
|
395
|
-
};
|
|
396
|
-
} catch (error) {
|
|
397
|
-
console.log("[UPLOAD] Zip extraction failed", {
|
|
398
|
-
error: error.message,
|
|
399
|
-
filename: formData.filename,
|
|
400
|
-
});
|
|
401
|
-
// Re-throw zip-specific errors with clear messages
|
|
402
|
-
throw new Error(error.message);
|
|
403
|
-
}
|
|
404
|
-
} else {
|
|
405
|
-
// Handle regular JSON file upload
|
|
406
|
-
try {
|
|
407
|
-
const seedObject = JSON.parse(formData.contentBuffer.toString("utf8"));
|
|
408
|
-
const filename = formData.filename || "seed.json";
|
|
409
|
-
return {
|
|
410
|
-
seedObject,
|
|
411
|
-
uploadArtifacts: [{ filename, content: formData.contentBuffer }],
|
|
412
|
-
};
|
|
413
|
-
} catch (error) {
|
|
414
|
-
throw new Error("Invalid JSON");
|
|
415
|
-
}
|
|
416
|
-
}
|
|
417
|
-
}
|
|
418
|
-
|
|
419
|
-
function broadcastStateUpdate(currentState) {
|
|
420
|
-
try {
|
|
421
|
-
const recentChanges = (currentState && currentState.recentChanges) || [];
|
|
422
|
-
const latest = prioritizeJobStatusChange(recentChanges);
|
|
423
|
-
console.debug("[Server] Broadcasting state update:", {
|
|
424
|
-
latest,
|
|
425
|
-
currentState,
|
|
426
|
-
});
|
|
427
|
-
if (latest) {
|
|
428
|
-
// Emit only the most recent change as a compact, typed event
|
|
429
|
-
const eventData = { type: "state:change", data: latest };
|
|
430
|
-
console.debug("[Server] Broadcasting event:", eventData);
|
|
431
|
-
sseRegistry.broadcast(eventData);
|
|
432
|
-
} else {
|
|
433
|
-
// Fallback: emit a minimal summary so clients can observe a state "tick"
|
|
434
|
-
const eventData = {
|
|
435
|
-
type: "state:summary",
|
|
436
|
-
data: {
|
|
437
|
-
changeCount:
|
|
438
|
-
currentState && currentState.changeCount
|
|
439
|
-
? currentState.changeCount
|
|
440
|
-
: 0,
|
|
441
|
-
},
|
|
442
|
-
};
|
|
443
|
-
console.debug("[Server] Broadcasting summary event:", eventData);
|
|
444
|
-
sseRegistry.broadcast(eventData);
|
|
445
|
-
}
|
|
446
|
-
} catch (err) {
|
|
447
|
-
// Defensive: if something unexpected happens, fall back to a lightweight notification
|
|
448
|
-
try {
|
|
449
|
-
console.error("[Server] Error in broadcastStateUpdate:", err);
|
|
450
|
-
sseRegistry.broadcast({
|
|
451
|
-
type: "state:summary",
|
|
452
|
-
data: {
|
|
453
|
-
changeCount:
|
|
454
|
-
currentState && currentState.changeCount
|
|
455
|
-
? currentState.changeCount
|
|
456
|
-
: 0,
|
|
457
|
-
},
|
|
458
|
-
});
|
|
459
|
-
} catch (fallbackErr) {
|
|
460
|
-
// Log the error to aid debugging; this should never happen unless sseRegistry.broadcast is broken
|
|
461
|
-
console.error(
|
|
462
|
-
"Failed to broadcast fallback state summary in broadcastStateUpdate:",
|
|
463
|
-
fallbackErr
|
|
464
|
-
);
|
|
465
|
-
}
|
|
466
|
-
}
|
|
467
|
-
}
|
|
468
|
-
|
|
469
39
|
/**
|
|
470
40
|
* Start heartbeat to keep connections alive
|
|
471
41
|
*/
|
|
@@ -481,1391 +51,12 @@ function startHeartbeat() {
|
|
|
481
51
|
}
|
|
482
52
|
|
|
483
53
|
/**
|
|
484
|
-
*
|
|
485
|
-
* @param {
|
|
486
|
-
* @returns {Promise<Object>} Parsed form data with file content as Buffer
|
|
487
|
-
*/
|
|
488
|
-
function parseMultipartFormData(req) {
|
|
489
|
-
return new Promise((resolve, reject) => {
|
|
490
|
-
const chunks = [];
|
|
491
|
-
let boundary = null;
|
|
492
|
-
|
|
493
|
-
// Extract boundary from content-type header
|
|
494
|
-
const contentType = req.headers["content-type"];
|
|
495
|
-
if (!contentType || !contentType.includes("multipart/form-data")) {
|
|
496
|
-
reject(new Error("Invalid content-type: expected multipart/form-data"));
|
|
497
|
-
return;
|
|
498
|
-
}
|
|
499
|
-
|
|
500
|
-
const boundaryMatch = contentType.match(/boundary=([^;]+)/);
|
|
501
|
-
if (!boundaryMatch) {
|
|
502
|
-
reject(new Error("Missing boundary in content-type"));
|
|
503
|
-
return;
|
|
504
|
-
}
|
|
505
|
-
|
|
506
|
-
boundary = `--${boundaryMatch[1].trim()}`;
|
|
507
|
-
|
|
508
|
-
req.on("data", (chunk) => {
|
|
509
|
-
chunks.push(chunk);
|
|
510
|
-
});
|
|
511
|
-
|
|
512
|
-
req.on("end", () => {
|
|
513
|
-
try {
|
|
514
|
-
const buffer = Buffer.concat(chunks);
|
|
515
|
-
|
|
516
|
-
// Find file part in the buffer using string operations for headers
|
|
517
|
-
const data = buffer.toString(
|
|
518
|
-
"utf8",
|
|
519
|
-
0,
|
|
520
|
-
Math.min(buffer.length, 1024 * 1024)
|
|
521
|
-
); // First MB for header search
|
|
522
|
-
const parts = data.split(boundary);
|
|
523
|
-
|
|
524
|
-
for (let i = 0; i < parts.length; i++) {
|
|
525
|
-
const part = parts[i];
|
|
526
|
-
|
|
527
|
-
if (part.includes('name="file"') && part.includes("filename")) {
|
|
528
|
-
// Extract filename
|
|
529
|
-
const filenameMatch = part.match(/filename="([^"]+)"/);
|
|
530
|
-
if (!filenameMatch) continue;
|
|
531
|
-
|
|
532
|
-
// Extract content type
|
|
533
|
-
const contentTypeMatch = part.match(/Content-Type:\s*([^\r\n]+)/);
|
|
534
|
-
|
|
535
|
-
// Find this specific part's start in the data string
|
|
536
|
-
const partIndexInData = data.indexOf(part);
|
|
537
|
-
const headerEndInPart = part.indexOf("\r\n\r\n");
|
|
538
|
-
if (headerEndInPart === -1) {
|
|
539
|
-
reject(
|
|
540
|
-
new Error("Could not find end of headers in multipart part")
|
|
541
|
-
);
|
|
542
|
-
return;
|
|
543
|
-
}
|
|
544
|
-
|
|
545
|
-
// Calculate the actual byte positions in the buffer for this part
|
|
546
|
-
const headerEndInData = partIndexInData + headerEndInPart + 4;
|
|
547
|
-
|
|
548
|
-
// Use binary buffer to find the next boundary
|
|
549
|
-
const boundaryBuf = Buffer.from(boundary, "ascii");
|
|
550
|
-
const nextBoundaryIndex = buffer.indexOf(
|
|
551
|
-
boundaryBuf,
|
|
552
|
-
headerEndInData
|
|
553
|
-
);
|
|
554
|
-
const contentEndInData =
|
|
555
|
-
nextBoundaryIndex !== -1
|
|
556
|
-
? nextBoundaryIndex - 2 // Subtract 2 for \r\n before boundary
|
|
557
|
-
: buffer.length;
|
|
558
|
-
|
|
559
|
-
// Extract the file content as Buffer
|
|
560
|
-
const contentBuffer = buffer.slice(
|
|
561
|
-
headerEndInData,
|
|
562
|
-
contentEndInData
|
|
563
|
-
);
|
|
564
|
-
|
|
565
|
-
resolve({
|
|
566
|
-
filename: filenameMatch[1],
|
|
567
|
-
contentType: contentTypeMatch
|
|
568
|
-
? contentTypeMatch[1]
|
|
569
|
-
: "application/octet-stream",
|
|
570
|
-
contentBuffer: contentBuffer,
|
|
571
|
-
});
|
|
572
|
-
return;
|
|
573
|
-
}
|
|
574
|
-
}
|
|
575
|
-
|
|
576
|
-
reject(new Error("No file field found in form data"));
|
|
577
|
-
} catch (error) {
|
|
578
|
-
console.error("Error parsing multipart:", error);
|
|
579
|
-
reject(error);
|
|
580
|
-
}
|
|
581
|
-
});
|
|
582
|
-
|
|
583
|
-
req.on("error", reject);
|
|
584
|
-
});
|
|
585
|
-
}
|
|
586
|
-
|
|
587
|
-
/**
|
|
588
|
-
* Handle seed file upload
|
|
589
|
-
* @param {http.IncomingMessage} req - HTTP request
|
|
590
|
-
* @param {http.ServerResponse} res - HTTP response
|
|
591
|
-
*/
|
|
592
|
-
async function handleSeedUpload(req, res) {
|
|
593
|
-
// Add logging at the very start of the upload handler
|
|
594
|
-
console.log("[UPLOAD] Incoming seed upload", {
|
|
595
|
-
method: req.method,
|
|
596
|
-
url: req.url,
|
|
597
|
-
contentType: req.headers["content-type"],
|
|
598
|
-
userAgent: req.headers["user-agent"],
|
|
599
|
-
});
|
|
600
|
-
|
|
601
|
-
try {
|
|
602
|
-
const ct = req.headers["content-type"] || "";
|
|
603
|
-
|
|
604
|
-
// Use the new normalization function to handle all upload formats
|
|
605
|
-
let normalizedUpload;
|
|
606
|
-
try {
|
|
607
|
-
normalizedUpload = await normalizeSeedUpload({
|
|
608
|
-
req,
|
|
609
|
-
contentTypeHeader: ct,
|
|
610
|
-
});
|
|
611
|
-
} catch (error) {
|
|
612
|
-
console.log("[UPLOAD] Normalization failed", {
|
|
613
|
-
error: error.message,
|
|
614
|
-
contentType: ct,
|
|
615
|
-
});
|
|
616
|
-
|
|
617
|
-
// Handle specific zip-related errors with appropriate messages
|
|
618
|
-
let errorMessage = error.message;
|
|
619
|
-
if (error.message === "Invalid JSON") {
|
|
620
|
-
errorMessage = "Invalid JSON";
|
|
621
|
-
} else if (error.message === "seed.json not found in zip") {
|
|
622
|
-
errorMessage = "seed.json not found in zip";
|
|
623
|
-
}
|
|
624
|
-
|
|
625
|
-
res.writeHead(400, { "Content-Type": "application/json" });
|
|
626
|
-
res.end(JSON.stringify({ success: false, message: errorMessage }));
|
|
627
|
-
return;
|
|
628
|
-
}
|
|
629
|
-
|
|
630
|
-
const { seedObject, uploadArtifacts } = normalizedUpload;
|
|
631
|
-
|
|
632
|
-
// Use current PO_ROOT or fallback to DATA_DIR
|
|
633
|
-
const currentDataDir = process.env.PO_ROOT || DATA_DIR;
|
|
634
|
-
|
|
635
|
-
// For test environment, use simplified validation without starting orchestrator
|
|
636
|
-
if (process.env.NODE_ENV === "test") {
|
|
637
|
-
// Simplified validation for tests - just write to pending directory
|
|
638
|
-
const result = await handleSeedUploadDirect(
|
|
639
|
-
seedObject,
|
|
640
|
-
currentDataDir,
|
|
641
|
-
uploadArtifacts
|
|
642
|
-
);
|
|
643
|
-
|
|
644
|
-
// Return appropriate status code based on success
|
|
645
|
-
if (result.success) {
|
|
646
|
-
res.writeHead(200, {
|
|
647
|
-
"Content-Type": "application/json",
|
|
648
|
-
Connection: "close",
|
|
649
|
-
});
|
|
650
|
-
res.end(JSON.stringify(result));
|
|
651
|
-
|
|
652
|
-
// Broadcast SSE event for successful upload
|
|
653
|
-
sseRegistry.broadcast({
|
|
654
|
-
type: "seed:uploaded",
|
|
655
|
-
data: { name: result.jobName },
|
|
656
|
-
});
|
|
657
|
-
} else {
|
|
658
|
-
res.writeHead(400, {
|
|
659
|
-
"Content-Type": "application/json",
|
|
660
|
-
Connection: "close",
|
|
661
|
-
});
|
|
662
|
-
res.end(JSON.stringify(result));
|
|
663
|
-
}
|
|
664
|
-
return;
|
|
665
|
-
}
|
|
666
|
-
|
|
667
|
-
// Submit job with validation (for production)
|
|
668
|
-
// Dynamically import only in non-test mode
|
|
669
|
-
if (process.env.NODE_ENV !== "test") {
|
|
670
|
-
if (!submitJobWithValidation) {
|
|
671
|
-
({ submitJobWithValidation } = await import("../api/index.js"));
|
|
672
|
-
}
|
|
673
|
-
const result = await submitJobWithValidation({
|
|
674
|
-
dataDir: currentDataDir,
|
|
675
|
-
seedObject,
|
|
676
|
-
uploadArtifacts,
|
|
677
|
-
});
|
|
678
|
-
|
|
679
|
-
// Send appropriate response
|
|
680
|
-
if (result.success) {
|
|
681
|
-
res.writeHead(200, { "Content-Type": "application/json" });
|
|
682
|
-
res.end(JSON.stringify(result));
|
|
683
|
-
|
|
684
|
-
// Broadcast SSE event for successful upload
|
|
685
|
-
sseRegistry.broadcast({
|
|
686
|
-
type: "seed:uploaded",
|
|
687
|
-
data: { name: result.jobName },
|
|
688
|
-
});
|
|
689
|
-
} else {
|
|
690
|
-
res.writeHead(400, { "Content-Type": "application/json" });
|
|
691
|
-
res.end(JSON.stringify(result));
|
|
692
|
-
}
|
|
693
|
-
} else {
|
|
694
|
-
// In test mode, we should never reach here, but handle gracefully
|
|
695
|
-
res.writeHead(500, { "Content-Type": "application/json" });
|
|
696
|
-
res.end(
|
|
697
|
-
JSON.stringify({
|
|
698
|
-
success: false,
|
|
699
|
-
message:
|
|
700
|
-
"Test environment error - should not reach production code path",
|
|
701
|
-
})
|
|
702
|
-
);
|
|
703
|
-
}
|
|
704
|
-
} catch (error) {
|
|
705
|
-
console.error("Upload error:", error);
|
|
706
|
-
res.writeHead(500, { "Content-Type": "application/json" });
|
|
707
|
-
res.end(
|
|
708
|
-
JSON.stringify({
|
|
709
|
-
success: false,
|
|
710
|
-
message: "Internal server error",
|
|
711
|
-
})
|
|
712
|
-
);
|
|
713
|
-
}
|
|
714
|
-
}
|
|
715
|
-
|
|
716
|
-
// MIME type detection map
|
|
717
|
-
const MIME_MAP = {
|
|
718
|
-
// Text types
|
|
719
|
-
".txt": "text/plain",
|
|
720
|
-
".log": "text/plain",
|
|
721
|
-
".md": "text/markdown",
|
|
722
|
-
".csv": "text/csv",
|
|
723
|
-
".json": "application/json",
|
|
724
|
-
".xml": "application/xml",
|
|
725
|
-
".yaml": "application/x-yaml",
|
|
726
|
-
".yml": "application/x-yaml",
|
|
727
|
-
".toml": "application/toml",
|
|
728
|
-
".ini": "text/plain",
|
|
729
|
-
".conf": "text/plain",
|
|
730
|
-
".config": "text/plain",
|
|
731
|
-
".env": "text/plain",
|
|
732
|
-
".gitignore": "text/plain",
|
|
733
|
-
".dockerfile": "text/plain",
|
|
734
|
-
".sh": "application/x-sh",
|
|
735
|
-
".bash": "application/x-sh",
|
|
736
|
-
".zsh": "application/x-sh",
|
|
737
|
-
".fish": "application/x-fish",
|
|
738
|
-
".ps1": "application/x-powershell",
|
|
739
|
-
".bat": "application/x-bat",
|
|
740
|
-
".cmd": "application/x-cmd",
|
|
741
|
-
|
|
742
|
-
// Code types
|
|
743
|
-
".js": "application/javascript",
|
|
744
|
-
".mjs": "application/javascript",
|
|
745
|
-
".cjs": "application/javascript",
|
|
746
|
-
".ts": "application/typescript",
|
|
747
|
-
".mts": "application/typescript",
|
|
748
|
-
".cts": "application/typescript",
|
|
749
|
-
".jsx": "application/javascript",
|
|
750
|
-
".tsx": "application/typescript",
|
|
751
|
-
".py": "text/x-python",
|
|
752
|
-
".rb": "text/x-ruby",
|
|
753
|
-
".php": "application/x-php",
|
|
754
|
-
".java": "text/x-java-source",
|
|
755
|
-
".c": "text/x-c",
|
|
756
|
-
".cpp": "text/x-c++",
|
|
757
|
-
".cc": "text/x-c++",
|
|
758
|
-
".cxx": "text/x-c++",
|
|
759
|
-
".h": "text/x-c",
|
|
760
|
-
".hpp": "text/x-c++",
|
|
761
|
-
".cs": "text/x-csharp",
|
|
762
|
-
".go": "text/x-go",
|
|
763
|
-
".rs": "text/x-rust",
|
|
764
|
-
".swift": "text/x-swift",
|
|
765
|
-
".kt": "text/x-kotlin",
|
|
766
|
-
".scala": "text/x-scala",
|
|
767
|
-
".r": "text/x-r",
|
|
768
|
-
".sql": "application/sql",
|
|
769
|
-
".pl": "text/x-perl",
|
|
770
|
-
".lua": "text/x-lua",
|
|
771
|
-
".vim": "text/x-vim",
|
|
772
|
-
".el": "text/x-elisp",
|
|
773
|
-
".lisp": "text/x-lisp",
|
|
774
|
-
".hs": "text/x-haskell",
|
|
775
|
-
".ml": "text/x-ocaml",
|
|
776
|
-
".ex": "text/x-elixir",
|
|
777
|
-
".exs": "text/x-elixir",
|
|
778
|
-
".erl": "text/x-erlang",
|
|
779
|
-
".beam": "application/x-erlang-beam",
|
|
780
|
-
|
|
781
|
-
// Web types
|
|
782
|
-
".html": "text/html",
|
|
783
|
-
".htm": "text/html",
|
|
784
|
-
".xhtml": "application/xhtml+xml",
|
|
785
|
-
".css": "text/css",
|
|
786
|
-
".scss": "text/x-scss",
|
|
787
|
-
".sass": "text/x-sass",
|
|
788
|
-
".less": "text/x-less",
|
|
789
|
-
".styl": "text/x-stylus",
|
|
790
|
-
".vue": "text/x-vue",
|
|
791
|
-
".svelte": "text/x-svelte",
|
|
792
|
-
|
|
793
|
-
// Data formats
|
|
794
|
-
".pdf": "application/pdf",
|
|
795
|
-
".doc": "application/msword",
|
|
796
|
-
".docx":
|
|
797
|
-
"application/vnd.openxmlformats-officedocument.wordprocessingml.document",
|
|
798
|
-
".xls": "application/vnd.ms-excel",
|
|
799
|
-
".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
|
|
800
|
-
".ppt": "application/vnd.ms-powerpoint",
|
|
801
|
-
".pptx":
|
|
802
|
-
"application/vnd.openxmlformats-officedocument.presentationml.presentation",
|
|
803
|
-
".odt": "application/vnd.oasis.opendocument.text",
|
|
804
|
-
".ods": "application/vnd.oasis.opendocument.spreadsheet",
|
|
805
|
-
".odp": "application/vnd.oasis.opendocument.presentation",
|
|
806
|
-
|
|
807
|
-
// Images
|
|
808
|
-
".png": "image/png",
|
|
809
|
-
".jpg": "image/jpeg",
|
|
810
|
-
".jpeg": "image/jpeg",
|
|
811
|
-
".gif": "image/gif",
|
|
812
|
-
".bmp": "image/bmp",
|
|
813
|
-
".webp": "image/webp",
|
|
814
|
-
".svg": "image/svg+xml",
|
|
815
|
-
".ico": "image/x-icon",
|
|
816
|
-
".tiff": "image/tiff",
|
|
817
|
-
".tif": "image/tiff",
|
|
818
|
-
".psd": "image/vnd.adobe.photoshop",
|
|
819
|
-
".ai": "application/pdf", // Illustrator files often saved as PDF
|
|
820
|
-
".eps": "application/postscript",
|
|
821
|
-
|
|
822
|
-
// Audio
|
|
823
|
-
".mp3": "audio/mpeg",
|
|
824
|
-
".wav": "audio/wav",
|
|
825
|
-
".ogg": "audio/ogg",
|
|
826
|
-
".flac": "audio/flac",
|
|
827
|
-
".aac": "audio/aac",
|
|
828
|
-
".m4a": "audio/mp4",
|
|
829
|
-
".wma": "audio/x-ms-wma",
|
|
830
|
-
|
|
831
|
-
// Video
|
|
832
|
-
".mp4": "video/mp4",
|
|
833
|
-
".avi": "video/x-msvideo",
|
|
834
|
-
".mov": "video/quicktime",
|
|
835
|
-
".wmv": "video/x-ms-wmv",
|
|
836
|
-
".flv": "video/x-flv",
|
|
837
|
-
".webm": "video/webm",
|
|
838
|
-
".mkv": "video/x-matroska",
|
|
839
|
-
".m4v": "video/mp4",
|
|
840
|
-
|
|
841
|
-
// Archives
|
|
842
|
-
".zip": "application/zip",
|
|
843
|
-
".rar": "application/x-rar-compressed",
|
|
844
|
-
".tar": "application/x-tar",
|
|
845
|
-
".gz": "application/gzip",
|
|
846
|
-
".tgz": "application/gzip",
|
|
847
|
-
".bz2": "application/x-bzip2",
|
|
848
|
-
".xz": "application/x-xz",
|
|
849
|
-
".7z": "application/x-7z-compressed",
|
|
850
|
-
".deb": "application/x-debian-package",
|
|
851
|
-
".rpm": "application/x-rpm",
|
|
852
|
-
".dmg": "application/x-apple-diskimage",
|
|
853
|
-
".iso": "application/x-iso9660-image",
|
|
854
|
-
|
|
855
|
-
// Fonts
|
|
856
|
-
".ttf": "font/ttf",
|
|
857
|
-
".otf": "font/otf",
|
|
858
|
-
".woff": "font/woff",
|
|
859
|
-
".woff2": "font/woff2",
|
|
860
|
-
".eot": "application/vnd.ms-fontobject",
|
|
861
|
-
|
|
862
|
-
// Misc
|
|
863
|
-
".bin": "application/octet-stream",
|
|
864
|
-
".exe": "application/x-msdownload",
|
|
865
|
-
".dll": "application/x-msdownload",
|
|
866
|
-
".so": "application/x-sharedlib",
|
|
867
|
-
".dylib": "application/x-mach-binary",
|
|
868
|
-
".class": "application/java-vm",
|
|
869
|
-
".jar": "application/java-archive",
|
|
870
|
-
".war": "application/java-archive",
|
|
871
|
-
".ear": "application/java-archive",
|
|
872
|
-
".apk": "application/vnd.android.package-archive",
|
|
873
|
-
".ipa": "application/x-itunes-ipa",
|
|
874
|
-
};
|
|
875
|
-
|
|
876
|
-
/**
|
|
877
|
-
* Determine MIME type from file extension
|
|
878
|
-
* @param {string} filename - File name
|
|
879
|
-
* @returns {string} MIME type
|
|
880
|
-
*/
|
|
881
|
-
function getMimeType(filename) {
|
|
882
|
-
const ext = path.extname(filename).toLowerCase();
|
|
883
|
-
return MIME_MAP[ext] || "application/octet-stream";
|
|
884
|
-
}
|
|
885
|
-
|
|
886
|
-
/**
|
|
887
|
-
* Check if MIME type should be treated as text
|
|
888
|
-
* @param {string} mime - MIME type
|
|
889
|
-
* @returns {boolean} True if text-like
|
|
890
|
-
*/
|
|
891
|
-
function isTextMime(mime) {
|
|
892
|
-
return (
|
|
893
|
-
mime.startsWith("text/") ||
|
|
894
|
-
mime === "application/json" ||
|
|
895
|
-
mime === "application/javascript" ||
|
|
896
|
-
mime === "application/xml" ||
|
|
897
|
-
mime === "application/x-yaml" ||
|
|
898
|
-
mime === "application/x-sh" ||
|
|
899
|
-
mime === "application/x-bat" ||
|
|
900
|
-
mime === "application/x-cmd" ||
|
|
901
|
-
mime === "application/x-powershell" ||
|
|
902
|
-
mime === "image/svg+xml" ||
|
|
903
|
-
mime === "application/x-ndjson" ||
|
|
904
|
-
mime === "text/csv" ||
|
|
905
|
-
mime === "text/markdown"
|
|
906
|
-
);
|
|
907
|
-
}
|
|
908
|
-
|
|
909
|
-
/**
|
|
910
|
-
* Handle task file list request with validation and security checks
|
|
911
|
-
* @param {http.IncomingMessage} req - HTTP request
|
|
912
|
-
* @param {http.ServerResponse} res - HTTP response
|
|
913
|
-
* @param {Object} params - Request parameters
|
|
914
|
-
*/
|
|
915
|
-
async function handleTaskFileListRequest(req, res, { jobId, taskId, type }) {
|
|
916
|
-
const dataDir = process.env.PO_ROOT || DATA_DIR;
|
|
917
|
-
|
|
918
|
-
// Resolve job lifecycle deterministically
|
|
919
|
-
const lifecycle = await resolveJobLifecycle(dataDir, jobId);
|
|
920
|
-
if (!lifecycle) {
|
|
921
|
-
// Job not found, return empty list
|
|
922
|
-
sendJson(res, 200, {
|
|
923
|
-
ok: true,
|
|
924
|
-
data: {
|
|
925
|
-
files: [],
|
|
926
|
-
jobId,
|
|
927
|
-
taskId,
|
|
928
|
-
type,
|
|
929
|
-
},
|
|
930
|
-
});
|
|
931
|
-
return;
|
|
932
|
-
}
|
|
933
|
-
|
|
934
|
-
// Use single lifecycle directory
|
|
935
|
-
const jobDir = getJobDirectoryPath(dataDir, jobId, lifecycle);
|
|
936
|
-
const taskDir = path.join(jobDir, "files", type);
|
|
937
|
-
|
|
938
|
-
// Use path.relative for stricter jail enforcement
|
|
939
|
-
const resolvedPath = path.resolve(taskDir);
|
|
940
|
-
const relativePath = path.relative(jobDir, resolvedPath);
|
|
941
|
-
|
|
942
|
-
if (relativePath.startsWith("..") || path.isAbsolute(relativePath)) {
|
|
943
|
-
console.error("Path security: directory traversal detected", {
|
|
944
|
-
taskDir,
|
|
945
|
-
relativePath,
|
|
946
|
-
});
|
|
947
|
-
sendJson(res, 403, {
|
|
948
|
-
ok: false,
|
|
949
|
-
error: "forbidden",
|
|
950
|
-
message: "Path validation failed",
|
|
951
|
-
});
|
|
952
|
-
return;
|
|
953
|
-
}
|
|
954
|
-
|
|
955
|
-
// Check if directory exists
|
|
956
|
-
if (!(await exists(taskDir))) {
|
|
957
|
-
// Directory doesn't exist, return empty list
|
|
958
|
-
sendJson(res, 200, {
|
|
959
|
-
ok: true,
|
|
960
|
-
data: {
|
|
961
|
-
files: [],
|
|
962
|
-
jobId,
|
|
963
|
-
taskId,
|
|
964
|
-
type,
|
|
965
|
-
},
|
|
966
|
-
});
|
|
967
|
-
return;
|
|
968
|
-
}
|
|
969
|
-
|
|
970
|
-
try {
|
|
971
|
-
// Read directory contents
|
|
972
|
-
const entries = await fs.promises.readdir(taskDir, {
|
|
973
|
-
withFileTypes: true,
|
|
974
|
-
});
|
|
975
|
-
|
|
976
|
-
// Filter and map to file list
|
|
977
|
-
const files = [];
|
|
978
|
-
for (const entry of entries) {
|
|
979
|
-
if (entry.isFile()) {
|
|
980
|
-
// Validate each filename using the consolidated function
|
|
981
|
-
const validation = validateFilePath(entry.name);
|
|
982
|
-
if (validation) {
|
|
983
|
-
console.error("Path security: skipping invalid file", {
|
|
984
|
-
filename: entry.name,
|
|
985
|
-
reason: validation.message,
|
|
986
|
-
});
|
|
987
|
-
continue; // Skip files that fail validation
|
|
988
|
-
}
|
|
989
|
-
|
|
990
|
-
const filePath = path.join(taskDir, entry.name);
|
|
991
|
-
const stats = await fs.promises.stat(filePath);
|
|
992
|
-
|
|
993
|
-
files.push({
|
|
994
|
-
name: entry.name,
|
|
995
|
-
size: stats.size,
|
|
996
|
-
mtime: stats.mtime.toISOString(),
|
|
997
|
-
mime: getMimeType(entry.name),
|
|
998
|
-
});
|
|
999
|
-
}
|
|
1000
|
-
}
|
|
1001
|
-
|
|
1002
|
-
// Sort files by name
|
|
1003
|
-
files.sort((a, b) => a.name.localeCompare(b.name));
|
|
1004
|
-
|
|
1005
|
-
// Send successful response
|
|
1006
|
-
sendJson(res, 200, {
|
|
1007
|
-
ok: true,
|
|
1008
|
-
data: {
|
|
1009
|
-
files,
|
|
1010
|
-
jobId,
|
|
1011
|
-
taskId,
|
|
1012
|
-
type,
|
|
1013
|
-
},
|
|
1014
|
-
});
|
|
1015
|
-
} catch (error) {
|
|
1016
|
-
console.error("Error listing files:", error);
|
|
1017
|
-
sendJson(res, 500, {
|
|
1018
|
-
ok: false,
|
|
1019
|
-
error: "internal_error",
|
|
1020
|
-
message: "Failed to list files",
|
|
1021
|
-
});
|
|
1022
|
-
}
|
|
1023
|
-
}
|
|
1024
|
-
|
|
1025
|
-
/**
|
|
1026
|
-
* Consolidated path jail security validation with generic error messages
|
|
1027
|
-
* @param {string} filename - Filename to validate
|
|
1028
|
-
* @returns {Object|null} Validation result or null if valid
|
|
1029
|
-
*/
|
|
1030
|
-
function validateFilePath(filename) {
|
|
1031
|
-
// Check for path traversal patterns
|
|
1032
|
-
if (filename.includes("..")) {
|
|
1033
|
-
console.error("Path security: path traversal detected", { filename });
|
|
1034
|
-
return {
|
|
1035
|
-
allowed: false,
|
|
1036
|
-
message: "Path validation failed",
|
|
1037
|
-
};
|
|
1038
|
-
}
|
|
1039
|
-
|
|
1040
|
-
// Check for absolute paths (POSIX, Windows, backslashes, ~)
|
|
1041
|
-
if (
|
|
1042
|
-
path.isAbsolute(filename) ||
|
|
1043
|
-
/^[a-zA-Z]:/.test(filename) ||
|
|
1044
|
-
filename.includes("\\") ||
|
|
1045
|
-
filename.startsWith("~")
|
|
1046
|
-
) {
|
|
1047
|
-
console.error("Path security: absolute path detected", { filename });
|
|
1048
|
-
return {
|
|
1049
|
-
allowed: false,
|
|
1050
|
-
message: "Path validation failed",
|
|
1051
|
-
};
|
|
1052
|
-
}
|
|
1053
|
-
|
|
1054
|
-
// Check for empty filename
|
|
1055
|
-
if (!filename || filename.trim() === "") {
|
|
1056
|
-
console.error("Path security: empty filename detected");
|
|
1057
|
-
return {
|
|
1058
|
-
allowed: false,
|
|
1059
|
-
message: "Path validation failed",
|
|
1060
|
-
};
|
|
1061
|
-
}
|
|
1062
|
-
|
|
1063
|
-
// Path is valid
|
|
1064
|
-
return null;
|
|
1065
|
-
}
|
|
1066
|
-
|
|
1067
|
-
/**
|
|
1068
|
-
* Handle task file request with validation, jail checks, and proper encoding
|
|
1069
|
-
* @param {http.IncomingMessage} req - HTTP request
|
|
1070
|
-
* @param {http.ServerResponse} res - HTTP response
|
|
1071
|
-
* @param {Object} params - Request parameters
|
|
1072
|
-
*/
|
|
1073
|
-
async function handleTaskFileRequest(
|
|
1074
|
-
req,
|
|
1075
|
-
res,
|
|
1076
|
-
{ jobId, taskId, type, filename }
|
|
1077
|
-
) {
|
|
1078
|
-
const dataDir = process.env.PO_ROOT || DATA_DIR;
|
|
1079
|
-
|
|
1080
|
-
// Unified security validation
|
|
1081
|
-
const validation = validateFilePath(filename);
|
|
1082
|
-
if (validation) {
|
|
1083
|
-
sendJson(res, 403, {
|
|
1084
|
-
ok: false,
|
|
1085
|
-
error: "forbidden",
|
|
1086
|
-
message: validation.message,
|
|
1087
|
-
});
|
|
1088
|
-
return;
|
|
1089
|
-
}
|
|
1090
|
-
|
|
1091
|
-
// Resolve job lifecycle deterministically
|
|
1092
|
-
const lifecycle = await resolveJobLifecycle(dataDir, jobId);
|
|
1093
|
-
if (!lifecycle) {
|
|
1094
|
-
sendJson(res, 404, {
|
|
1095
|
-
ok: false,
|
|
1096
|
-
error: "not_found",
|
|
1097
|
-
message: "Job not found",
|
|
1098
|
-
});
|
|
1099
|
-
return;
|
|
1100
|
-
}
|
|
1101
|
-
|
|
1102
|
-
// Use single lifecycle directory
|
|
1103
|
-
const jobDir = getJobDirectoryPath(dataDir, jobId, lifecycle);
|
|
1104
|
-
const taskDir = path.join(jobDir, "files", type);
|
|
1105
|
-
const filePath = path.join(taskDir, filename);
|
|
1106
|
-
|
|
1107
|
-
// Use path.relative for stricter jail enforcement
|
|
1108
|
-
const resolvedPath = path.resolve(filePath);
|
|
1109
|
-
const relativePath = path.relative(jobDir, resolvedPath);
|
|
1110
|
-
|
|
1111
|
-
if (relativePath.startsWith("..") || path.isAbsolute(relativePath)) {
|
|
1112
|
-
sendJson(res, 403, {
|
|
1113
|
-
ok: false,
|
|
1114
|
-
error: "forbidden",
|
|
1115
|
-
message: "Path resolves outside allowed directory",
|
|
1116
|
-
});
|
|
1117
|
-
return;
|
|
1118
|
-
}
|
|
1119
|
-
|
|
1120
|
-
// Check if file exists
|
|
1121
|
-
if (!(await exists(filePath))) {
|
|
1122
|
-
sendJson(res, 404, {
|
|
1123
|
-
ok: false,
|
|
1124
|
-
error: "not_found",
|
|
1125
|
-
message: "File not found",
|
|
1126
|
-
filePath,
|
|
1127
|
-
});
|
|
1128
|
-
return;
|
|
1129
|
-
}
|
|
1130
|
-
|
|
1131
|
-
try {
|
|
1132
|
-
// Get file stats
|
|
1133
|
-
const stats = await fs.promises.stat(filePath);
|
|
1134
|
-
if (!stats.isFile()) {
|
|
1135
|
-
sendJson(res, 404, {
|
|
1136
|
-
ok: false,
|
|
1137
|
-
error: "not_found",
|
|
1138
|
-
message: "Not a regular file",
|
|
1139
|
-
});
|
|
1140
|
-
return;
|
|
1141
|
-
}
|
|
1142
|
-
|
|
1143
|
-
// Determine MIME type and encoding
|
|
1144
|
-
const mime = getMimeType(filename);
|
|
1145
|
-
const isText = isTextMime(mime);
|
|
1146
|
-
const encoding = isText ? "utf8" : "base64";
|
|
1147
|
-
|
|
1148
|
-
// Read file content
|
|
1149
|
-
let content;
|
|
1150
|
-
if (isText) {
|
|
1151
|
-
content = await fs.promises.readFile(filePath, "utf8");
|
|
1152
|
-
} else {
|
|
1153
|
-
const buffer = await fs.promises.readFile(filePath);
|
|
1154
|
-
content = buffer.toString("base64");
|
|
1155
|
-
}
|
|
1156
|
-
|
|
1157
|
-
// Build relative path for response
|
|
1158
|
-
const relativePath = path.join("tasks", taskId, type, filename);
|
|
1159
|
-
|
|
1160
|
-
// Send successful response
|
|
1161
|
-
sendJson(res, 200, {
|
|
1162
|
-
ok: true,
|
|
1163
|
-
jobId,
|
|
1164
|
-
taskId,
|
|
1165
|
-
type,
|
|
1166
|
-
path: relativePath,
|
|
1167
|
-
mime,
|
|
1168
|
-
size: stats.size,
|
|
1169
|
-
mtime: stats.mtime.toISOString(),
|
|
1170
|
-
encoding,
|
|
1171
|
-
content,
|
|
1172
|
-
});
|
|
1173
|
-
} catch (error) {
|
|
1174
|
-
console.error("Error reading file:", error);
|
|
1175
|
-
sendJson(res, 500, {
|
|
1176
|
-
ok: false,
|
|
1177
|
-
error: "internal_error",
|
|
1178
|
-
message: "Failed to read file",
|
|
1179
|
-
});
|
|
1180
|
-
}
|
|
1181
|
-
}
|
|
1182
|
-
|
|
1183
|
-
/**
|
|
1184
|
-
* Serve static files from dist directory (built React app)
|
|
1185
|
-
*/
|
|
1186
|
-
function serveStatic(res, filePath) {
|
|
1187
|
-
const ext = path.extname(filePath);
|
|
1188
|
-
const contentTypes = {
|
|
1189
|
-
".html": "text/html",
|
|
1190
|
-
".js": "application/javascript",
|
|
1191
|
-
".css": "text/css",
|
|
1192
|
-
".json": "application/json",
|
|
1193
|
-
".png": "image/png",
|
|
1194
|
-
".jpg": "image/jpeg",
|
|
1195
|
-
".svg": "image/svg+xml",
|
|
1196
|
-
};
|
|
1197
|
-
|
|
1198
|
-
fs.readFile(filePath, (err, content) => {
|
|
1199
|
-
if (err) {
|
|
1200
|
-
res.writeHead(404);
|
|
1201
|
-
res.end("Not Found");
|
|
1202
|
-
} else {
|
|
1203
|
-
res.writeHead(200, { "Content-Type": contentTypes[ext] || "text/plain" });
|
|
1204
|
-
res.end(content);
|
|
1205
|
-
}
|
|
1206
|
-
});
|
|
1207
|
-
}
|
|
1208
|
-
|
|
1209
|
-
/**
|
|
1210
|
-
* Create and start the HTTP server
|
|
54
|
+
* Create and start an HTTP server
|
|
55
|
+
* @param {string} serverDataDir - Base data directory for pipeline data
|
|
1211
56
|
*/
|
|
1212
|
-
function createServer() {
|
|
1213
|
-
const
|
|
1214
|
-
|
|
1215
|
-
const { pathname, searchParams } = new URL(
|
|
1216
|
-
req.url,
|
|
1217
|
-
`http://${req.headers.host}`
|
|
1218
|
-
);
|
|
1219
|
-
|
|
1220
|
-
// CORS headers for API endpoints
|
|
1221
|
-
if (pathname.startsWith("/api/")) {
|
|
1222
|
-
// Important for tests: avoid idle keep-alive sockets on short API calls
|
|
1223
|
-
res.setHeader("Connection", "close");
|
|
1224
|
-
res.setHeader("Access-Control-Allow-Origin", "*");
|
|
1225
|
-
res.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS");
|
|
1226
|
-
res.setHeader("Access-Control-Allow-Headers", "Content-Type");
|
|
1227
|
-
|
|
1228
|
-
if (req.method === "OPTIONS") {
|
|
1229
|
-
res.writeHead(204);
|
|
1230
|
-
res.end();
|
|
1231
|
-
return;
|
|
1232
|
-
}
|
|
1233
|
-
}
|
|
1234
|
-
|
|
1235
|
-
// Route: GET /api/state
|
|
1236
|
-
if (pathname === "/api/state") {
|
|
1237
|
-
if (req.method !== "GET") {
|
|
1238
|
-
res.writeHead(200, { "Content-Type": "application/json" });
|
|
1239
|
-
res.end(
|
|
1240
|
-
JSON.stringify({
|
|
1241
|
-
success: false,
|
|
1242
|
-
error: "Method not allowed",
|
|
1243
|
-
allowed: ["GET"],
|
|
1244
|
-
})
|
|
1245
|
-
);
|
|
1246
|
-
return;
|
|
1247
|
-
}
|
|
1248
|
-
|
|
1249
|
-
// Prefer returning the in-memory state when available (tests and runtime rely on state.getState()).
|
|
1250
|
-
// If in-memory state is available, return it directly; otherwise fall back to
|
|
1251
|
-
// building a filesystem-backed snapshot for client bootstrap.
|
|
1252
|
-
try {
|
|
1253
|
-
try {
|
|
1254
|
-
if (state && typeof state.getState === "function") {
|
|
1255
|
-
const inMemory = state.getState();
|
|
1256
|
-
if (inMemory) {
|
|
1257
|
-
res.writeHead(200, { "Content-Type": "application/json" });
|
|
1258
|
-
res.end(JSON.stringify(inMemory));
|
|
1259
|
-
return;
|
|
1260
|
-
}
|
|
1261
|
-
}
|
|
1262
|
-
} catch (innerErr) {
|
|
1263
|
-
// If reading in-memory state throws for some reason, fall back to snapshot
|
|
1264
|
-
console.warn(
|
|
1265
|
-
"Warning: failed to retrieve in-memory state:",
|
|
1266
|
-
innerErr
|
|
1267
|
-
);
|
|
1268
|
-
}
|
|
1269
|
-
|
|
1270
|
-
// Build a filesystem-backed snapshot for client bootstrap.
|
|
1271
|
-
// Dynamically import the composer and dependencies to avoid circular import issues.
|
|
1272
|
-
const [
|
|
1273
|
-
{ buildSnapshotFromFilesystem },
|
|
1274
|
-
jobScannerModule,
|
|
1275
|
-
jobReaderModule,
|
|
1276
|
-
statusTransformerModule,
|
|
1277
|
-
configBridgeModule,
|
|
1278
|
-
] = await Promise.all([
|
|
1279
|
-
import("./state-snapshot.js"),
|
|
1280
|
-
import("./job-scanner.js").catch(() => null),
|
|
1281
|
-
import("./job-reader.js").catch(() => null),
|
|
1282
|
-
import("./transformers/status-transformer.js").catch(() => null),
|
|
1283
|
-
import("./config-bridge.js").catch(() => null),
|
|
1284
|
-
]);
|
|
1285
|
-
|
|
1286
|
-
const snapshot = await buildSnapshotFromFilesystem({
|
|
1287
|
-
listAllJobs:
|
|
1288
|
-
jobScannerModule && jobScannerModule.listAllJobs
|
|
1289
|
-
? jobScannerModule.listAllJobs
|
|
1290
|
-
: undefined,
|
|
1291
|
-
readJob:
|
|
1292
|
-
jobReaderModule && jobReaderModule.readJob
|
|
1293
|
-
? jobReaderModule.readJob
|
|
1294
|
-
: undefined,
|
|
1295
|
-
transformMultipleJobs:
|
|
1296
|
-
statusTransformerModule &&
|
|
1297
|
-
statusTransformerModule.transformMultipleJobs
|
|
1298
|
-
? statusTransformerModule.transformMultipleJobs
|
|
1299
|
-
: undefined,
|
|
1300
|
-
now: () => new Date(),
|
|
1301
|
-
paths: (configBridgeModule && configBridgeModule.PATHS) || undefined,
|
|
1302
|
-
});
|
|
1303
|
-
|
|
1304
|
-
res.writeHead(200, { "Content-Type": "application/json" });
|
|
1305
|
-
res.end(JSON.stringify(snapshot));
|
|
1306
|
-
} catch (err) {
|
|
1307
|
-
console.error("Failed to build /api/state snapshot:", err);
|
|
1308
|
-
res.writeHead(500, { "Content-Type": "application/json" });
|
|
1309
|
-
res.end(
|
|
1310
|
-
JSON.stringify({
|
|
1311
|
-
ok: false,
|
|
1312
|
-
code: "snapshot_error",
|
|
1313
|
-
message: "Failed to build state snapshot",
|
|
1314
|
-
details: err && err.message ? err.message : String(err),
|
|
1315
|
-
})
|
|
1316
|
-
);
|
|
1317
|
-
}
|
|
1318
|
-
|
|
1319
|
-
return;
|
|
1320
|
-
}
|
|
1321
|
-
|
|
1322
|
-
// Route: GET /api/events (SSE)
|
|
1323
|
-
if (
|
|
1324
|
-
(pathname === "/api/events" || pathname === "/api/sse") &&
|
|
1325
|
-
req.method === "GET"
|
|
1326
|
-
) {
|
|
1327
|
-
// Parse jobId from query parameters for filtering
|
|
1328
|
-
const jobId = searchParams.get("jobId");
|
|
1329
|
-
|
|
1330
|
-
// Set SSE headers
|
|
1331
|
-
res.writeHead(200, {
|
|
1332
|
-
"Content-Type": "text/event-stream",
|
|
1333
|
-
"Cache-Control": "no-cache",
|
|
1334
|
-
Connection: "keep-alive",
|
|
1335
|
-
"Access-Control-Allow-Origin": "*",
|
|
1336
|
-
});
|
|
1337
|
-
|
|
1338
|
-
// Flush headers immediately
|
|
1339
|
-
res.flushHeaders();
|
|
1340
|
-
|
|
1341
|
-
// Initial full-state is no longer sent over the SSE stream.
|
|
1342
|
-
// Clients should fetch the snapshot from GET /api/state during bootstrap
|
|
1343
|
-
// and then rely on SSE incremental events (state:change/state:summary).
|
|
1344
|
-
// Keep headers flushed; sseRegistry.addClient will optionally send an initial ping.
|
|
1345
|
-
// (Previously sent full state here; removed to reduce SSE payloads.)
|
|
1346
|
-
|
|
1347
|
-
// Add to SSE registry with jobId metadata for filtering
|
|
1348
|
-
sseRegistry.addClient(res, { jobId });
|
|
1349
|
-
|
|
1350
|
-
// Start heartbeat for this connection
|
|
1351
|
-
const heartbeatInterval = setInterval(() => {
|
|
1352
|
-
try {
|
|
1353
|
-
res.write(
|
|
1354
|
-
`event: heartbeat\ndata: ${JSON.stringify({ timestamp: Date.now() })}\n\n`
|
|
1355
|
-
);
|
|
1356
|
-
} catch (err) {
|
|
1357
|
-
// Client disconnected, stop heartbeat
|
|
1358
|
-
clearInterval(heartbeatInterval);
|
|
1359
|
-
}
|
|
1360
|
-
}, 30000);
|
|
1361
|
-
|
|
1362
|
-
// Remove client on disconnect
|
|
1363
|
-
req.on("close", () => {
|
|
1364
|
-
clearInterval(heartbeatInterval);
|
|
1365
|
-
sseRegistry.removeClient(res);
|
|
1366
|
-
});
|
|
1367
|
-
|
|
1368
|
-
return;
|
|
1369
|
-
}
|
|
-
-    // Route: POST /api/upload/seed
-    if (pathname === "/api/upload/seed") {
-      if (req.method !== "POST") {
-        return sendJson(res, 405, {
-          success: false,
-          error: "Method not allowed",
-          allowed: ["POST"],
-        });
-      }
-
-      // Use the handleSeedUpload function which properly parses multipart data
-      await handleSeedUpload(req, res);
-      return;
-    }
-
-    // Route: GET /api/jobs/:jobId/tasks/:taskId/files (must come before generic /api/jobs/:jobId)
-    if (
-      pathname.startsWith("/api/jobs/") &&
-      pathname.includes("/tasks/") &&
-      pathname.endsWith("/files") &&
-      req.method === "GET"
-    ) {
-      const pathMatch = pathname.match(
-        /^\/api\/jobs\/([^\/]+)\/tasks\/([^\/]+)\/files$/
-      );
-      if (!pathMatch) {
-        sendJson(res, 400, {
-          ok: false,
-          error: "bad_request",
-          message: "Invalid path format",
-        });
-        return;
-      }
-
-      const [, jobId, taskId] = pathMatch;
-      const type = searchParams.get("type");
-
-      // Validate parameters
-      if (!jobId || typeof jobId !== "string" || jobId.trim() === "") {
-        sendJson(res, 400, {
-          ok: false,
-          error: "bad_request",
-          message: "jobId is required",
-        });
-        return;
-      }
-
-      if (!taskId || typeof taskId !== "string" || taskId.trim() === "") {
-        sendJson(res, 400, {
-          ok: false,
-          error: "bad_request",
-          message: "taskId is required",
-        });
-        return;
-      }
-
-      if (!type || !["artifacts", "logs", "tmp"].includes(type)) {
-        sendJson(res, 400, {
-          ok: false,
-          error: "bad_request",
-          message: "type must be one of: artifacts, logs, tmp",
-        });
-        return;
-      }
-
-      try {
-        await handleTaskFileListRequest(req, res, {
-          jobId,
-          taskId,
-          type,
-        });
-      } catch (error) {
-        console.error(`Error handling task file list request:`, error);
-        sendJson(res, 500, {
-          ok: false,
-          error: "internal_error",
-          message: "Internal server error",
-        });
-      }
-      return;
-    }
-
-    // Route: GET /api/jobs/:jobId/tasks/:taskId/file (must come before generic /api/jobs/:jobId)
-    if (
-      pathname.startsWith("/api/jobs/") &&
-      pathname.includes("/tasks/") &&
-      pathname.endsWith("/file") &&
-      req.method === "GET"
-    ) {
-      const pathMatch = pathname.match(
-        /^\/api\/jobs\/([^\/]+)\/tasks\/([^\/]+)\/file$/
-      );
-      if (!pathMatch) {
-        sendJson(res, 400, {
-          ok: false,
-          error: "bad_request",
-          message: "Invalid path format",
-        });
-        return;
-      }
-
-      const [, jobId, taskId] = pathMatch;
-      const type = searchParams.get("type");
-      const filename = searchParams.get("filename");
-
-      // Validate parameters
-      if (!jobId || typeof jobId !== "string" || jobId.trim() === "") {
-        sendJson(res, 400, {
-          ok: false,
-          error: "bad_request",
-          message: "jobId is required",
-        });
-        return;
-      }
-
-      if (!taskId || typeof taskId !== "string" || taskId.trim() === "") {
-        sendJson(res, 400, {
-          ok: false,
-          error: "bad_request",
-          message: "taskId is required",
-        });
-        return;
-      }
-
-      if (!type || !["artifacts", "logs", "tmp"].includes(type)) {
-        sendJson(res, 400, {
-          ok: false,
-          error: "bad_request",
-          message: "type must be one of: artifacts, logs, tmp",
-        });
-        return;
-      }
-
-      if (!filename || typeof filename !== "string" || filename.trim() === "") {
-        sendJson(res, 400, {
-          ok: false,
-          error: "bad_request",
-          message: "filename is required",
-        });
-        return;
-      }
-
-      try {
-        await handleTaskFileRequest(req, res, {
-          jobId,
-          taskId,
-          type,
-          filename,
-        });
-      } catch (error) {
-        console.error(`Error handling task file request:`, error);
-        sendJson(res, 500, {
-          ok: false,
-          error: "internal_error",
-          message: "Internal server error",
-        });
-      }
-      return;
-    }
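
Both task-file routes above validate the same inputs before delegating to the file endpoints: jobId and taskId come from the path, type must be one of artifacts, logs, or tmp, and the single-file route additionally requires a filename query parameter. A usage sketch against these routes, with placeholder identifiers and an assumed local port:

```js
// Listing and downloading task files through the routes above.
// "demo-job", "analyze-task", and "output.json" are placeholders, not values from this diff.
const base = "http://localhost:4000"; // assumed local port

// GET .../tasks/:taskId/files?type=artifacts -> listing of artifact files
const files = await fetch(
  `${base}/api/jobs/demo-job/tasks/analyze-task/files?type=artifacts`
).then((r) => r.json());

// GET .../tasks/:taskId/file?type=artifacts&filename=... -> one file's contents
const artifact = await fetch(
  `${base}/api/jobs/demo-job/tasks/analyze-task/file?type=artifacts&filename=output.json`
).then((r) => r.text());

// A missing or invalid jobId, taskId, type, or filename yields a 400 bad_request payload.
```
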
-
-    // Route: GET /api/jobs
-    if (pathname === "/api/jobs" && req.method === "GET") {
-      try {
-        const result = await handleJobList();
-
-        if (result.ok) {
-          sendJson(res, 200, result.data);
-        } else {
-          sendJson(res, 500, result);
-        }
-      } catch (error) {
-        console.error("Error handling /api/jobs:", error);
-        sendJson(res, 500, {
-          ok: false,
-          code: "internal_error",
-          message: "Internal server error",
-        });
-      }
-      return;
-    }
-
-    // Route: GET /api/llm/functions
-    if (pathname === "/api/llm/functions" && req.method === "GET") {
-      try {
-        const { PROVIDER_FUNCTIONS } = await import("../config/models.js");
-
-        sendJson(res, 200, PROVIDER_FUNCTIONS);
-      } catch (error) {
-        console.error("Error handling /api/llm/functions:", error);
-        sendJson(res, 500, {
-          ok: false,
-          error: "internal_error",
-          message: "Failed to get LLM functions",
-        });
-      }
-      return;
-    }
-
-    // Route: POST /api/jobs/:jobId/restart
-    if (
-      pathname.startsWith("/api/jobs/") &&
-      pathname.endsWith("/restart") &&
-      req.method === "POST"
-    ) {
-      const pathMatch = pathname.match(/^\/api\/jobs\/([^\/]+)\/restart$/);
-      if (!pathMatch) {
-        sendJson(res, 400, {
-          ok: false,
-          error: "bad_request",
-          message: "Invalid path format",
-        });
-        return;
-      }
-
-      const [, jobId] = pathMatch;
-      const dataDir = process.env.PO_ROOT || DATA_DIR;
-
-      try {
-        // Validate jobId
-        if (!jobId || typeof jobId !== "string" || jobId.trim() === "") {
-          sendJson(res, 400, {
-            ok: false,
-            error: "bad_request",
-            message: "jobId is required",
-          });
-          return;
-        }
-
-        // Resolve job lifecycle
-        const lifecycle = await resolveJobLifecycle(dataDir, jobId);
-        if (!lifecycle) {
-          sendJson(res, 404, {
-            ok: false,
-            code: "job_not_found",
-            message: "Job not found",
-          });
-          return;
-        }
-
-        // Only support current lifecycle for MVP
-        if (lifecycle !== "current") {
-          sendJson(res, 409, {
-            ok: false,
-            code: "unsupported_lifecycle",
-            message:
-              "Job restart is only supported for jobs in 'current' lifecycle",
-          });
-          return;
-        }
-
-        // Check if job is already running
-        const jobDir = getJobDirectoryPath(dataDir, jobId, "current");
-        const statusPath = path.join(jobDir, "tasks-status.json");
-
-        let snapshot;
-        try {
-          const content = await fs.promises.readFile(statusPath, "utf8");
-          snapshot = JSON.parse(content);
-        } catch (error) {
-          if (error.code === "ENOENT") {
-            sendJson(res, 404, {
-              ok: false,
-              code: "job_not_found",
-              message: "Job status file not found",
-            });
-            return;
-          }
-          throw error;
-        }
-
-        // Guard against running jobs
-        if (snapshot.state === "running") {
-          sendJson(res, 409, {
-            ok: false,
-            code: "job_running",
-            message: "Job is currently running",
-          });
-          return;
-        }
-
-        // Guard against concurrent restarts
-        if (isRestartInProgress(jobId)) {
-          sendJson(res, 409, {
-            ok: false,
-            code: "job_running",
-            message: "Job restart is already in progress",
-          });
-          return;
-        }
-
-        // Begin restart guard
-        beginRestart(jobId);
-
-        try {
-          // Parse optional fromTask from request body for targeted restart
-          let body = {};
-          try {
-            const rawBody = await readRawBody(req);
-            body = JSON.parse(rawBody.toString("utf8"));
-          } catch (error) {
-            sendJson(res, 400, {
-              ok: false,
-              error: "bad_request",
-              message: "Invalid JSON in request body",
-            });
-            return;
-          }
-
-          const { fromTask } = body;
-
-          // Reset job: clean-slate or partial from a specific task
-          const { resetJobFromTask } = await import("../core/status-writer.js");
-          if (fromTask) {
-            await resetJobFromTask(jobDir, fromTask, { clearTokenUsage: true });
-          } else {
-            await resetJobToCleanSlate(jobDir, { clearTokenUsage: true });
-          }
-
-          // Spawn detached pipeline-runner process
-          const runnerPath = path.join(__dirname, "../core/pipeline-runner.js");
-          const base = process.env.PO_ROOT || DATA_DIR;
-          const env = {
-            ...process.env,
-            PO_ROOT: base,
-            PO_DATA_DIR: path.join(base, "pipeline-data"),
-            PO_PENDING_DIR: path.join(base, "pipeline-data", "pending"),
-            PO_CURRENT_DIR: path.join(base, "pipeline-data", "current"),
-            PO_COMPLETE_DIR: path.join(base, "pipeline-data", "complete"),
-            ...(fromTask && { PO_START_FROM_TASK: fromTask }),
-          };
-
-          const child = spawn(process.execPath, [runnerPath, jobId], {
-            env,
-            stdio: "ignore",
-            detached: true,
-          });
-
-          // Unref the child process so it runs in the background
-          child.unref();
-
-          // Send success response
-          sendJson(res, 202, {
-            ok: true,
-            jobId,
-            mode: "clean-slate",
-            spawned: true,
-          });
-        } finally {
-          // Always end restart guard
-          endRestart(jobId);
-        }
-      } catch (error) {
-        console.error(`Error handling POST /api/jobs/${jobId}/restart:`, error);
-
-        // Clean up restart guard on error
-        if (isRestartInProgress(jobId)) {
-          endRestart(jobId);
-        }
-
-        if (error.code === "ENOENT") {
-          sendJson(res, 404, {
-            ok: false,
-            code: "job_not_found",
-            message: "Job directory not found",
-          });
-        } else if (error.code === "spawn failed") {
-          sendJson(res, 500, {
-            ok: false,
-            code: "spawn_failed",
-            message: error.message || "Failed to spawn pipeline runner",
-          });
-        } else {
-          sendJson(res, 500, {
-            ok: false,
-            code: "internal_error",
-            message: "Internal server error",
-          });
-        }
-      }
-
-      return;
-    }
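
The restart route above resets the job state (clean slate by default, or from a specific task when fromTask is supplied), spawns a detached pipeline-runner process with the PO_* directory variables, and replies 202 before the run finishes; 409 signals a running job, an in-progress restart, or an unsupported lifecycle. A call sketch with a placeholder job id and task name and an assumed local port:

```js
// Restarting a job via POST /api/jobs/:jobId/restart.
// "demo-job" and "analyze-task" are placeholders; the port is assumed.
const res = await fetch("http://localhost:4000/api/jobs/demo-job/restart", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  // The handler parses the body as JSON, so send at least "{}".
  // Include fromTask to reset from a specific task instead of a clean slate.
  body: JSON.stringify({ fromTask: "analyze-task" }),
});

if (res.status === 202) {
  // Accepted response shape from the handler above:
  // { ok: true, jobId, mode: "clean-slate", spawned: true }
  const { jobId, mode, spawned } = await res.json();
  console.log(`Restart accepted for ${jobId}: mode=${mode}, spawned=${spawned}`);
} else if (res.status === 409) {
  // job_running, restart already in progress, or unsupported lifecycle
  console.warn(await res.json());
}
```
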
-
-    // Route: GET /api/jobs/:jobId
-    if (pathname.startsWith("/api/jobs/") && req.method === "GET") {
-      const jobId = pathname.substring("/api/jobs/".length);
-
-      try {
-        const result = await handleJobDetail(jobId);
-
-        if (result.ok) {
-          sendJson(res, 200, result);
-        } else {
-          switch (result.code) {
-            case "job_not_found":
-              sendJson(res, 404, result);
-              break;
-            case "bad_request":
-              sendJson(res, 400, result);
-              break;
-            default:
-              sendJson(res, 500, result);
-          }
-        }
-      } catch (error) {
-        console.error(`Error handling /api/jobs/${jobId}:`, error);
-        sendJson(res, 500, {
-          ok: false,
-          code: "internal_error",
-          message: "Internal server error",
-        });
-      }
-      return;
-    }
-
-    // Route: GET /favicon.svg
-    if (pathname === "/favicon.svg" && req.method === "GET") {
-      const faviconPath = path.join(__dirname, "public", "favicon.svg");
-
-      try {
-        const content = await fs.promises.readFile(faviconPath, "utf8");
-        res.writeHead(200, {
-          "Content-Type": "image/svg+xml",
-          "Cache-Control": "public, max-age=3600", // Cache for 1 hour
-        });
-        res.end(content);
-      } catch (error) {
-        console.error("Error serving favicon:", error);
-        res.writeHead(404);
-        res.end("Favicon not found");
-      }
-      return;
-    }
-
-    // Unknown API endpoint fallback (keep API responses in JSON)
-    if (pathname.startsWith("/api/")) {
-      res.writeHead(200, { "Content-Type": "application/json" });
-      res.end(
-        JSON.stringify({
-          success: false,
-          error: "Not found",
-          path: pathname,
-          method: req.method,
-        })
-      );
-      return;
-    }
-
-    // Prefer Vite middleware in development for non-API routes (HMR & asset serving)
-    if (viteServer && viteServer.middlewares) {
-      try {
-        // Let Vite handle all non-API requests (including assets). If Vite calls next,
-        // fall back to the static handlers below.
-        return viteServer.middlewares(req, res, () => {
-          if (pathname === "/" || pathname === "/index.html") {
-            serveStatic(res, path.join(__dirname, "dist", "index.html"));
-          } else if (pathname.startsWith("/assets/")) {
-            const assetPath = pathname.substring(1); // Remove leading slash
-            serveStatic(res, path.join(__dirname, "dist", assetPath));
-          } else if (pathname.startsWith("/public/")) {
-            const publicPath = pathname.substring(1); // Remove leading slash
-            serveStatic(
-              res,
-              path.join(__dirname, "public", publicPath.replace("public/", ""))
-            );
-          } else {
-            // Fallback to index.html for client-side routing
-            serveStatic(res, path.join(__dirname, "dist", "index.html"));
-          }
-        });
-      } catch (err) {
-        console.error("Vite middleware error:", err);
-        // Fallback to serving built assets
-        serveStatic(res, path.join(__dirname, "dist", "index.html"));
-      }
-    } else {
-      // No Vite dev server available; serve static files from dist/public as before
-      if (pathname === "/" || pathname === "/index.html") {
-        serveStatic(res, path.join(__dirname, "dist", "index.html"));
-      } else if (pathname.startsWith("/assets/")) {
-        // Serve assets from dist/assets
-        const assetPath = pathname.substring(1); // Remove leading slash
-        serveStatic(res, path.join(__dirname, "dist", assetPath));
-      } else if (pathname.startsWith("/public/")) {
-        // Serve static files from public directory
-        const publicPath = pathname.substring(1); // Remove leading slash
-        serveStatic(
-          res,
-          path.join(__dirname, "public", publicPath.replace("public/", ""))
-        );
-      } else {
-        // For any other route, serve the React app's index.html
-        // This allows client-side routing to work
-        serveStatic(res, path.join(__dirname, "dist", "index.html"));
-      }
-    }
-  });
-
+function createServer(serverDataDir = DATA_DIR) {
+  const app = buildExpressApp({ dataDir: serverDataDir, viteServer });
+  const server = http.createServer(app);
   return server;
 }
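
The replacement createServer delegates request handling to buildExpressApp and wraps the app in a plain node:http server, so callers keep the familiar listen/close surface. A minimal usage sketch (the import path and port are assumptions):

```js
// Minimal sketch: createServer returns a node:http server wrapping the Express app.
// The import path, data directory, and port are illustrative assumptions.
import { createServer } from "./server.js";

const server = createServer("/srv/prompt-orchestration"); // falls back to DATA_DIR when omitted
server.listen(4000, () => {
  console.log(`UI server listening on port ${server.address().port}`);
});
```
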
 
@@ -1917,7 +108,7 @@ function initializeWatcher() {
     absolutePaths,
     (changes) => {
       // Update state for each change and capture the last returned state.
-      // Prefer broadcasting
+      // Prefer broadcasting state returned by recordChange (if available)
       // to ensure tests and callers receive an up-to-date snapshot without
       // relying on mocked module-level getState behavior.
       let lastState = null;
@@ -1936,39 +127,6 @@ function initializeWatcher() {
   );
 }
 
-/**
- * Start the server
- */
-function start(customPort) {
-  const port = customPort || PORT;
-  const server = createServer();
-
-  server.listen(port, () => {
-    console.log(`Server running at http://localhost:${port}`);
-    console.log(`Watching paths: ${WATCHED_PATHS.join(", ")}`);
-
-    initializeWatcher();
-    startHeartbeat();
-  });
-
-  // Graceful shutdown
-  process.on("SIGINT", async () => {
-    console.log("\nShutting down gracefully...");
-
-    if (heartbeatTimer) clearInterval(heartbeatTimer);
-    if (watcher) await stopWatcher(watcher);
-
-    sseRegistry.closeAll();
-
-    server.close(() => {
-      console.log("Server closed");
-      process.exit(0);
-    });
-  });
-
-  return server;
-}
-
 /**
  * Start server with configurable data directory and port
  * @param {Object} options - Server options
@@ -1978,38 +136,38 @@ function start(customPort) {
  */
 async function startServer({ dataDir, port: customPort }) {
   try {
+    // Load environment variables from .env file for API keys and other config
+    await loadEnvironment();
+
     // Initialize config-bridge paths early to ensure consistent path resolution
     // This prevents path caching issues when dataDir changes between tests
     const { initPATHS } = await import("./config-bridge.node.js");
     initPATHS(dataDir);
 
     // Require PO_ROOT for non-test runs
-    if (!process.env.PO_ROOT) {
-
-
-
-
-
-
-
-
-
-
-      );
-      process.env.PO_ROOT = process.cwd();
-    }
+    if (!process.env.PO_ROOT && process.env.NODE_ENV !== "test") {
+      console.error(
+        "ERROR: PO_ROOT environment variable is required for non-test runs"
+      );
+      throw new Error(
+        "PO_ROOT environment variable is required for non-test runs"
+      );
+    } else if (!process.env.PO_ROOT) {
+      console.warn(
+        "WARNING: PO_ROOT not set, using process.cwd() in test mode"
+      );
     }
 
-    // Use customPort if provided, otherwise use PORT env var, otherwise use
+    // Use customPort if provided, otherwise use PORT env var, otherwise use default PORT constant
     const port =
       customPort !== undefined
         ? customPort
        : process.env.PORT
          ? parseInt(process.env.PORT)
-          :
+          : PORT;
 
-    // In development, start Vite in middlewareMode so
-    //
+    // In development, start Vite in middlewareMode so that Node server can serve
+    // client with HMR in a single process. We dynamically import Vite here
    // to avoid including it in production bundles.
    // Skip Vite entirely for API-only tests when DISABLE_VITE=1 is set.
    // Do not start Vite in tests to avoid dep-scan errors during teardown.
@@ -2031,7 +189,7 @@ async function startServer({ dataDir, port: customPort }) {
      }
    }
 
-    const server = createServer();
+    const server = createServer(dataDir);
 
    // Robust promise with proper error handling and race condition prevention
    await new Promise((resolve, reject) => {
@@ -2091,6 +249,7 @@ async function startServer({ dataDir, port: customPort }) {
    return {
      url: baseUrl,
      close: async () => {
+        console.log("[Server] Starting server cleanup...");
        // Clean up all resources
        if (heartbeatTimer) {
          clearInterval(heartbeatTimer);
@@ -2114,8 +273,14 @@ async function startServer({ dataDir, port: customPort }) {
          }
        }
 
-
-
+        console.log("[Server] Closing HTTP server...");
+        // Close HTTP server
+        return new Promise((resolve) => {
+          server.close(() => {
+            console.log("[Server] HTTP server closed");
+            resolve();
+          });
+        });
      },
    };
  } catch (error) {
@@ -2127,20 +292,17 @@ async function startServer({ dataDir, port: customPort }) {
 // Export for testing
 export {
   createServer,
-  start,
   startServer,
   broadcastStateUpdate,
   sseRegistry,
   initializeWatcher,
   state,
-  resolveJobLifecycle,
-  restartingJobs,
-  isRestartInProgress,
-  beginRestart,
-  endRestart,
 };
 
 // Start server if run directly
 if (import.meta.url === `file://${process.argv[1]}`) {
-
+  startServer({ dataDir: DATA_DIR }).catch((err) => {
+    console.error("Failed to start server:", err);
+    process.exit(1);
+  });
 }
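
Taken together, the reworked startServer loads the environment, enforces PO_ROOT outside of test runs, passes the caller's dataDir through createServer, and returns a handle whose close() now waits for the HTTP server to finish closing. A usage sketch under those assumptions (the import path, PO_ROOT value, dataDir, and port are illustrative):

```js
// Programmatic start/stop via the exported startServer (e.g. from a test harness).
// All paths and the port below are placeholders, not values from this diff.
import { startServer } from "./server.js";

process.env.PO_ROOT = "/srv/prompt-orchestration"; // required outside NODE_ENV=test

const { url, close } = await startServer({
  dataDir: "/srv/prompt-orchestration/pipeline-data",
  port: 4000, // optional: falls back to the PORT env var, then the PORT constant
});
console.log(`UI available at ${url}`);

// ...exercise the API at `url`...

await close(); // cleans up timers, then resolves once the HTTP server has closed
```
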