@ryanfw/prompt-orchestration-pipeline 0.0.1 → 0.4.0

This diff shows the changes between publicly available package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (76)
  1. package/README.md +415 -24
  2. package/package.json +46 -8
  3. package/src/api/files.js +48 -0
  4. package/src/api/index.js +149 -53
  5. package/src/api/validators/seed.js +141 -0
  6. package/src/cli/index.js +444 -29
  7. package/src/cli/run-orchestrator.js +39 -0
  8. package/src/cli/update-pipeline-json.js +47 -0
  9. package/src/components/DAGGrid.jsx +649 -0
  10. package/src/components/JobCard.jsx +96 -0
  11. package/src/components/JobDetail.jsx +159 -0
  12. package/src/components/JobTable.jsx +202 -0
  13. package/src/components/Layout.jsx +134 -0
  14. package/src/components/TaskFilePane.jsx +570 -0
  15. package/src/components/UploadSeed.jsx +239 -0
  16. package/src/components/ui/badge.jsx +20 -0
  17. package/src/components/ui/button.jsx +43 -0
  18. package/src/components/ui/card.jsx +20 -0
  19. package/src/components/ui/focus-styles.css +60 -0
  20. package/src/components/ui/progress.jsx +26 -0
  21. package/src/components/ui/select.jsx +27 -0
  22. package/src/components/ui/separator.jsx +6 -0
  23. package/src/config/paths.js +99 -0
  24. package/src/core/config.js +270 -9
  25. package/src/core/file-io.js +202 -0
  26. package/src/core/module-loader.js +157 -0
  27. package/src/core/orchestrator.js +275 -294
  28. package/src/core/pipeline-runner.js +95 -41
  29. package/src/core/progress.js +66 -0
  30. package/src/core/status-writer.js +331 -0
  31. package/src/core/task-runner.js +719 -73
  32. package/src/core/validation.js +120 -1
  33. package/src/lib/utils.js +6 -0
  34. package/src/llm/README.md +139 -30
  35. package/src/llm/index.js +222 -72
  36. package/src/pages/PipelineDetail.jsx +111 -0
  37. package/src/pages/PromptPipelineDashboard.jsx +223 -0
  38. package/src/providers/deepseek.js +3 -15
  39. package/src/ui/client/adapters/job-adapter.js +258 -0
  40. package/src/ui/client/bootstrap.js +120 -0
  41. package/src/ui/client/hooks/useJobDetailWithUpdates.js +619 -0
  42. package/src/ui/client/hooks/useJobList.js +50 -0
  43. package/src/ui/client/hooks/useJobListWithUpdates.js +335 -0
  44. package/src/ui/client/hooks/useTicker.js +26 -0
  45. package/src/ui/client/index.css +31 -0
  46. package/src/ui/client/index.html +18 -0
  47. package/src/ui/client/main.jsx +38 -0
  48. package/src/ui/config-bridge.browser.js +149 -0
  49. package/src/ui/config-bridge.js +149 -0
  50. package/src/ui/config-bridge.node.js +310 -0
  51. package/src/ui/dist/assets/index-CxcrauYR.js +22702 -0
  52. package/src/ui/dist/assets/style-D6K_oQ12.css +62 -0
  53. package/src/ui/dist/index.html +19 -0
  54. package/src/ui/endpoints/job-endpoints.js +300 -0
  55. package/src/ui/file-reader.js +216 -0
  56. package/src/ui/job-change-detector.js +83 -0
  57. package/src/ui/job-index.js +231 -0
  58. package/src/ui/job-reader.js +274 -0
  59. package/src/ui/job-scanner.js +188 -0
  60. package/src/ui/public/app.js +3 -1
  61. package/src/ui/server.js +1636 -59
  62. package/src/ui/sse-enhancer.js +149 -0
  63. package/src/ui/sse.js +204 -0
  64. package/src/ui/state-snapshot.js +252 -0
  65. package/src/ui/transformers/list-transformer.js +347 -0
  66. package/src/ui/transformers/status-transformer.js +307 -0
  67. package/src/ui/watcher.js +61 -7
  68. package/src/utils/dag.js +101 -0
  69. package/src/utils/duration.js +126 -0
  70. package/src/utils/id-generator.js +30 -0
  71. package/src/utils/jobs.js +7 -0
  72. package/src/utils/pipelines.js +44 -0
  73. package/src/utils/task-files.js +271 -0
  74. package/src/utils/ui.jsx +76 -0
  75. package/src/ui/public/index.html +0 -53
  76. package/src/ui/public/style.css +0 -341
package/src/core/orchestrator.js
@@ -1,335 +1,316 @@
- // ESM
+ // ESM Orchestrator - clean, test-friendly, no JSX or ellipses
  import fs from "node:fs/promises";
  import path from "node:path";
- import crypto from "node:crypto";
  import chokidar from "chokidar";
- import { spawn } from "node:child_process";
- import url from "node:url";
- import { validateSeed, formatValidationErrors } from "./validation.js";
- import { getConfig } from "./config.js";
- import { withRetry } from "./retry.js";
-
- export class Orchestrator {
-   constructor({ paths, pipelineDefinition }) {
-     this.paths = paths;
-     this.pipelineDefinition = pipelineDefinition;
-     this.runningProcesses = new Map();
-     this.watcher = null;
+ import { spawn as defaultSpawn } from "node:child_process";
+ import { getConfig, getPipelineConfig } from "./config.js";
+
+ /**
+  * Resolve canonical pipeline directories for the given data root.
+  * @param {string} dataDir
+  */
+ function resolveDirs(dataDir) {
+   // Normalize incoming dataDir: callers may pass either the project root,
+   // the pipeline-data root, or even pipeline-data/pending by mistake.
+   // Detect if 'pipeline-data' is present in the provided path and normalize
+   // to the canonical pipeline-data root to avoid duplicated segments.
+   const normalized = path.normalize(String(dataDir || ""));
+   const parts = normalized.split(path.sep).filter(Boolean);
+   const idx = parts.lastIndexOf("pipeline-data");
+   let root;
+   if (idx !== -1) {
+     // Preserve original root (drive letter on Windows, '/' on POSIX, or '' for relative)
+     const originalRoot = path.parse(normalized).root; // '' | '/' | 'C:\\'
+     if (originalRoot) {
+       // Prepend original root to preserve absolute / drive-letter semantics
+       root = path.join(originalRoot, ...parts.slice(0, idx + 1));
+     } else {
+       // Relative input -> keep relative result
+       root = path.join(...parts.slice(0, idx + 1));
+     }
+   } else {
+     root = path.join(dataDir, "pipeline-data");
    }

-   async start() {
-     await fs.mkdir(this.paths.pending, { recursive: true });
-     await fs.mkdir(this.paths.current, { recursive: true });
-     await fs.mkdir(this.paths.complete, { recursive: true });
+   const pending = path.join(root, "pending");
+   const current = path.join(root, "current");
+   const complete = path.join(root, "complete");
+   return { dataDir: root, pending, current, complete };
+ }

-     for (const name of await this.#listDirs(this.paths.current)) {
-       this.#ensureRunner(name);
-     }
+ /**
+  * Ensure directory exists (mkdir -p).
+  */
+ async function ensureDir(dir) {
+   await fs.mkdir(dir, { recursive: true });
+ }

-     const config = getConfig();
-     this.watcher = chokidar
-       .watch(path.join(this.paths.pending, "*-seed.json"), {
-         awaitWriteFinish: {
-           stabilityThreshold: config.orchestrator.watchStabilityThreshold,
-           pollInterval: config.orchestrator.watchPollInterval,
-         },
-       })
-       .on("add", (p) => this.#onSeed(p));
-
-     return this;
+ /**
+  * Move a file atomically by writing through a tmp file, then rename.
+  * If src is on same FS, a regular rename is enough. We keep it simple for tests.
+  */
+ async function moveFile(src, dest) {
+   await fs.mkdir(path.dirname(dest), { recursive: true });
+   await fs.rename(src, dest);
+ }
+
+ /**
+  * Start the orchestrator.
+  * - Ensures pipeline dirs
+  * - Watches pending/*.json seeds
+  * - On add: move to current/{jobId}/seed.json and spawn runner
+  *
+  * @param {{ dataDir: string, spawn?: typeof defaultSpawn, watcherFactory?: Function, testMode?: boolean }} opts
+  * @returns {Promise<{ stop: () => Promise<void> }>}
+  */
+ export async function startOrchestrator(opts) {
+   const dataDir = opts?.dataDir;
+   if (!dataDir) throw new Error("startOrchestrator: dataDir is required");
+   const spawn = opts?.spawn ?? defaultSpawn;
+   const watcherFactory = opts?.watcherFactory ?? chokidar.watch;
+   const testMode = !!opts?.testMode;
+
+   const dirs = resolveDirs(dataDir);
+   await ensureDir(dirs.pending);
+   await ensureDir(dirs.current);
+   await ensureDir(dirs.complete);
+
+   /** @type {Map<string, import('node:child_process').ChildProcess>} */
+   const running = new Map();
+
+   // Guard: if job already running or already in current/, do nothing
+   function isJobActive(name) {
+     return running.has(name);
    }

-   async stop() {
-     if (this.watcher) {
-       await this.watcher.close();
-       this.watcher = null;
-     }
+   function currentSeedPath(name) {
+     return path.join(dirs.current, name, "seed.json");
+   }

-     for (const [name, info] of this.runningProcesses) {
-       info.process.kill("SIGTERM");
-     }
+   async function handleSeedAdd(filePath) {
+     if (!filePath || !filePath.endsWith(".json")) return;

-     // Skip the shutdown timeout in test environment
-     if (process.env.NODE_ENV !== "test") {
-       const config = getConfig();
-       await new Promise((r) =>
-         setTimeout(r, config.orchestrator.shutdownTimeout)
-       );
+     // Extract jobId from filename pattern: ^([A-Za-z0-9-_]+)-seed\.json$
+     const base = path.basename(filePath);
+     const match = base.match(/^([A-Za-z0-9-_]+)-seed\.json$/);
+     if (!match) {
+       console.warn("Rejecting non-id seed file:", base);
+       return;
      }
+     const jobId = match[1];

-     for (const [name, info] of this.runningProcesses) {
-       if (!info.process.killed) info.process.kill("SIGKILL");
+     let seed;
+     try {
+       const text = await fs.readFile(filePath, "utf8");
+       seed = JSON.parse(text);
+     } catch {
+       // If not valid JSON, ignore and leave file for later/manual cleanup
+       return;
      }

-     this.runningProcesses.clear();
-   }
-
-   async #onSeed(seedPath) {
-     const base = path.basename(seedPath);
-     const name = base.replace(/-seed\.json$/, "");
-     const workDir = path.join(this.paths.current, name);
-     const lockFile = path.join(this.paths.current, `${name}.lock`);
-
+     // If already running or already moved to current, skip (idempotent)
+     if (isJobActive(jobId)) return;
+     const dest = currentSeedPath(jobId);
      try {
-       await fs.writeFile(lockFile, process.pid.toString(), { flag: "wx" });
-     } catch (err) {
-       if (err.code === "EEXIST") return;
-       throw err;
-     }
+       await fs.access(dest);
+       // Already picked up
+       return;
+     } catch {}

+     // Move seed to current/{jobId}/seed.json
+     console.log(`[Orchestrator] Moving file from ${filePath} to ${dest}`);
      try {
-       try {
-         await fs.mkdir(workDir, { recursive: false });
-       } catch (err) {
-         if (err.code === "EEXIST") return;
-         throw err;
-       }
+       await moveFile(filePath, dest);
+       console.log(`[Orchestrator] Successfully moved file to ${dest}`);
+     } catch (error) {
+       console.log(`[Orchestrator] Failed to move file: ${error.message}`);
+       throw error; // Re-throw to see the actual error
+     }

-       const seed = JSON.parse(await fs.readFile(seedPath, "utf8"));
-
-       // Validate seed file structure
-       const validation = validateSeed(seed);
-       if (!validation.valid) {
-         const errorMsg = formatValidationErrors(validation.errors);
-         console.error(`Invalid seed file ${base}:\n${errorMsg}`);
-         // Move invalid seed to a rejected directory for inspection
-         const rejectedDir = path.join(
-           path.dirname(this.paths.pending),
-           "rejected"
-         );
-         await fs.mkdir(rejectedDir, { recursive: true });
-         const rejectedPath = path.join(rejectedDir, base);
-         await fs.rename(seedPath, rejectedPath);
-         return;
-       }
+     // Ensure tasks directory and status file exist in work dir
+     const workDir = path.dirname(dest);
+     const tasksDir = path.join(workDir, "tasks");
+     await fs.mkdir(tasksDir, { recursive: true });

-       const pipelineId = this.#makeId();
-
-       await this.#atomicWrite(
-         path.join(workDir, "seed.json"),
-         JSON.stringify(seed, null, 2)
-       );
-       await this.#atomicWrite(
-         path.join(workDir, "tasks-status.json"),
-         JSON.stringify(
-           {
-             pipelineId,
-             name,
-             current: null,
-             createdAt: new Date().toISOString(),
-             tasks: {},
-           },
-           null,
-           2
-         )
-       );
-
-       await fs.mkdir(path.join(workDir, "tasks"), { recursive: true });
-
-       // Move the seed file to a 'processed' directory after successful processing
-       const processedDir = path.join(
-         path.dirname(this.paths.pending),
-         "processed"
-       );
-       await fs.mkdir(processedDir, { recursive: true });
-       const processedPath = path.join(processedDir, base);
-       await fs.rename(seedPath, processedPath);
-     } finally {
-       try {
-         await fs.unlink(lockFile);
-       } catch {}
+     const statusPath = path.join(workDir, "tasks-status.json");
+     try {
+       await fs.access(statusPath);
+     } catch {
+       const status = {
+         id: jobId,
+         name: seed?.name ?? jobId,
+         pipeline: seed?.pipeline, // Include pipeline slug from seed
+         createdAt: new Date().toISOString(),
+         state: "pending",
+         tasks: {}, // Initialize empty tasks object for pipeline runner
+       };
+       await fs.writeFile(statusPath, JSON.stringify(status, null, 2));
      }
-
-     // Start runner after all file operations are complete
-     this.#ensureRunner(name);
+     // Spawn runner for this job
+     const child = spawnRunner(jobId, dirs, running, spawn, testMode, seed);
+     // child registered inside spawnRunner
+     return child;
    }

-   #ensureRunner(name) {
-     if (this.runningProcesses.has(name)) return;
-
-     const config = getConfig();
-
-     // Wrap process spawn in retry logic (fire-and-forget)
-     // This is intentionally not awaited - we want to start runners asynchronously
-     // and let them run in the background. Failures are handled via dead letter queue.
-     withRetry(() => this.#spawnRunner(name), {
-       maxAttempts: config.orchestrator.processSpawnRetries,
-       initialDelay: config.orchestrator.processSpawnRetryDelay,
-       onRetry: ({ attempt, delay, error }) => {
-         console.warn(
-           `Failed to start pipeline ${name} (attempt ${attempt}): ${error.message}. Retrying in ${delay}ms...`
-         );
-       },
-       shouldRetry: (error) => {
-         // Don't retry if the error is due to missing files or invalid config
-         const nonRetryableCodes = ["ENOENT", "EACCES", "MODULE_NOT_FOUND"];
-         const nonRetryableMessages = ["Invalid pipeline"];
-         if (error.code && nonRetryableCodes.includes(error.code)) {
-           return false;
-         }
-         if (error.message && nonRetryableMessages.includes(error.message)) {
-           return false;
-         }
-         return true;
-       },
-     }).catch((error) => {
-       console.error(
-         `Failed to start pipeline ${name} after ${config.orchestrator.processSpawnRetries} attempts:`,
-         error
-       );
-       // Move to dead letter queue
-       this.#moveToDeadLetter(name, error).catch((dlqError) => {
-         console.error(`Failed to move ${name} to dead letter queue:`, dlqError);
-       });
+   // Watch pending directory for seeds
+   const watchPattern = path.join(dirs.pending, "*.json");
+   console.log("Orchestrator watching pattern:", watchPattern);
+   const watcher = watcherFactory(watchPattern, {
+     ignoreInitial: false,
+     awaitWriteFinish: false, // Disable awaitWriteFinish for faster detection
+     depth: 0,
+   });
+
+   // Wait for watcher to be ready before resolving
+   await new Promise((resolve, reject) => {
+     watcher.on("ready", () => {
+       console.log("Orchestrator watcher is ready");
+       resolve();
      });
-   }

-   #spawnRunner(name) {
-     return new Promise((resolve, reject) => {
-       const __dirname = path.dirname(url.fileURLToPath(import.meta.url));
-       const runnerPath = path.join(__dirname, "pipeline-runner.js");
-
-       const env = {
-         ...process.env,
-         PO_ROOT: process.cwd(),
-         PO_DATA_DIR: path.relative(
-           process.cwd(),
-           path.dirname(this.paths.pending)
-         ),
-         PO_CURRENT_DIR: this.paths.current,
-         PO_COMPLETE_DIR: this.paths.complete,
-         PO_CONFIG_DIR: path.join(process.cwd(), "pipeline-config"),
-         PO_PIPELINE_PATH:
-           this.pipelineDefinition?.__path ||
-           path.join(process.cwd(), "pipeline-config", "pipeline.json"),
-         PO_TASK_REGISTRY: path.join(
-           process.cwd(),
-           "pipeline-config",
-           "tasks/index.js"
-         ),
-       };
-
-       const child = spawn(process.execPath, [runnerPath, name], {
-         stdio: ["ignore", "inherit", "inherit"],
-         env,
-         cwd: process.cwd(),
-       });
-
-       // Track if process started successfully
-       let started = false;
-
-       // Consider spawn successful after a short delay
-       const startupTimeout = setTimeout(() => {
-         started = true;
-         resolve();
-       }, 100);
-
-       this.runningProcesses.set(name, {
-         process: child,
-         startedAt: new Date().toISOString(),
-         name,
-       });
-
-       child.on("exit", (code, signal) => {
-         clearTimeout(startupTimeout);
-         this.runningProcesses.delete(name);
-         if (code !== 0) {
-           console.error(
-             `Pipeline ${name} exited with code ${code}, signal ${signal}`
-           );
-         } else {
-           console.log(`Pipeline ${name} completed successfully`);
-         }
-       });
-
-       child.on("error", (err) => {
-         clearTimeout(startupTimeout);
-         this.runningProcesses.delete(name);
-         if (!started) {
-           reject(err);
-         } else {
-           console.error(`Pipeline ${name} encountered error:`, err);
-         }
-       });
+     watcher.on("error", (error) => {
+       console.log("Orchestrator watcher error:", error);
+       reject(error);
      });
-   }
+   });

-   async #moveToDeadLetter(name, error) {
-     const workDir = path.join(this.paths.current, name);
-     const deadLetterDir = path.join(
-       path.dirname(this.paths.pending),
-       "dead-letter"
-     );
-     await fs.mkdir(deadLetterDir, { recursive: true });
-
-     const errorLog = {
-       name,
-       error: {
-         message: error.message,
-         stack: error.stack,
-       },
-       timestamp: new Date().toISOString(),
-       attempts: getConfig().orchestrator.processSpawnRetries,
-     };
-
-     await this.#atomicWrite(
-       path.join(deadLetterDir, `${name}-error.json`),
-       JSON.stringify(errorLog, null, 2)
-     );
+   watcher.on("add", (file) => {
+     console.log("Orchestrator detected file add:", file);
+     // Return the promise so tests awaiting the add handler block until processing completes
+     return handleSeedAdd(file);
+   });

-     // Move the work directory to dead letter
-     const deadLetterWorkDir = path.join(deadLetterDir, name);
+   async function stop() {
      try {
-       await fs.rename(workDir, deadLetterWorkDir);
-     } catch (err) {
-       // If rename fails, try to copy
-       console.warn(`Could not move ${name} to dead letter, attempting copy`);
-       // If rename fails, try to copy
-       console.warn(`Could not move ${name} to dead letter, attempting copy`);
+       await watcher.close();
+     } catch {}
+
+     // Try graceful shutdown for children
+     const kills = [];
+     for (const [name, child] of running.entries()) {
        try {
-         await this.#copyDirRecursive(workDir, deadLetterWorkDir);
-         await fs.rm(workDir, { recursive: true, force: true });
-       } catch (copyErr) {
-         console.error(`Failed to copy ${name} to dead letter:`, copyErr);
-       }
+         if (!child.killed) {
+           child.kill("SIGTERM");
+           // Give tests a chance to simulate exit; then force kill
+           setTimeout(() => {
+             try {
+               !child.killed && child.kill("SIGKILL");
+             } catch {}
+           }, 500);
+         }
+       } catch {}
+       kills.push(Promise.resolve());
      }
+     await Promise.all(kills);
+     running.clear();
    }

-   async #copyDirRecursive(src, dest) {
-     await fs.mkdir(dest, { recursive: true });
-     const entries = await fs.readdir(src, { withFileTypes: true });
-     for (const entry of entries) {
-       const srcPath = path.join(src, entry.name);
-       const destPath = path.join(dest, entry.name);
-       if (entry.isDirectory()) {
-         await this.#copyDirRecursive(srcPath, destPath);
-       } else if (entry.isFile()) {
-         await fs.copyFile(srcPath, destPath);
+   return { stop };
+ }
+
+ /**
+  * Spawn a pipeline runner. In testMode we still call spawn() so tests can assert,
+  * but we resolve immediately and let tests drive the lifecycle (emit 'exit', etc.).
+  *
+  * @param {string} jobId
+  * @param {{dataDir:string,pending:string,current:string,complete:string}} dirs
+  * @param {Map<string, import('node:child_process').ChildProcess>} running
+  * @param {typeof defaultSpawn} spawn
+  * @param {boolean} testMode
+  * @param {Object} seed - Seed data containing pipeline information
+  */
+ function spawnRunner(jobId, dirs, running, spawn, testMode, seed) {
+   const runnerPath = path.join(
+     process.cwd(),
+     "src",
+     "core",
+     "pipeline-runner.js"
+   );
+
+   const configSnapshot = getConfig();
+   const availablePipelines = Object.keys(configSnapshot?.pipelines ?? {});
+   const pipelineSlug = seed?.pipeline;
+
+   console.log("[Orchestrator] spawnRunner invoked", {
+     jobId,
+     pipelineSlug: pipelineSlug ?? null,
+     availablePipelines,
+     seedKeys: seed ? Object.keys(seed) : null,
+   });
+
+   if (!availablePipelines.length) {
+     console.warn(
+       "[Orchestrator] No pipelines registered in config() when spawnRunner invoked"
+     );
+   } else if (!availablePipelines.includes(pipelineSlug)) {
+     console.warn(
+       "[Orchestrator] Requested pipeline slug missing from registry snapshot",
+       {
+         jobId,
+         pipelineSlug,
+         availablePipelines,
        }
-     }
-   }
-   async #listDirs(dir) {
-     try {
-       const entries = await fs.readdir(dir, { withFileTypes: true });
-       return entries.filter((e) => e.isDirectory()).map((e) => e.name);
-     } catch (err) {
-       if (err.code === "ENOENT") return [];
-       throw err;
-     }
+     );
    }

-   #makeId() {
-     return (
-       "pl-" +
-       new Date().toISOString().replaceAll(/[:.]/g, "-") +
-       "-" +
-       crypto.randomBytes(3).toString("hex")
+   if (!pipelineSlug) {
+     console.error("[Orchestrator] Missing pipeline slug in seed", {
+       jobId,
+       seed,
+       availablePipelines,
+     });
+     throw new Error(
+       "Pipeline slug is required in seed data. Include a 'pipeline' field in your seed."
      );
    }

-   async #atomicWrite(file, data) {
-     const tmp = file + ".tmp";
-     await fs.writeFile(tmp, data);
-     await fs.rename(tmp, file);
+   let pipelineConfig;
+   try {
+     pipelineConfig = getPipelineConfig(pipelineSlug);
+   } catch (error) {
+     console.error("[Orchestrator] Pipeline lookup failed", {
+       jobId,
+       pipelineSlug,
+       availablePipelines,
+     });
+     throw error;
+   }
+
+   // Use environment variables with explicit slug propagation
+   const env = {
+     ...process.env,
+     PO_DATA_DIR: dirs.dataDir,
+     PO_PENDING_DIR: dirs.pending,
+     PO_CURRENT_DIR: dirs.current,
+     PO_COMPLETE_DIR: dirs.complete,
+     PO_PIPELINE_SLUG: pipelineSlug,
+     // Force mock provider for testing
+     PO_DEFAULT_PROVIDER: "mock",
+   };
+
+   // Always call spawn so tests can capture it
+   const child = spawn(process.execPath, [runnerPath, jobId], {
+     stdio: ["ignore", "inherit", "inherit"],
+     env,
+     cwd: process.cwd(),
+   });
+
+   running.set(jobId, child);
+
+   child.on("exit", () => {
+     running.delete(jobId);
+   });
+   child.on("error", () => {
+     running.delete(jobId);
+   });
+
+   // In test mode: return immediately; in real mode you might await readiness
+   if (testMode) {
+     return child;
    }
+
+   // Non-test: we can consider "started" immediately for simplicity
+   return child;
  }
+
+ export default { startOrchestrator };
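
The hunk above replaces the class-based Orchestrator with a functional startOrchestrator({ dataDir, ... }) entry point that watches pipeline-data/pending for {jobId}-seed.json files, moves each seed to current/{jobId}/seed.json, and spawns a pipeline runner, with the seed now required to carry a "pipeline" field. A minimal usage sketch follows; the driver script, import specifier, and seed values are illustrative assumptions, not part of the published package.

```js
// Hypothetical driver: start the 0.4.0-style orchestrator and submit one seed.
// Only startOrchestrator(), the pending/ layout, the {jobId}-seed.json naming,
// and the required "pipeline" field come from the diff above; the paths, names,
// and import specifier are assumed for illustration.
import fs from "node:fs/promises";
import path from "node:path";
import { startOrchestrator } from "./src/core/orchestrator.js";

const dataDir = path.resolve("pipeline-data");
const orchestrator = await startOrchestrator({ dataDir });

// Seeds dropped into pending/ must match {jobId}-seed.json and name a pipeline.
const seed = { name: "demo job", pipeline: "example-pipeline" };
await fs.writeFile(
  path.join(dataDir, "pending", "demo-001-seed.json"),
  JSON.stringify(seed, null, 2)
);

// Later: close the watcher and signal any running pipeline runner processes.
await orchestrator.stop();
```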