@ryanfw/prompt-orchestration-pipeline 0.0.1 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. package/README.md +415 -24
  2. package/package.json +46 -8
  3. package/src/api/files.js +48 -0
  4. package/src/api/index.js +149 -53
  5. package/src/api/validators/seed.js +141 -0
  6. package/src/cli/index.js +444 -29
  7. package/src/cli/run-orchestrator.js +39 -0
  8. package/src/cli/update-pipeline-json.js +47 -0
  9. package/src/components/DAGGrid.jsx +649 -0
  10. package/src/components/JobCard.jsx +96 -0
  11. package/src/components/JobDetail.jsx +159 -0
  12. package/src/components/JobTable.jsx +202 -0
  13. package/src/components/Layout.jsx +134 -0
  14. package/src/components/TaskFilePane.jsx +570 -0
  15. package/src/components/UploadSeed.jsx +239 -0
  16. package/src/components/ui/badge.jsx +20 -0
  17. package/src/components/ui/button.jsx +43 -0
  18. package/src/components/ui/card.jsx +20 -0
  19. package/src/components/ui/focus-styles.css +60 -0
  20. package/src/components/ui/progress.jsx +26 -0
  21. package/src/components/ui/select.jsx +27 -0
  22. package/src/components/ui/separator.jsx +6 -0
  23. package/src/config/paths.js +99 -0
  24. package/src/core/config.js +270 -9
  25. package/src/core/file-io.js +202 -0
  26. package/src/core/module-loader.js +157 -0
  27. package/src/core/orchestrator.js +275 -294
  28. package/src/core/pipeline-runner.js +95 -41
  29. package/src/core/progress.js +66 -0
  30. package/src/core/status-writer.js +331 -0
  31. package/src/core/task-runner.js +719 -73
  32. package/src/core/validation.js +120 -1
  33. package/src/lib/utils.js +6 -0
  34. package/src/llm/README.md +139 -30
  35. package/src/llm/index.js +222 -72
  36. package/src/pages/PipelineDetail.jsx +111 -0
  37. package/src/pages/PromptPipelineDashboard.jsx +223 -0
  38. package/src/providers/deepseek.js +3 -15
  39. package/src/ui/client/adapters/job-adapter.js +258 -0
  40. package/src/ui/client/bootstrap.js +120 -0
  41. package/src/ui/client/hooks/useJobDetailWithUpdates.js +619 -0
  42. package/src/ui/client/hooks/useJobList.js +50 -0
  43. package/src/ui/client/hooks/useJobListWithUpdates.js +335 -0
  44. package/src/ui/client/hooks/useTicker.js +26 -0
  45. package/src/ui/client/index.css +31 -0
  46. package/src/ui/client/index.html +18 -0
  47. package/src/ui/client/main.jsx +38 -0
  48. package/src/ui/config-bridge.browser.js +149 -0
  49. package/src/ui/config-bridge.js +149 -0
  50. package/src/ui/config-bridge.node.js +310 -0
  51. package/src/ui/dist/assets/index-CxcrauYR.js +22702 -0
  52. package/src/ui/dist/assets/style-D6K_oQ12.css +62 -0
  53. package/src/ui/dist/index.html +19 -0
  54. package/src/ui/endpoints/job-endpoints.js +300 -0
  55. package/src/ui/file-reader.js +216 -0
  56. package/src/ui/job-change-detector.js +83 -0
  57. package/src/ui/job-index.js +231 -0
  58. package/src/ui/job-reader.js +274 -0
  59. package/src/ui/job-scanner.js +188 -0
  60. package/src/ui/public/app.js +3 -1
  61. package/src/ui/server.js +1636 -59
  62. package/src/ui/sse-enhancer.js +149 -0
  63. package/src/ui/sse.js +204 -0
  64. package/src/ui/state-snapshot.js +252 -0
  65. package/src/ui/transformers/list-transformer.js +347 -0
  66. package/src/ui/transformers/status-transformer.js +307 -0
  67. package/src/ui/watcher.js +61 -7
  68. package/src/utils/dag.js +101 -0
  69. package/src/utils/duration.js +126 -0
  70. package/src/utils/id-generator.js +30 -0
  71. package/src/utils/jobs.js +7 -0
  72. package/src/utils/pipelines.js +44 -0
  73. package/src/utils/task-files.js +271 -0
  74. package/src/utils/ui.jsx +76 -0
  75. package/src/ui/public/index.html +0 -53
  76. package/src/ui/public/style.css +0 -341
package/src/cli/index.js CHANGED
@@ -1,63 +1,286 @@
1
1
  #!/usr/bin/env node
2
2
  import { Command } from "commander";
3
+ import { submitJobWithValidation } from "../api/index.js";
3
4
  import { PipelineOrchestrator } from "../api/index.js";
4
5
  import fs from "node:fs/promises";
6
+ import path from "node:path";
7
+ import { fileURLToPath } from "node:url";
8
+ import { spawn } from "node:child_process";
9
+ import { updatePipelineJson } from "./update-pipeline-json.js";
10
+
11
+ // Derive package root for resolving internal paths regardless of host CWD
12
+ const currentFile = fileURLToPath(import.meta.url);
13
+ const PKG_ROOT = path.dirname(path.dirname(path.dirname(currentFile)));
14
+
15
+ // Canonical stage names that must match src/core/task-runner.js
16
+ const STAGE_NAMES = [
17
+ "ingestion",
18
+ "preProcessing",
19
+ "promptTemplating",
20
+ "inference",
21
+ "parsing",
22
+ "validateStructure",
23
+ "validateQuality",
24
+ "critique",
25
+ "refine",
26
+ "finalValidation",
27
+ "integration",
28
+ ];
5
29
 
6
30
const program = new Command();

// Report the real package version instead of the stale hard-coded "1.0.0"
// (the published package is on the 0.x line; a frozen literal here misleads
// `--version` users). Best-effort: fall back to a placeholder if unreadable.
let cliVersion = "0.0.0";
try {
  const pkg = JSON.parse(
    await fs.readFile(path.join(PKG_ROOT, "package.json"), "utf8")
  );
  cliVersion = pkg.version ?? cliVersion;
} catch {
  // package.json missing/unparseable — keep the placeholder version
}

program
  .name("pipeline-orchestrator")
  .description("Pipeline orchestration system")
  .version(cliVersion)
  .option("-r, --root <path>", "Pipeline root (PO_ROOT)")
  .option("-p, --port <port>", "UI server port", "4000");
12
38
 
13
39
program
  .command("init")
  .description("Initialize pipeline configuration")
  .action(async () => {
    // Root defaults to ./pipelines under the caller's CWD unless --root given.
    const { root: rootOption } = program.opts();
    const root = rootOption || path.resolve(process.cwd(), "pipelines");

    // Create the config directory plus the four pipeline-data state
    // directories; drop a .gitkeep in each data directory so the empty
    // tree survives a git checkout.
    await fs.mkdir(path.join(root, "pipeline-config"), { recursive: true });
    for (const state of ["pending", "current", "complete", "rejected"]) {
      const stateDir = path.join(root, "pipeline-data", state);
      await fs.mkdir(stateDir, { recursive: true });
      await fs.writeFile(path.join(stateDir, ".gitkeep"), "");
    }

    // Seed an empty pipeline registry with the exact required shape.
    await fs.writeFile(
      path.join(root, "pipeline-config", "registry.json"),
      JSON.stringify({ pipelines: {} }, null, 2) + "\n"
    );

    console.log(`Pipeline configuration initialized at ${root}`);
  });
31
88
 
32
89
program
  .command("start")
  .description("Start the pipeline orchestrator with UI server")
  .action(async () => {
    const globalOptions = program.opts();
    const root = globalOptions.root || process.env.PO_ROOT;
    const port = globalOptions.port || "4000";

    // PO_ROOT (via --root or env) is mandatory; there is no safe default.
    if (!root) {
      console.error(
        "PO_ROOT is required. Use --root or set PO_ROOT to your pipeline root (e.g., ./demo)."
      );
      process.exit(1);
    }

    const absoluteRoot = path.isAbsolute(root)
      ? root
      : path.resolve(process.cwd(), root);
    process.env.PO_ROOT = absoluteRoot;

    console.log(`Using PO_ROOT=${absoluteRoot}`);
    console.log(`UI port=${port}`);

    let uiChild = null;
    let orchestratorChild = null;
    let childrenExited = 0;
    let exitCode = 0;

    // Kill any still-running children: SIGTERM first, escalate to SIGKILL
    // after 5s. Timers are unref'd so a pending escalation can never keep
    // the parent process alive on its own.
    const cleanup = () => {
      for (const child of [uiChild, orchestratorChild]) {
        if (child && !child.killed) {
          child.kill("SIGTERM");
          const escalate = setTimeout(() => {
            if (!child.killed) child.kill("SIGKILL");
          }, 5000);
          escalate.unref?.();
        }
      }
    };

    process.on("SIGINT", () => {
      console.log("\nReceived SIGINT, shutting down...");
      cleanup();
      process.exit(exitCode);
    });

    process.on("SIGTERM", () => {
      console.log("\nReceived SIGTERM, shutting down...");
      cleanup();
      process.exit(exitCode);
    });

    try {
      // Require prebuilt UI assets; the published package ships them.
      const distPath = path.join(PKG_ROOT, "src/ui/dist");
      try {
        await fs.access(distPath);
        console.log("UI build found, skipping build step");
      } catch {
        console.error(
          "UI assets missing. This indicates a source checkout. Run 'npm run ui:build' locally or install dev deps."
        );
        process.exit(1);
      }

      // Spawn the UI server with PORT taking precedence over PO_UI_PORT.
      console.log("Starting UI server...");
      const uiServerPath = path.join(PKG_ROOT, "src/ui/server.js");
      uiChild = spawn("node", [uiServerPath], {
        stdio: "pipe",
        env: {
          ...process.env,
          NODE_ENV: "production",
          PO_ROOT: absoluteRoot,
          PORT: port,
          PO_UI_PORT: undefined, // undefined values are dropped from the child env, so PORT wins
        },
      });

      // Pipe UI output with prefix
      uiChild.stdout.on("data", (data) => {
        console.log(`[ui] ${data.toString().trim()}`);
      });
      uiChild.stderr.on("data", (data) => {
        console.error(`[ui] ${data.toString().trim()}`);
      });

      // Spawn the orchestrator runner as a sibling process.
      console.log("Starting orchestrator...");
      const orchestratorPath = path.join(PKG_ROOT, "src/cli/run-orchestrator.js");
      orchestratorChild = spawn("node", [orchestratorPath], {
        stdio: "pipe",
        env: {
          ...process.env,
          NODE_ENV: "production",
          PO_ROOT: absoluteRoot,
        },
      });

      // Pipe orchestrator output with prefix
      orchestratorChild.stdout.on("data", (data) => {
        console.log(`[orc] ${data.toString().trim()}`);
      });
      orchestratorChild.stderr.on("data", (data) => {
        console.error(`[orc] ${data.toString().trim()}`);
      });

      // Kill-others-on-fail: the first abnormal exit records the failure
      // code and tears the sibling down.
      // BUGFIX vs previous version: a child killed by signal exits with
      // code === null, which used to flow into process.exit(null)
      // (reported as success) and could clobber the real failure code when
      // the sibling was torn down. Map signal deaths to 1 and keep only
      // the FIRST failure's code.
      const handleChildExit = (child, name) => {
        return (code, signal) => {
          console.log(
            `${name} process exited with code ${code}, signal ${signal}`
          );
          childrenExited++;

          if (code !== 0) {
            if (exitCode === 0) exitCode = code ?? 1;
            console.log(`${name} failed, terminating other process...`);
            cleanup();
          }

          if (childrenExited === 2 || (code !== 0 && childrenExited === 1)) {
            process.exit(exitCode);
          }
        };
      };

      uiChild.on("exit", handleChildExit(uiChild, "UI"));
      orchestratorChild.on(
        "exit",
        handleChildExit(orchestratorChild, "Orchestrator")
      );

      // Spawn failures (e.g. ENOENT) surface as 'error' events, not 'exit'.
      uiChild.on("error", (error) => {
        console.error(`UI process error: ${error.message}`);
        exitCode = 1;
        cleanup();
        process.exit(1);
      });

      orchestratorChild.on("error", (error) => {
        console.error(`Orchestrator process error: ${error.message}`);
        exitCode = 1;
        cleanup();
        process.exit(1);
      });
    } catch (error) {
      console.error(`Failed to start pipeline: ${error.message}`);
      cleanup();
      process.exit(1);
    }
  });
43
253
 
44
254
program
  .command("submit <seed-file>")
  .description("Submit a new job")
  .action(async (seedFile) => {
    try {
      const seed = JSON.parse(await fs.readFile(seedFile, "utf8"));

      // CONSISTENCY FIX: every other command honors the global --root
      // option (and `start` honors PO_ROOT); submit previously ignored
      // both and always used the CWD. Honor them here too, keeping the
      // old CWD behavior as the final fallback so existing usage works.
      const globalOptions = program.opts();
      const dataDir =
        globalOptions.root || process.env.PO_ROOT || process.cwd();

      const result = await submitJobWithValidation({
        dataDir,
        seedObject: seed,
      });

      if (result.success) {
        console.log(`Job submitted: ${result.jobId} (${result.jobName})`);
      } else {
        console.error(`Failed to submit job: ${result.message}`);
        process.exit(1);
      }
    } catch (error) {
      console.error(`Error submitting job: ${error.message}`);
      process.exit(1);
    }
  });
54
276
 
55
277
  program
56
278
  .command("status [job-name]")
57
279
  .description("Get job status")
58
280
  .action(async (jobName) => {
59
- const orchestrator = new PipelineOrchestrator({ autoStart: false });
60
- await orchestrator.initialize();
281
+ const orchestrator = await PipelineOrchestrator.create({
282
+ autoStart: false,
283
+ });
61
284
  if (jobName) {
62
285
  const status = await orchestrator.getStatus(jobName);
63
286
  console.log(JSON.stringify(status, null, 2));
@@ -67,4 +290,196 @@ program
67
290
  }
68
291
  });
69
292
 
293
program
  .command("add-pipeline <pipeline-slug>")
  .description("Add a new pipeline configuration")
  .action(async (pipelineSlug) => {
    const { root: rootOption } = program.opts();
    const root = rootOption || path.resolve(process.cwd(), "pipelines");

    // Slugs must be kebab-case so they are safe as directory names.
    if (!/^[a-z0-9-]+$/.test(pipelineSlug)) {
      console.error("Invalid pipeline slug: must be kebab-case (a-z0-9-)");
      process.exit(1);
    }

    try {
      const pipelineConfigDir = path.join(
        root,
        "pipeline-config",
        pipelineSlug
      );
      const tasksDir = path.join(pipelineConfigDir, "tasks");
      await fs.mkdir(tasksDir, { recursive: true });

      // Scaffold pipeline.json with an empty task list.
      const pipelineConfig = {
        name: pipelineSlug,
        version: "1.0.0",
        description: "New pipeline",
        tasks: [],
      };
      await fs.writeFile(
        path.join(pipelineConfigDir, "pipeline.json"),
        JSON.stringify(pipelineConfig, null, 2) + "\n"
      );

      // Scaffold an empty task registry module.
      await fs.writeFile(
        path.join(tasksDir, "index.js"),
        "export default {};\n"
      );

      // Register (or re-register) the pipeline in registry.json; a missing
      // or unparseable registry is replaced with a fresh empty one.
      const registryPath = path.join(root, "pipeline-config", "registry.json");
      let registry;
      try {
        registry = JSON.parse(await fs.readFile(registryPath, "utf8"));
        if (!registry.pipelines) {
          registry.pipelines = {};
        }
      } catch (error) {
        registry = { pipelines: {} };
      }

      registry.pipelines[pipelineSlug] = {
        name: pipelineSlug,
        description: "New pipeline",
        pipelinePath: `pipeline-config/${pipelineSlug}/pipeline.json`,
        taskRegistryPath: `pipeline-config/${pipelineSlug}/tasks/index.js`,
      };

      await fs.writeFile(
        registryPath,
        JSON.stringify(registry, null, 2) + "\n"
      );

      console.log(`Pipeline "${pipelineSlug}" added successfully`);
    } catch (error) {
      console.error(`Error adding pipeline: ${error.message}`);
      process.exit(1);
    }
  });
370
+
371
program
  .command("add-pipeline-task <pipeline-slug> <task-slug>")
  .description("Add a new task to a pipeline")
  .action(async (pipelineSlug, taskSlug) => {
    const globalOptions = program.opts();
    const root = globalOptions.root || path.resolve(process.cwd(), "pipelines");

    // Both slugs must be kebab-case so they are safe as file names.
    const kebabCaseRegex = /^[a-z0-9-]+$/;
    if (!kebabCaseRegex.test(pipelineSlug)) {
      console.error("Invalid pipeline slug: must be kebab-case (a-z0-9-)");
      process.exit(1);
    }
    if (!kebabCaseRegex.test(taskSlug)) {
      console.error("Invalid task slug: must be kebab-case (a-z0-9-)");
      process.exit(1);
    }

    // The pipeline's tasks directory must already exist (add-pipeline creates it).
    const tasksDir = path.join(root, "pipeline-config", pipelineSlug, "tasks");
    try {
      await fs.access(tasksDir);
    } catch (error) {
      console.error(
        `Pipeline "${pipelineSlug}" not found. Run add-pipeline first.`
      );
      process.exit(1);
    }

    try {
      // Generate one stub export per canonical stage.
      const taskFileContent = STAGE_NAMES.map((stageName, i) => {
        if (stageName === "ingestion") {
          return `// Step 1: Ingestion, ${getStagePurpose(stageName)}
export const ingestion = async ({ io, llm, data: { seed }, meta, flags }) => {

  return { output: {}, flags };
}`;
        }
        const stepNumber = i + 1;
        return `// Step ${stepNumber}: ${stageName.charAt(0).toUpperCase() + stageName.slice(1)}, ${getStagePurpose(stageName)}
export const ${stageName} = async ({ io, llm, data, meta, flags }) => {

  return { output: {}, flags };
}`;
      }).join("\n\n");

      await fs.writeFile(
        path.join(tasksDir, `${taskSlug}.js`),
        taskFileContent + "\n"
      );

      // Update tasks/index.js.
      // SECURITY FIX: the previous version ran eval() over file content to
      // recover the default-export object. Parse it without executing code:
      // files this tool writes are JSON-shaped, and hand-written literals
      // fall back to a key/value extraction (see parseTaskIndexObject).
      const indexFilePath = path.join(tasksDir, "index.js");
      let taskIndex = {};

      try {
        const existing = await fs.readFile(indexFilePath, "utf8");
        const exportMatch = existing.match(
          /export default\s+({[\s\S]*?})\s*;?\s*$/
        );
        if (exportMatch) {
          taskIndex = parseTaskIndexObject(exportMatch[1]);
        }
      } catch (error) {
        // If file is missing or invalid, start with empty object
        taskIndex = {};
      }

      // Add/replace task mapping
      taskIndex[taskSlug] = `./${taskSlug}.js`;

      // Sort keys alphabetically for stable output
      const sortedIndex = Object.fromEntries(
        Object.keys(taskIndex)
          .sort()
          .map((key) => [key, taskIndex[key]])
      );

      // Write back the index file with proper formatting
      const indexContent = `export default ${JSON.stringify(sortedIndex, null, 2)};\n`;
      await fs.writeFile(indexFilePath, indexContent);

      // Update pipeline.json to include the new task
      await updatePipelineJson(root, pipelineSlug, taskSlug);

      console.log(`Task "${taskSlug}" added to pipeline "${pipelineSlug}"`);
    } catch (error) {
      console.error(`Error adding task: ${error.message}`);
      process.exit(1);
    }
  });

// Parse a `{ "a": "./a.js", ... }` object literal WITHOUT executing code.
// Tries strict JSON first (the format this tool itself writes), then falls
// back to extracting simple quoted/unquoted key -> quoted value pairs from
// hand-edited JS literals. Returns {} when nothing parseable is found.
function parseTaskIndexObject(literal) {
  try {
    return JSON.parse(literal);
  } catch {
    const result = {};
    const pairRe = /['"]?([\w./-]+)['"]?\s*:\s*['"]([^'"]+)['"]/g;
    for (const m of literal.matchAll(pairRe)) {
      result[m[1]] = m[2];
    }
    return result;
  }
}
465
+
466
// Returns the short human-readable purpose line for a canonical stage
// name; used when generating the comment above each task-file stub.
// Unknown stage names get a generic fallback description.
function getStagePurpose(stageName) {
  switch (stageName) {
    case "ingestion":
      return "load/shape input for downstream stages (no external side-effects required)";
    case "preProcessing":
      return "prepare and clean data for main processing";
    case "promptTemplating":
      return "generate or format prompts for LLM interaction";
    case "inference":
      return "execute LLM calls or other model inference";
    case "parsing":
      return "extract and structure results from model outputs";
    case "validateStructure":
      return "ensure output meets expected format and schema";
    case "validateQuality":
      return "check content quality and completeness";
    case "critique":
      return "analyze and evaluate results against criteria";
    case "refine":
      return "improve and optimize outputs based on feedback";
    case "finalValidation":
      return "perform final checks before completion";
    case "integration":
      return "integrate results into downstream systems or workflows";
    default:
      return "handle stage-specific processing";
  }
}
484
+
70
485
  program.parse();
@@ -0,0 +1,39 @@
1
#!/usr/bin/env node
import { startOrchestrator } from "../core/orchestrator.js";

// Standalone orchestrator entry point, spawned by the `start` CLI command.
// Requires PO_ROOT to point at the pipeline root directory.
async function main() {
  const root = process.env.PO_ROOT;

  if (!root) {
    console.error(
      "PO_ROOT environment variable is required. Please set PO_ROOT to your pipeline root directory (e.g., ./demo)."
    );
    process.exit(1);
  }

  try {
    console.log(`Starting orchestrator with dataDir: ${root}`);
    const { stop } = await startOrchestrator({ dataDir: root });

    // Graceful shutdown on either termination signal.
    const shutdownOn = (signal) => {
      process.on(signal, async () => {
        console.log(`\nReceived ${signal}, shutting down orchestrator...`);
        await stop();
        process.exit(0);
      });
    };
    shutdownOn("SIGINT");
    shutdownOn("SIGTERM");
  } catch (error) {
    console.error("Orchestrator failed to start:", error.message);
    process.exit(1);
  }
}

main().catch((error) => {
  console.error("Unhandled error in orchestrator runner:", error);
  process.exit(1);
});
@@ -0,0 +1,47 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+
4
+ /**
5
+ * Updates pipeline.json to include a new task
6
+ * @param {string} root - The pipeline root directory
7
+ * @param {string} pipelineSlug - The pipeline slug
8
+ * @param {string} taskSlug - The task slug to add
9
+ */
10
+ export async function updatePipelineJson(root, pipelineSlug, taskSlug) {
11
+ const pipelineConfigPath = path.join(
12
+ root,
13
+ "pipeline-config",
14
+ pipelineSlug,
15
+ "pipeline.json"
16
+ );
17
+ let pipelineConfig = {};
18
+
19
+ try {
20
+ const pipelineContent = await fs.readFile(pipelineConfigPath, "utf8");
21
+ pipelineConfig = JSON.parse(pipelineContent);
22
+ } catch (error) {
23
+ // If file is missing or invalid, create minimal config
24
+ pipelineConfig = {
25
+ name: pipelineSlug,
26
+ version: "1.0.0",
27
+ description: "New pipeline",
28
+ tasks: [],
29
+ };
30
+ }
31
+
32
+ // Ensure tasks array exists
33
+ if (!Array.isArray(pipelineConfig.tasks)) {
34
+ pipelineConfig.tasks = [];
35
+ }
36
+
37
+ // Add task to the end of the list if not already present
38
+ if (!pipelineConfig.tasks.includes(taskSlug)) {
39
+ pipelineConfig.tasks.push(taskSlug);
40
+ }
41
+
42
+ // Write back pipeline.json
43
+ await fs.writeFile(
44
+ pipelineConfigPath,
45
+ JSON.stringify(pipelineConfig, null, 2) + "\n"
46
+ );
47
+ }