@ryanfw/prompt-orchestration-pipeline 0.0.1 → 0.3.0
- package/README.md +415 -24
- package/package.json +45 -8
- package/src/api/files.js +48 -0
- package/src/api/index.js +149 -53
- package/src/api/validators/seed.js +141 -0
- package/src/cli/index.js +456 -29
- package/src/cli/run-orchestrator.js +39 -0
- package/src/cli/update-pipeline-json.js +47 -0
- package/src/components/DAGGrid.jsx +649 -0
- package/src/components/JobCard.jsx +96 -0
- package/src/components/JobDetail.jsx +159 -0
- package/src/components/JobTable.jsx +202 -0
- package/src/components/Layout.jsx +134 -0
- package/src/components/TaskFilePane.jsx +570 -0
- package/src/components/UploadSeed.jsx +239 -0
- package/src/components/ui/badge.jsx +20 -0
- package/src/components/ui/button.jsx +43 -0
- package/src/components/ui/card.jsx +20 -0
- package/src/components/ui/focus-styles.css +60 -0
- package/src/components/ui/progress.jsx +26 -0
- package/src/components/ui/select.jsx +27 -0
- package/src/components/ui/separator.jsx +6 -0
- package/src/config/paths.js +99 -0
- package/src/core/config.js +270 -9
- package/src/core/file-io.js +202 -0
- package/src/core/module-loader.js +157 -0
- package/src/core/orchestrator.js +275 -294
- package/src/core/pipeline-runner.js +95 -41
- package/src/core/progress.js +66 -0
- package/src/core/status-writer.js +331 -0
- package/src/core/task-runner.js +719 -73
- package/src/core/validation.js +120 -1
- package/src/lib/utils.js +6 -0
- package/src/llm/README.md +139 -30
- package/src/llm/index.js +222 -72
- package/src/pages/PipelineDetail.jsx +111 -0
- package/src/pages/PromptPipelineDashboard.jsx +223 -0
- package/src/providers/deepseek.js +3 -15
- package/src/ui/client/adapters/job-adapter.js +258 -0
- package/src/ui/client/bootstrap.js +120 -0
- package/src/ui/client/hooks/useJobDetailWithUpdates.js +619 -0
- package/src/ui/client/hooks/useJobList.js +50 -0
- package/src/ui/client/hooks/useJobListWithUpdates.js +335 -0
- package/src/ui/client/hooks/useTicker.js +26 -0
- package/src/ui/client/index.css +31 -0
- package/src/ui/client/index.html +18 -0
- package/src/ui/client/main.jsx +38 -0
- package/src/ui/config-bridge.browser.js +149 -0
- package/src/ui/config-bridge.js +149 -0
- package/src/ui/config-bridge.node.js +310 -0
- package/src/ui/dist/assets/index-BDABnI-4.js +33399 -0
- package/src/ui/dist/assets/style-Ks8LY8gB.css +28496 -0
- package/src/ui/dist/index.html +19 -0
- package/src/ui/endpoints/job-endpoints.js +300 -0
- package/src/ui/file-reader.js +216 -0
- package/src/ui/job-change-detector.js +83 -0
- package/src/ui/job-index.js +231 -0
- package/src/ui/job-reader.js +274 -0
- package/src/ui/job-scanner.js +188 -0
- package/src/ui/public/app.js +3 -1
- package/src/ui/server.js +1636 -59
- package/src/ui/sse-enhancer.js +149 -0
- package/src/ui/sse.js +204 -0
- package/src/ui/state-snapshot.js +252 -0
- package/src/ui/transformers/list-transformer.js +347 -0
- package/src/ui/transformers/status-transformer.js +307 -0
- package/src/ui/watcher.js +61 -7
- package/src/utils/dag.js +101 -0
- package/src/utils/duration.js +126 -0
- package/src/utils/id-generator.js +30 -0
- package/src/utils/jobs.js +7 -0
- package/src/utils/pipelines.js +44 -0
- package/src/utils/task-files.js +271 -0
- package/src/utils/ui.jsx +76 -0
- package/src/ui/public/index.html +0 -53
- package/src/ui/public/style.css +0 -341
package/README.md
CHANGED

@@ -38,7 +38,7 @@ The outer pipeline manages runs, state, and isolation. It is responsible for:

 - Assigning a pipeline run ID for each new submission.
 - Creating predictable directories for pending seeds, active runs, and completed runs.
-- Spawning isolated processes for each task (so one failure doesn
+- Spawning isolated processes for each task (so one failure doesn't crash others).
 - Tracking progress in a run‑scoped status file.
 - Promoting completed runs into a repository of results with audit metadata.
@@ -57,8 +57,8 @@ my-project/
 ```mermaid
 flowchart TD
   A["pipeline-data/pending/*-seed.json"] --> B[Orchestrator]
-  B --> C["create pipeline-data/current
-  B --> D["init pipeline-data/current
+  B --> C["create pipeline-data/current/{jobId}/seed.json"]
+  B --> D["init pipeline-data/current/{jobId}/tasks-status.json"]
   B --> E[Read pipeline-config/pipeline.json]
   E --> F[Spawn task runner]
   F --> G["write tasks/<task>/letter.json"]

@@ -68,7 +68,7 @@ flowchart TD
   J --> K{More tasks?}
   K -->|yes| F
   K -->|no| L[Promote to complete]
-  L --> M["pipeline-data/complete
+  L --> M["pipeline-data/complete/{jobId}/**"]
   L --> N["append pipeline-data/complete/runs.jsonl"]
 ```

@@ -163,13 +163,20 @@ flowchart TD
 ```
 my-project/
 ├── pipeline-config/
-│   ├──
-│   └──
-│   ├──
-│   ├── task
-│   │   └──
-│
-│
+│   ├── registry.json             # Pipeline registry (maps slugs → configurations)
+│   └── pipelines/                # Pipeline definitions (slugged layout)
+│       ├── content/
+│       │   ├── pipeline.json     # Pipeline definition (ordered list of task IDs)
+│       │   └── tasks/            # Task implementations
+│       │       ├── index.js      # Task registry (maps task IDs → modules)
+│       │       ├── task-a/
+│       │       │   └── index.js
+│       │       └── task-b/
+│       │           └── index.js
+│       └── analytics/            # Additional pipeline (example)
+│           ├── pipeline.json
+│           └── tasks/
+│               └── index.js
 ├── pipeline-data/                # Runtime directories (auto‑created/managed)
 │   ├── pending/
 │   ├── current/
@@ -178,7 +185,28 @@ my-project/
 └── .pipelinerc.json              # Optional CLI config
 ```

-**`pipeline.json` (example)**
+**`pipeline-config/registry.json` (example)**
+
+```json
+{
+  "pipelines": {
+    "content": {
+      "name": "Content Generation Pipeline",
+      "description": "Generates and processes content using LLM tasks",
+      "pipelinePath": "pipeline-config/content/pipeline.json",
+      "taskRegistryPath": "pipeline-config/content/tasks/index.js"
+    },
+    "analytics": {
+      "name": "Analytics Pipeline",
+      "description": "Processes data for analytics and reporting",
+      "pipelinePath": "pipeline-config/analytics/pipeline.json",
+      "taskRegistryPath": "pipeline-config/analytics/tasks/index.js"
+    }
+  }
+}
+```
+
+**`pipeline-config/pipelines/content/pipeline.json` (example)**

 ```json
 {
@@ -186,7 +214,7 @@ my-project/
 }
 ```

-**`pipeline-config/tasks/index.js` (example registry)**
+**`pipeline-config/pipelines/content/tasks/index.js` (example registry)**

 ```js
 // ESM registry mapping task IDs to loader functions or modules
@@ -196,7 +224,7 @@ export default {
 };
 ```

-> The orchestrator resolves
+> The orchestrator resolves pipeline slugs from `registry.json` and loads the corresponding pipeline configuration and task registry.

 ### Install & scripts

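To make the resolution step above concrete, here is a minimal sketch of a registry lookup. The helper name (`resolvePipeline`) and error behavior are assumptions for illustration; the package's actual logic lives in its config/orchestrator modules.

```js
// Minimal sketch of registry-based resolution (hypothetical helper;
// not the package's actual implementation).
import { readFile } from "fs/promises";
import path from "path";
import { pathToFileURL } from "url";

export async function resolvePipeline(rootDir, slug) {
  const registryPath = path.join(rootDir, "pipeline-config", "registry.json");
  const registry = JSON.parse(await readFile(registryPath, "utf8"));
  const entry = registry.pipelines?.[slug];
  if (!entry) throw new Error(`Unknown pipeline slug: ${slug}`);

  // Ordered task list for the pipeline
  const pipeline = JSON.parse(
    await readFile(path.join(rootDir, entry.pipelinePath), "utf8")
  );
  // ESM task registry (task ID → module)
  const { default: tasks } = await import(
    pathToFileURL(path.resolve(rootDir, entry.taskRegistryPath)).href
  );
  return { pipeline, tasks };
}
```
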
@@ -218,8 +246,8 @@ Add the package and scripts to your consumer project:

 ### CLI overview

-- **`pipeline-orchestrator init`** – scaffolds `pipeline-config/` and `pipeline-data/` if missing.
-- **`pipeline-orchestrator start`** – starts the orchestrator; watches `pipeline-data/pending/` for new seeds and processes them
+- **`pipeline-orchestrator init`** – scaffolds `pipeline-config/` with registry and default pipeline, plus `pipeline-data/` if missing.
+- **`pipeline-orchestrator start`** – starts the orchestrator; watches `pipeline-data/pending/` for new seeds and processes them using the default pipeline from `pipeline-config/registry.json`.
 - **`pipeline-orchestrator start --ui`** – starts the orchestrator and the optional UI server.
 - **`pipeline-orchestrator submit [path]`** – submits a seed into `pipeline-data/pending/` (path can point to a JSON file).

@@ -238,13 +266,234 @@ If present in the project root, this file can provide defaults for the CLI (e.g.

 _(Keys and defaults may vary by version; prefer `--help` for authoritative options.)_

+### Seed format
+
+All seeds must include a `pipeline` field that references a valid pipeline slug from the registry. The pipeline field is mandatory and no fallbacks are allowed.
+
+**Minimal seed example:**
+
+```json
+{
+  "name": "my-job",
+  "pipeline": "content",
+  "data": {
+    "type": "content-creation",
+    "topic": "AI-Powered Development Tools"
+  }
+}
+```
+
+**Required fields:**
+
+- `name`: Unique identifier for the job (alphanumeric, hyphens, and underscores only)
+- `pipeline`: Valid pipeline slug from `pipeline-config/registry.json`
+- `data`: Object containing the input data for the pipeline
+
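This release also adds `package/src/api/validators/seed.js` (see the file list above). A minimal sketch of the kind of checks the rules above imply; the function name and error messages are assumptions, not the package's actual validator:

```js
// Sketch: validate a seed against the documented rules. Hypothetical
// helper; the package's real checks live in src/api/validators/seed.js.
export function validateSeed(seed, registry) {
  const errors = [];
  if (!seed || typeof seed !== "object") {
    return { valid: false, errors: ["seed must be a JSON object"] };
  }
  if (typeof seed.name !== "string" || !/^[A-Za-z0-9_-]+$/.test(seed.name)) {
    errors.push("name must use alphanumerics, hyphens, and underscores only");
  }
  if (typeof seed.pipeline !== "string" || !registry.pipelines?.[seed.pipeline]) {
    errors.push("pipeline must be a valid slug from pipeline-config/registry.json");
  }
  if (typeof seed.data !== "object" || seed.data === null || Array.isArray(seed.data)) {
    errors.push("data must be an object");
  }
  return { valid: errors.length === 0, errors };
}
```
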
 ### Example flow in a consumer project

-1. **Initialize**: `npm run pipeline:init` to
-2. **Define**: Edit `pipeline-config/pipeline.json` and implement tasks under `pipeline-config/tasks/`.
-3. **
-4. **
-5. **
+1. **Initialize**: `npm run pipeline:init` to create the registry and default pipeline structure.
+2. **Define**: Edit `pipeline-config/pipelines/{slug}/pipeline.json` and implement tasks under `pipeline-config/pipelines/{slug}/tasks/`.
+3. **Configure**: Update `pipeline-config/registry.json` to add new pipelines or change the default.
+4. **Run**: `npm run pipeline` (or `npm run pipeline:ui` for the UI).
+5. **Submit**: Add a seed JSON to `pipeline-data/pending/` or run `npm run pipeline:submit -- ./path/to/seed.json`.
+6. **Inspect**: Watch `pipeline-data/current/{jobId}` for in‑progress artifacts and `pipeline-data/complete/{jobId}` for results.
+
+---
+
+## Section C — UI and JobId-Only Navigation
+
+This project includes a web UI for monitoring pipeline execution and inspecting results.
+
+### JobId-Only Policy
+
+**Important**: The UI uses JobId-only navigation. All pipeline detail pages use `/pipeline/:jobId` URLs with no slug-based fallbacks.
+
+#### Directory Structure
+
+The UI uses ID-based storage exclusively:
+
+```
+pipeline-data/
+├── pending/
+│   ├── {jobId}/
+│   │   ├── seed.json
+│   │   └── ...
+├── current/
+│   ├── {jobId}/
+│   │   ├── seed.json
+│   │   ├── tasks-status.json
+│   │   └── ...
+├── complete/
+│   ├── {jobId}/
+│   │   ├── seed.json
+│   │   ├── tasks-status.json
+│   │   └── ...
+└── rejected/
+    ├── {jobId}/
+    │   ├── seed.json
+    │   └── ...
+```
+
+#### Accessing Pipeline Details
+
+- **Valid**: `/pipeline/abc123def456` - Loads job with ID `abc123def456`
+- **Invalid**: `/pipeline/content-generation` - Shows "Invalid job ID" error
+
+#### Error Handling
+
+- **Invalid job ID**: Shows "Invalid job ID" for malformed IDs
+- **Job not found**: Shows "Job not found" for valid IDs that don't exist
+- **Network errors**: Shows appropriate network error messages
+
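A sketch of how a server endpoint might separate these cases. The ID pattern, handler shape, and status codes are assumptions for illustration, not the package's documented behavior:

```js
// Sketch: distinguish malformed IDs (400) from missing jobs (404).
// JOB_ID_PATTERN and the Express-style handler shape are assumed.
const JOB_ID_PATTERN = /^[a-z0-9]{12,}$/i;

export async function getJobHandler(req, res, readJob) {
  const { jobId } = req.params;
  if (!JOB_ID_PATTERN.test(jobId)) {
    return res.status(400).json({ error: "Invalid job ID" });
  }
  // readJob would scan pending/current/complete/rejected for the ID
  const job = await readJob(jobId);
  if (!job) {
    return res.status(404).json({ error: "Job not found" });
  }
  return res.json(job);
}
```
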
+---
+
+## Section D — File I/O System (New)
+
+### Scoped File Operations
+
+The pipeline now includes a **scoped file I/O system** that provides each task with isolated file operations through a `context.files` API. This replaces the legacy artifacts system with a more organized approach.
+
+#### File Structure
+
+Each task gets its own directory structure:
+
+```
+pipeline-data/current/{jobId}/
+├── tasks/
+│   └── {taskName}/
+│       ├── artifacts/   # Generated outputs (replace mode)
+│       ├── logs/        # Process logs (append mode)
+│       └── tmp/         # Temporary files (replace mode)
+└── tasks-status.json    # Updated with files.* arrays
+```
+
+#### File I/O API
+
+Tasks receive a `context.files` object with these methods:
+
+```javascript
+// Write artifacts (default: replace mode)
+await context.files.writeArtifact("output.json", data);
+await context.files.writeArtifact("report.txt", content, { mode: "replace" });
+
+// Write logs (default: append mode)
+await context.files.writeLog("process.log", "Starting process\n");
+await context.files.writeLog("debug.log", error, { mode: "append" });
+
+// Write temporary files (default: replace mode)
+await context.files.writeTmp("temp.json", intermediateData);
+
+// Read files
+const artifact = await context.files.readArtifact("output.json");
+const logs = await context.files.readLog("process.log");
+const temp = await context.files.readTmp("temp.json");
+```
+
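A minimal sketch of how such a scoped writer could be constructed from the documented layout and modes; everything beyond the documented method names (the factory, path handling, defaults) is an assumption:

```js
// Sketch: a scoped file accessor for one task, mirroring the documented
// artifacts/logs/tmp layout. Implementation details are assumed.
import { promises as fs } from "fs";
import path from "path";

export function createTaskFiles(jobDir, taskName) {
  const base = path.join(jobDir, "tasks", taskName);
  const write = (sub, defaultMode) => async (name, content, opts = {}) => {
    const dir = path.join(base, sub);
    await fs.mkdir(dir, { recursive: true });
    const target = path.join(dir, path.basename(name)); // no path traversal
    const mode = opts.mode ?? defaultMode;
    if (mode === "append") {
      await fs.appendFile(target, content);
    } else {
      await fs.writeFile(target, content); // replace
    }
    return target;
  };
  const read = (sub) => (name) =>
    fs.readFile(path.join(base, sub, path.basename(name)), "utf8");
  return {
    writeArtifact: write("artifacts", "replace"),
    writeLog: write("logs", "append"),
    writeTmp: write("tmp", "replace"),
    readArtifact: read("artifacts"),
    readLog: read("logs"),
    readTmp: read("tmp"),
  };
}
```
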
+#### Status Schema Updates
+
+The `tasks-status.json` now includes `files.*` arrays:
+
+```json
+{
+  "id": "example-pipeline",
+  "current": "analysis",
+  "files": {
+    "artifacts": ["raw-research.json", "analysis-output.json", "summary.txt"],
+    "logs": ["ingestion.log", "integration.log"],
+    "tmp": ["temp-data.json"]
+  },
+  "tasks": {
+    "analysis": {
+      "state": "complete",
+      "files": {
+        "artifacts": ["raw-research.json", "analysis-output.json"],
+        "logs": ["ingestion.log"],
+        "tmp": []
+      }
+    }
+  }
+}
+```
+
+#### Migration from Legacy Artifacts
+
+The new system **breaks backward compatibility** intentionally:
+
+- **Old**: `task.artifacts` array with file objects
+- **New**: `task.files.artifacts` array with filenames only
+- **Old**: Files stored in task root directory
+- **New**: Files organized in `artifacts/`, `logs/`, `tmp/` subdirectories
+
+To migrate existing demo data:
+
+```bash
+node scripts/migrate-demo-files.js
+```
+
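For intuition, a sketch of the status-file half of such a migration; the shape of the legacy file objects is an assumption, and the bundled script above is the authoritative version:

```js
// Sketch: convert a legacy task entry ({ artifacts: [{ name, ... }] })
// to the new task.files.* shape. The legacy object shape is assumed.
export function migrateTaskEntry(task) {
  const legacy = Array.isArray(task.artifacts) ? task.artifacts : [];
  const { artifacts: _legacy, ...rest } = task; // drop the old field
  return {
    ...rest,
    files: {
      artifacts: legacy.map((a) => (typeof a === "string" ? a : a.name)),
      logs: [],
      tmp: [],
    },
  };
}
```
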
+#### Verification
+
+To verify the file I/O system is working:
+
+1. **Check test suite**: All 882 tests should pass
+2. **Run demo pipeline**: Files should appear in correct subdirectories
+3. **Inspect tasks-status.json**: Should contain `files.*` arrays
+4. **Check UI**: Job details should show files from new schema
+
+#### Example Task Usage
+
+```javascript
+export async function ingestion(context) {
+  const researchContent = context.seed.data.content;
+
+  // Log the start of ingestion
+  await context.files.writeLog(
+    "ingestion.log",
+    `[${new Date().toISOString()}] Starting data ingestion\n`
+  );
+
+  // Store raw research data
+  await context.files.writeArtifact(
+    "raw-research.json",
+    JSON.stringify(
+      {
+        content: researchContent,
+        type: context.seed.data.type,
+        ingestedAt: new Date().toISOString(),
+      },
+      null,
+      2
+    )
+  );
+
+  return { output: { researchContent } };
+}
+
+export async function integration(context) {
+  const { analysisContent } = context.output;
+
+  // Store final analysis output
+  await context.files.writeArtifact(
+    "analysis-output.json",
+    JSON.stringify(
+      {
+        content: analysisContent,
+        timestamp: new Date().toISOString(),
+        taskName: context.taskName,
+      },
+      null,
+      2
+    )
+  );
+
+  // Log completion
+  await context.files.writeLog(
+    "integration.log",
+    `[${new Date().toISOString()}] ✓ Analysis integration completed\n`
+  );
+
+  return { output: { analysis: { content: analysisContent } } };
+}
+```

 ---

@@ -252,8 +501,72 @@ _(Keys and defaults may vary by version; prefer `--help` for authoritative optio

 - **Determinism** – each task persists its inputs/outputs; you can re‑run or debug any stage.
 - **Isolation** – tasks run in separate processes when appropriate.
-- **
-- **Status** – a `tasks-status.json` file tracks progress and
+- **Scoped File I/O** – tasks use `context.files` API for organized file operations.
+- **Status** – a `tasks-status.json` file tracks progress and file inventories across the pipeline.
+- **JobId-only** – all job identification and navigation uses unique job IDs, not pipeline names.
+
+---
+
+## Section E — Logging and Debugging
+
+### Per-Stage Logging
+
+The pipeline captures console output from each stage execution to dedicated log files. This helps with debugging specific stages and understanding the flow of data through the pipeline.
+
+#### Log File Locations
+
+```
+pipeline-data/current/{jobId}/
+├── files/
+│   └── logs/
+│       ├── stage-validateStructure.log
+│       ├── stage-critique.log
+│       └── stage-refine.log
+└── tasks-status.json
+```
+
+#### Log Contents
+
+Each `stage-{stageName}.log` file contains:
+
+- All console output from that stage (console.log, console.error, console.warn, console.info)
+- Timestamped entries for debugging
+- Error messages and stack traces if the stage fails
+
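A minimal sketch of one way to capture per-stage console output, assuming a hypothetical `runStageWithLog` wrapper; the package's actual capture lives in its task runner:

```js
// Sketch: tee console output from one stage into stage-{name}.log.
// Hypothetical helper; names and error handling are assumptions.
import { promises as fs } from "fs";
import path from "path";

export async function runStageWithLog(logDir, stageName, stageFn, context) {
  const logPath = path.join(logDir, `stage-${stageName}.log`);
  await fs.mkdir(logDir, { recursive: true });
  const methods = ["log", "error", "warn", "info"];
  const originals = Object.fromEntries(methods.map((m) => [m, console[m]]));
  for (const m of methods) {
    console[m] = (...args) => {
      originals[m](...args); // keep normal output
      const line = `[${new Date().toISOString()}] ${args.map(String).join(" ")}\n`;
      fs.appendFile(logPath, line).catch(() => {}); // best-effort tee
    };
  }
  try {
    return await stageFn(context);
  } catch (err) {
    await fs.appendFile(logPath, `${err.stack ?? err}\n`); // record failure
    throw err;
  } finally {
    Object.assign(console, originals); // always restore console
  }
}
```
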
+#### Status File
+
+The `tasks-status.json` file contains the complete execution state:
+
+- `data` object with all stage outputs (`data.validateStructure`, `data.critique`, etc.)
+- `flags` object with accumulated pipeline flags (`validationFailed`, `critiqueComplete`, etc.)
+- `logs` array with audit trail entries
+- `files` arrays tracking all generated files
+
+#### Debugging Examples
+
+**Check stage output:**
+
+```bash
+# View validation stage logs
+cat pipeline-data/current/{jobId}/files/logs/stage-validateStructure.log
+
+# View critique stage logs
+cat pipeline-data/current/{jobId}/files/logs/stage-critique.log
+```
+
+**Inspect execution state:**
+
+```bash
+# View complete pipeline state
+cat pipeline-data/current/{jobId}/tasks-status.json | jq '.data, .flags'
+```
+
+**Debug failed stages:**
+
+```bash
+# Check error logs for specific stage
+grep -i error pipeline-data/current/{jobId}/files/logs/stage-*.log
+```

 ---

@@ -261,7 +574,82 @@ _(Keys and defaults may vary by version; prefer `--help` for authoritative optio

 - **Nothing happens when I submit a seed** → Ensure the orchestrator is running and watching `pipeline-data/pending/`.
 - **Task not found** → Confirm the task ID exists in `pipeline-config/tasks/index.js` and matches `pipeline.json`.
-- **UI doesn
+- **UI doesn't load** → Try `pipeline-orchestrator start --ui` and check for port conflicts.
+- **Invalid job ID error** → Ensure you're using a valid job ID from the job list, not a pipeline name.
+- **Stage failed** → Check `pipeline-data/current/{jobId}/files/logs/stage-{stageName}.log` for detailed error messages.
+- **Missing output** → Inspect `tasks-status.json` to see what data and flags were generated by each stage.
+
+---
+
+## Scaffolding
+
+Use the CLI commands to quickly scaffold a new pipeline structure. These examples show the minimal commands and resulting directory trees.
+
+### Initialize pipeline structure
+
+```bash
+pipeline-orchestrator init --root ./pipelines
+```
+
+**Resulting directory tree:**
+
+```
+pipelines/
+├── pipeline-config/
+│   └── registry.json
+└── pipeline-data/
+    ├── pending/.gitkeep
+    ├── current/.gitkeep
+    ├── complete/.gitkeep
+    └── rejected/.gitkeep
+```
+
+### Add a pipeline
+
+```bash
+pipeline-orchestrator add-pipeline content-generation --root ./pipelines
+```
+
+**Resulting directory tree:**
+
+```
+pipelines/
+├── pipeline-config/
+│   ├── registry.json
+│   └── content-generation/
+│       ├── pipeline.json
+│       └── tasks/
+│           └── index.js
+└── pipeline-data/
+    ├── pending/.gitkeep
+    ├── current/.gitkeep
+    ├── complete/.gitkeep
+    └── rejected/.gitkeep
+```
+
+### Add a pipeline task
+
+```bash
+pipeline-orchestrator add-pipeline-task content-generation research --root ./pipelines
+```
+
+**Resulting directory tree:**
+
+```
+pipelines/
+├── pipeline-config/
+│   ├── registry.json
+│   └── content-generation/
+│       ├── pipeline.json
+│       └── tasks/
+│           ├── index.js
+│           └── research.js
+└── pipeline-data/
+    ├── pending/.gitkeep
+    ├── current/.gitkeep
+    ├── complete/.gitkeep
+    └── rejected/.gitkeep
+```

 ---

@@ -281,6 +669,9 @@ npm run pipeline:ui

 # 4) Submit a seed (JSON file)
 npm run pipeline:submit -- ./seeds/example-seed.json
+
+# 5) Access UI (if running with --ui)
+# Navigate to job details using /pipeline/{jobId} URLs
 ```

 ---
package/package.json
CHANGED

@@ -1,9 +1,12 @@
 {
   "name": "@ryanfw/prompt-orchestration-pipeline",
-  "version": "0.0.1",
+  "version": "0.3.0",
   "description": "A Prompt-orchestration pipeline (POP) is a framework for building, running, and experimenting with complex chains of LLM tasks.",
   "type": "module",
   "main": "src/ui/server.js",
+  "bin": {
+    "pipeline-orchestrator": "src/cli/index.js"
+  },
   "files": [
     "src",
     "README.md",
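The new `bin` entry is what exposes the `pipeline-orchestrator` command used throughout the README; once the package is installed, it should be invocable in the usual ways:

```bash
# in a consumer project
npx pipeline-orchestrator --help
npx pipeline-orchestrator init

# or wired into the consumer's package.json scripts, e.g.
#   "pipeline:init": "pipeline-orchestrator init"
npm run pipeline:init
```
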
@@ -17,20 +20,54 @@
     "access": "public"
   },
   "scripts": {
-    "test": "vitest run",
-    "
-    "
+    "test": "vitest run --config ./vite.config.js --root .",
+    "lint": "eslint . --ext .js,.jsx",
+    "backend": "NODE_ENV=development nodemon src/ui/server.js",
+    "ui": "NODE_ENV=development nodemon src/ui/server.js",
+    "ui:dev": "vite",
+    "ui:build": "vite build",
+    "ui:prod": "node src/ui/server.js",
+    "demo:ui": "NODE_ENV=production PO_ROOT=demo node src/ui/server.js",
+    "demo:orchestrator": "PO_ROOT=demo NODE_ENV=production node -e \"import('./src/core/orchestrator.js').then(m => m.startOrchestrator({ dataDir: process.env.PO_ROOT || 'demo' })).catch(err => { console.error(err); process.exit(1) })\"",
+    "demo:all": "NODE_ENV=development PO_ROOT=demo npm run ui:build && concurrently \"npm:demo:ui\" \"npm:demo:orchestrator\" --kill-others-on-fail",
+    "demo:prod": "npm run ui:build && NODE_ENV=production PO_ROOT=demo node src/ui/server.js"
   },
   "dependencies": {
+    "@radix-ui/react-progress": "^1.1.7",
+    "@radix-ui/react-tabs": "^1.1.13",
+    "@radix-ui/react-toast": "^1.2.15",
+    "@radix-ui/react-tooltip": "^1.2.8",
+    "@radix-ui/themes": "^3.2.1",
     "ajv": "^8.17.1",
     "chokidar": "^3.5.3",
-    "
-    "
+    "clsx": "^2.1.1",
+    "commander": "^14.0.2",
+    "dotenv": "^17.2.3",
+    "lucide-react": "^0.544.0",
+    "openai": "^5.23.1",
+    "react": "^19.2.0",
+    "react-dom": "^19.2.0",
+    "react-router-dom": "^7.9.4",
+    "tslib": "^2.8.1"
   },
   "devDependencies": {
+    "@eslint/js": "^9.37.0",
+    "@tailwindcss/postcss": "^4.1.14",
+    "@testing-library/jest-dom": "^6.9.1",
+    "@testing-library/react": "^16.3.0",
+    "@testing-library/user-event": "^14.6.1",
+    "@vitejs/plugin-react": "^5.0.4",
     "@vitest/coverage-v8": "^3.2.4",
-    "
-    "
+    "concurrently": "^9.2.1",
+    "eslint": "^9.37.0",
+    "eslint-plugin-react": "^7.37.5",
+    "eslint-plugin-react-hooks": "^6.1.1",
+    "eslint-plugin-react-refresh": "^0.4.23",
+    "jsdom": "^27.0.0",
+    "nodemon": "^3.1.10",
+    "prettier": "^3.6.2",
+    "tailwindcss": "^4.1.14",
+    "vite": "^7.1.9",
     "vitest": "^3.2.4"
   },
   "engines": {
package/src/api/files.js
ADDED

@@ -0,0 +1,48 @@
+/**
+ * Atomic file operations for seed upload
+ * @module api/files
+ */
+
+import { promises as fs } from "fs";
+import { randomUUID } from "crypto";
+
+/**
+ * Write file atomically using temp file then rename
+ * @param {string} filePath - Target file path
+ * @param {string|Buffer} content - File content
+ * @returns {Promise<void>}
+ */
+async function atomicWrite(filePath, content) {
+  const tempPath = `${filePath}.${randomUUID()}.tmp`;
+
+  try {
+    // Write to temp file first
+    await fs.writeFile(tempPath, content);
+
+    // Atomically rename to target path
+    await fs.rename(tempPath, filePath);
+  } catch (error) {
+    // Clean up temp file on any error
+    try {
+      await fs.unlink(tempPath);
+    } catch (cleanupError) {
+      // Ignore cleanup errors
+    }
+    throw error;
+  }
+}
+
+/**
+ * Clean up partial files on failure
+ * @param {string} filePath - File path that may have partial writes
+ * @returns {Promise<void>}
+ */
+async function cleanupOnFailure(filePath) {
+  try {
+    await fs.unlink(filePath);
+  } catch (error) {
+    // File doesn't exist or can't be deleted - ignore
+  }
+}
+
+export { atomicWrite, cleanupOnFailure };
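A usage sketch for the helpers above: the `saveSeed` wrapper and its paths are illustrative assumptions, while `atomicWrite` is the module's real export. The temp-file-then-rename pattern matters because `fs.rename` is atomic within a single filesystem, so a watcher on `pending/` never observes a half-written `seed.json`:

```js
// Sketch: landing an uploaded seed in pending/ via atomicWrite.
// The saveSeed wrapper and directory layout are assumptions.
import { promises as fs } from "fs";
import path from "path";
import { atomicWrite } from "./files.js";

export async function saveSeed(pendingDir, jobId, seed) {
  const jobDir = path.join(pendingDir, jobId);
  await fs.mkdir(jobDir, { recursive: true }); // rename needs the dir to exist
  const target = path.join(jobDir, "seed.json");
  // atomicWrite stages to a temp file and renames, so readers (e.g. the
  // orchestrator's watcher) never see a partially written seed.json.
  await atomicWrite(target, JSON.stringify(seed, null, 2));
  return target;
}
```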