@ryanfw/prompt-orchestration-pipeline 0.0.1 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +415 -24
- package/package.json +46 -8
- package/src/api/files.js +48 -0
- package/src/api/index.js +149 -53
- package/src/api/validators/seed.js +141 -0
- package/src/cli/index.js +444 -29
- package/src/cli/run-orchestrator.js +39 -0
- package/src/cli/update-pipeline-json.js +47 -0
- package/src/components/DAGGrid.jsx +649 -0
- package/src/components/JobCard.jsx +96 -0
- package/src/components/JobDetail.jsx +159 -0
- package/src/components/JobTable.jsx +202 -0
- package/src/components/Layout.jsx +134 -0
- package/src/components/TaskFilePane.jsx +570 -0
- package/src/components/UploadSeed.jsx +239 -0
- package/src/components/ui/badge.jsx +20 -0
- package/src/components/ui/button.jsx +43 -0
- package/src/components/ui/card.jsx +20 -0
- package/src/components/ui/focus-styles.css +60 -0
- package/src/components/ui/progress.jsx +26 -0
- package/src/components/ui/select.jsx +27 -0
- package/src/components/ui/separator.jsx +6 -0
- package/src/config/paths.js +99 -0
- package/src/core/config.js +270 -9
- package/src/core/file-io.js +202 -0
- package/src/core/module-loader.js +157 -0
- package/src/core/orchestrator.js +275 -294
- package/src/core/pipeline-runner.js +95 -41
- package/src/core/progress.js +66 -0
- package/src/core/status-writer.js +331 -0
- package/src/core/task-runner.js +719 -73
- package/src/core/validation.js +120 -1
- package/src/lib/utils.js +6 -0
- package/src/llm/README.md +139 -30
- package/src/llm/index.js +222 -72
- package/src/pages/PipelineDetail.jsx +111 -0
- package/src/pages/PromptPipelineDashboard.jsx +223 -0
- package/src/providers/deepseek.js +3 -15
- package/src/ui/client/adapters/job-adapter.js +258 -0
- package/src/ui/client/bootstrap.js +120 -0
- package/src/ui/client/hooks/useJobDetailWithUpdates.js +619 -0
- package/src/ui/client/hooks/useJobList.js +50 -0
- package/src/ui/client/hooks/useJobListWithUpdates.js +335 -0
- package/src/ui/client/hooks/useTicker.js +26 -0
- package/src/ui/client/index.css +31 -0
- package/src/ui/client/index.html +18 -0
- package/src/ui/client/main.jsx +38 -0
- package/src/ui/config-bridge.browser.js +149 -0
- package/src/ui/config-bridge.js +149 -0
- package/src/ui/config-bridge.node.js +310 -0
- package/src/ui/dist/assets/index-CxcrauYR.js +22702 -0
- package/src/ui/dist/assets/style-D6K_oQ12.css +62 -0
- package/src/ui/dist/index.html +19 -0
- package/src/ui/endpoints/job-endpoints.js +300 -0
- package/src/ui/file-reader.js +216 -0
- package/src/ui/job-change-detector.js +83 -0
- package/src/ui/job-index.js +231 -0
- package/src/ui/job-reader.js +274 -0
- package/src/ui/job-scanner.js +188 -0
- package/src/ui/public/app.js +3 -1
- package/src/ui/server.js +1636 -59
- package/src/ui/sse-enhancer.js +149 -0
- package/src/ui/sse.js +204 -0
- package/src/ui/state-snapshot.js +252 -0
- package/src/ui/transformers/list-transformer.js +347 -0
- package/src/ui/transformers/status-transformer.js +307 -0
- package/src/ui/watcher.js +61 -7
- package/src/utils/dag.js +101 -0
- package/src/utils/duration.js +126 -0
- package/src/utils/id-generator.js +30 -0
- package/src/utils/jobs.js +7 -0
- package/src/utils/pipelines.js +44 -0
- package/src/utils/task-files.js +271 -0
- package/src/utils/ui.jsx +76 -0
- package/src/ui/public/index.html +0 -53
- package/src/ui/public/style.css +0 -341
package/src/core/config.js
CHANGED
@@ -5,9 +5,163 @@
  * supporting both environment variables and config file overrides.
  */

-import { promises as fs } from "node:fs";
+import { promises as fs, existsSync, readFileSync } from "node:fs";
 import path from "node:path";

+async function checkFileExistence(filePath) {
+  try {
+    await fs.access(filePath);
+    return true;
+  } catch (error) {
+    if (error.code === "ENOENT") {
+      return false;
+    } else {
+      throw error; // Re-throw other errors
+    }
+  }
+}
+
+function resolveRepoRoot(config) {
+  const configuredRoot = config?.paths?.root;
+  if (!configuredRoot) {
+    throw new Error("PO_ROOT is required");
+  }
+  return path.resolve(configuredRoot);
+}
+
+function resolveWithBase(rootDir, maybePath) {
+  if (!maybePath) {
+    return undefined;
+  }
+  return path.isAbsolute(maybePath)
+    ? maybePath
+    : path.resolve(rootDir, maybePath);
+}
+
+function normalizeRegistryEntry(slug, entry, rootDir) {
+  const pipelineJsonPath = entry?.pipelineJsonPath
+    ? resolveWithBase(rootDir, entry.pipelineJsonPath)
+    : undefined;
+
+  const configDir = entry?.configDir
+    ? resolveWithBase(rootDir, entry.configDir)
+    : pipelineJsonPath
+      ? path.dirname(pipelineJsonPath)
+      : path.join(rootDir, "pipeline-config", slug);
+
+  const tasksDir = entry?.tasksDir
+    ? resolveWithBase(rootDir, entry.tasksDir)
+    : path.join(configDir, "tasks");
+
+  return {
+    configDir,
+    tasksDir,
+    name: entry?.name,
+    description: entry?.description,
+  };
+}
+
+async function hydratePipelinesFromRegistry(config) {
+  const rootDir = resolveRepoRoot(config);
+  const registryPath = path.join(rootDir, "pipeline-config", "registry.json");
+
+  let registryData;
+  try {
+    const contents = await fs.readFile(registryPath, "utf8");
+    registryData = JSON.parse(contents);
+  } catch (error) {
+    if (error?.code === "ENOENT") {
+      return;
+    }
+    throw new Error(
+      "Failed to read pipeline registry at " +
+        registryPath +
+        ": " +
+        error.message
+    );
+  }
+
+  if (
+    !registryData ||
+    typeof registryData !== "object" ||
+    !registryData.pipelines ||
+    typeof registryData.pipelines !== "object"
+  ) {
+    if (
+      registryData &&
+      typeof registryData === "object" &&
+      registryData.slugs &&
+      typeof registryData.slugs === "object"
+    ) {
+      console.warn(
+        "[config] Detected legacy pipeline registry format using `slugs`. Expected `pipelines` object. Falling back to defaultConfig.pipelines."
+      );
+    }
+    return;
+  }
+
+  const resolved = {};
+  for (const [slug, entry] of Object.entries(registryData.pipelines)) {
+    const normalized = normalizeRegistryEntry(slug, entry, rootDir);
+    resolved[slug] = normalized;
+  }
+
+  if (Object.keys(resolved).length > 0) {
+    config.pipelines = resolved;
+  }
+}
+
+function hydratePipelinesFromRegistrySync(config) {
+  const rootDir = resolveRepoRoot(config);
+  const registryPath = path.join(rootDir, "pipeline-config", "registry.json");
+
+  if (!existsSync(registryPath)) {
+    return;
+  }
+
+  let registryData;
+  try {
+    const contents = readFileSync(registryPath, "utf8");
+    registryData = JSON.parse(contents);
+  } catch (error) {
+    throw new Error(
+      "Failed to read pipeline registry at " +
+        registryPath +
+        ": " +
+        error.message
+    );
+  }
+
+  if (
+    !registryData ||
+    typeof registryData !== "object" ||
+    !registryData.pipelines ||
+    typeof registryData.pipelines !== "object"
+  ) {
+    if (
+      registryData &&
+      typeof registryData === "object" &&
+      registryData.slugs &&
+      typeof registryData.slugs === "object"
+    ) {
+      console.warn(
+        "[config] Detected legacy pipeline registry format using `slugs`. Expected `pipelines` object. Falling back to defaultConfig.pipelines."
+      );
+    }
+    return;
+  }
+
+  const resolved = {};
+  for (const [slug, entry] of Object.entries(registryData.pipelines)) {
+    const normalized = normalizeRegistryEntry(slug, entry, rootDir);
+    resolved[slug] = normalized;
+  }
+
+  if (Object.keys(resolved).length > 0) {
+    config.pipelines = resolved;
+  }
+}
+
 /**
  * Default configuration values
  * These can be overridden by environment variables or config file
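For reference, a minimal sketch of the registry file the new hydrators look for, with field names taken from normalizeRegistryEntry() above; the slug, name, and paths below are illustrative, not part of the package:

```js
// Example shape for <PO_ROOT>/pipeline-config/registry.json (slug and paths are hypothetical).
const exampleRegistry = {
  pipelines: {
    "content-gen": {
      name: "Content generation",                    // optional metadata, passed through
      description: "Draft and review articles",      // optional metadata, passed through
      configDir: "pipeline-config/content-gen",      // relative paths resolve against PO_ROOT
      tasksDir: "pipeline-config/content-gen/tasks", // defaults to <configDir>/tasks if omitted
      // pipelineJsonPath may be given instead; configDir then defaults to its dirname
    },
  },
};

export default exampleRegistry; // serialize with JSON.stringify(..., null, 2) to produce registry.json
```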
@@ -33,6 +187,36 @@ export const defaultConfig = {
     maxConcurrency: 5,
     retryMaxAttempts: 3,
     retryBackoffMs: 1000,
+    models: {
+      "openai:gpt-4": {
+        provider: "openai",
+        model: "gpt-4",
+      },
+      "openai:gpt-4-turbo": {
+        provider: "openai",
+        model: "gpt-4-turbo",
+      },
+      "openai:gpt-5": {
+        provider: "openai",
+        model: "gpt-5-chat-latest",
+      },
+      "deepseek:reasoner": {
+        provider: "deepseek",
+        model: "deepseek-reasoner",
+      },
+      "deepseek:chat": {
+        provider: "deepseek",
+        model: "deepseek-chat",
+      },
+      "anthropic:opus": {
+        provider: "anthropic",
+        model: "claude-3-opus",
+      },
+      "anthropic:sonnet": {
+        provider: "anthropic",
+        model: "claude-3-sonnet",
+      },
+    },
   },
   ui: {
     port: 3000,
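A short sketch of how these new default model aliases could be read back once a config is loaded; it assumes getConfigValue() (exported later in this file) accepts a dot-separated path, and the import path is illustrative:

```js
// Assumes getConfigValue() resolves dot-separated paths into the loaded config.
import { loadConfig, getConfigValue } from "./config.js"; // import path is illustrative

await loadConfig();
const alias = getConfigValue("llm.models")["deepseek:chat"];
// alias → { provider: "deepseek", model: "deepseek-chat" }
```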
@@ -41,13 +225,13 @@ export const defaultConfig = {
     maxRecentChanges: 10,
   },
   paths: {
-    root:
+    root: undefined,
     dataDir: "pipeline-data",
-    configDir: "pipeline-config",
     pendingDir: "pending",
     currentDir: "current",
     completeDir: "complete",
   },
+  pipelines: {},
   validation: {
     seedNameMinLength: 1,
     seedNameMaxLength: 100,
@@ -154,9 +338,7 @@ function loadFromEnvironment(config) {
   if (process.env.PO_DATA_DIR) {
     envConfig.paths.dataDir = process.env.PO_DATA_DIR;
   }
-
-    envConfig.paths.configDir = process.env.PO_CONFIG_DIR;
-  }
+  // Note: PO_CONFIG_DIR is deprecated - use pipelines.registry instead

   // Logging settings
   if (process.env.PO_LOG_LEVEL) {
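A sketch of the environment overrides still honored after this change; the concrete values are examples, and the PO_ROOT-to-paths.root mapping is inferred from the validation added later in this diff:

```js
// PO_CONFIG_DIR is now ignored; pipelines come from pipeline-config/registry.json instead.
process.env.PO_ROOT = "/srv/my-repo";      // required; presumably mapped to paths.root
process.env.PO_DATA_DIR = "pipeline-data"; // mapped to paths.dataDir (shown above)
process.env.PO_LOG_LEVEL = "debug";        // read by the logging-settings block (shown above)
```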
@@ -215,7 +397,7 @@ async function loadFromFile(configPath) {
  * Validate configuration values
  * Throws if configuration is invalid
  */
-function validateConfig(config) {
+async function validateConfig(config) {
   const errors = [];

   // Validate numeric values are positive
@@ -236,7 +418,7 @@ function validateConfig(config) {
   }

   // Validate provider
-  const validProviders = ["openai", "deepseek", "anthropic"];
+  const validProviders = ["openai", "deepseek", "anthropic", "mock"];
   if (!validProviders.includes(config.llm.defaultProvider)) {
     errors.push(
       `llm.defaultProvider must be one of: ${validProviders.join(", ")}`
@@ -286,9 +468,49 @@ export async function loadConfig(options = {}) {
   // Override with environment variables
   config = loadFromEnvironment(config);

+  // Validate that PO_ROOT is set
+  if (!config.paths.root) {
+    throw new Error("PO_ROOT is required");
+  }
+
+  // Hydrate pipeline registry if present
+  await hydratePipelinesFromRegistry(config);
+
+  // Validate pipelines presence after hydration
+  if (!config.pipelines || Object.keys(config.pipelines).length === 0) {
+    const repoRoot = resolveRepoRoot(config);
+    throw new Error(
+      `No pipelines are registered. Create pipeline-config/registry.json in ${repoRoot} to register pipelines.`
+    );
+  }
+
+  // Normalize pipeline paths and validate existence
+  const repoRoot = resolveRepoRoot(config);
+  for (const slug in config.pipelines) {
+    const pipeline = config.pipelines[slug];
+
+    // Resolve to absolute paths
+    pipeline.configDir = path.resolve(repoRoot, pipeline.configDir);
+    pipeline.tasksDir = path.resolve(repoRoot, pipeline.tasksDir);
+
+    // Validate directory existence
+    if (!(await checkFileExistence(pipeline.configDir))) {
+      throw new Error(pipeline.configDir + " does not exist");
+    }
+    if (!(await checkFileExistence(pipeline.tasksDir))) {
+      throw new Error(pipeline.tasksDir + " does not exist");
+    }
+
+    // Validate pipeline.json exists
+    const pipelineJsonPath = path.join(pipeline.configDir, "pipeline.json");
+    if (!(await checkFileExistence(pipelineJsonPath))) {
+      throw new Error(pipelineJsonPath + " does not exist");
+    }
+  }
+
   // Validate if requested
   if (validate) {
-    validateConfig(config);
+    await validateConfig(config);
   }

   // Cache the loaded config
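A minimal startup sketch under the new checks; the root path is hypothetical, and the import path is illustrative:

```js
// PO_ROOT must point at a repo containing pipeline-config/registry.json, and every
// registered pipeline's configDir, tasksDir, and pipeline.json must exist, or loadConfig() throws.
import { loadConfig } from "./config.js"; // import path is illustrative

process.env.PO_ROOT = "/srv/my-repo";
const config = await loadConfig();
console.log(Object.keys(config.pipelines)); // e.g. ["content-gen"]
```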
@@ -309,6 +531,24 @@ export function getConfig() {
     currentConfig = loadFromEnvironment(
       JSON.parse(JSON.stringify(defaultConfig))
     );
+
+    // Validate that PO_ROOT is set
+    if (!currentConfig.paths.root) {
+      throw new Error("PO_ROOT is required");
+    }
+
+    hydratePipelinesFromRegistrySync(currentConfig);
+
+    // Validate pipelines presence after hydration
+    if (
+      !currentConfig.pipelines ||
+      Object.keys(currentConfig.pipelines).length === 0
+    ) {
+      const repoRoot = resolveRepoRoot(currentConfig);
+      throw new Error(
+        `No pipelines are registered. Create pipeline-config/registry.json in ${repoRoot} to register pipelines.`
+      );
+    }
   }
   return currentConfig;
 }
@@ -343,3 +583,24 @@ export function getConfigValue(path, defaultValue = undefined) {

   return value;
 }
+
+/**
+ * Get pipeline configuration by slug
+ *
+ * @param {string} slug - Pipeline slug identifier
+ * @returns {Object} Object with pipelineJsonPath and tasksDir
+ */
+export function getPipelineConfig(slug) {
+  const config = getConfig();
+
+  if (!config.pipelines || !config.pipelines[slug]) {
+    throw new Error("Pipeline " + slug + " not found in registry");
+  }
+
+  const pipeline = config.pipelines[slug];
+
+  return {
+    pipelineJsonPath: path.join(pipeline.configDir, "pipeline.json"),
+    tasksDir: pipeline.tasksDir,
+  };
+}
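Usage sketch for the new export, with the slug and import path illustrative:

```js
// Sketch: resolving a registered pipeline by slug.
import { getPipelineConfig } from "./config.js"; // import path is illustrative

const { pipelineJsonPath, tasksDir } = getPipelineConfig("content-gen");
// pipelineJsonPath → <configDir>/pipeline.json for that slug
// tasksDir         → the pipeline's registered tasks directory
```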
package/src/core/file-io.js
ADDED
@@ -0,0 +1,202 @@
+import fs from "node:fs/promises";
+import path from "node:path";
+
+/**
+ * Creates a task-scoped file I/O interface that manages file operations
+ * and automatically updates tasks-status.json with file tracking.
+ *
+ * @param {Object} config - Configuration object
+ * @param {string} config.workDir - Base working directory (e.g., /path/to/pipeline-data/current/jobId)
+ * @param {string} config.taskName - Name of the current task
+ * @param {Function} config.getStage - Function that returns current stage name
+ * @param {string} config.statusPath - Path to tasks-status.json file
+ * @returns {Object} File I/O interface with curried functions
+ */
+
+async function ensureDir(dir) {
+  await fs.mkdir(dir, { recursive: true });
+}
+
+export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
+  const taskDir = path.join(workDir, "tasks", taskName);
+
+  // New directory structure: {workDir}/files/{type}
+  const filesRoot = path.join(workDir, "files");
+  const artifactsDir = path.join(filesRoot, "artifacts");
+  const logsDir = path.join(filesRoot, "logs");
+  const tmpDir = path.join(filesRoot, "tmp");
+
+  /**
+   * Updates tasks-status.json with file information, ensuring de-duplication
+   */
+  async function updateStatusWithFiles(fileType, fileName) {
+    try {
+      const statusContent = await fs.readFile(statusPath, "utf8");
+      const status = JSON.parse(statusContent);
+
+      // Initialize files object if it doesn't exist
+      if (!status.files) {
+        status.files = { artifacts: [], logs: [], tmp: [] };
+      }
+
+      // Initialize task files if they don't exist
+      if (!status.tasks[taskName].files) {
+        status.tasks[taskName].files = { artifacts: [], logs: [], tmp: [] };
+      }
+
+      // Add to job-level files array (de-duped)
+      const jobArray = status.files[fileType];
+      if (!jobArray.includes(fileName)) {
+        jobArray.push(fileName);
+      }
+
+      // Add to task-level files array (de-duped)
+      const taskArray = status.tasks[taskName].files[fileType];
+      if (!taskArray.includes(fileName)) {
+        taskArray.push(fileName);
+      }
+
+      // Write back to file atomically
+      await atomicWrite(statusPath, JSON.stringify(status, null, 2));
+    } catch (error) {
+      // If status file doesn't exist or is invalid, we'll log but not fail
+      console.warn(
+        `Failed to update status with file ${fileName}:`,
+        error.message
+      );
+    }
+  }
+
+  /**
+   * Atomic write helper
+   */
+  async function atomicWrite(filePath, data) {
+    const tmpPath = filePath + ".tmp";
+    await fs.writeFile(tmpPath, data);
+    await fs.rename(tmpPath, filePath);
+  }
+
+  /**
+   * Generic write function that handles different modes
+   */
+  async function writeFile(dirPath, fileName, content, mode = "replace") {
+    await ensureDir(dirPath);
+    const filePath = path.join(dirPath, fileName);
+
+    if (mode === "append") {
+      await fs.appendFile(filePath, content);
+    } else {
+      await atomicWrite(filePath, content);
+    }
+
+    return filePath;
+  }
+
+  /**
+   * Generic read function
+   */
+  async function readFile(dirPath, fileName) {
+    const filePath = path.join(dirPath, fileName);
+    return await fs.readFile(filePath, "utf8");
+  }
+
+  // Return curried functions for each file type
+  return {
+    /**
+     * Write an artifact file
+     * @param {string} name - File name
+     * @param {string} content - File content
+     * @param {Object} options - Options object
+     * @param {string} options.mode - "replace" (default) or "append"
+     */
+    async writeArtifact(name, content, options = {}) {
+      const filePath = await writeFile(
+        artifactsDir,
+        name,
+        content,
+        options.mode || "replace"
+      );
+      await updateStatusWithFiles("artifacts", name);
+      return filePath;
+    },
+
+    /**
+     * Write a log file
+     * @param {string} name - File name
+     * @param {string} content - Log content
+     * @param {Object} options - Options object
+     * @param {string} options.mode - "append" (default) or "replace"
+     */
+    async writeLog(name, content, options = {}) {
+      const filePath = await writeFile(
+        logsDir,
+        name,
+        content,
+        options.mode || "append"
+      );
+      await updateStatusWithFiles("logs", name);
+      return filePath;
+    },
+
+    /**
+     * Write a temporary file
+     * @param {string} name - File name
+     * @param {string} content - File content
+     * @param {Object} options - Options object
+     * @param {string} options.mode - "replace" (default) or "append"
+     */
+    async writeTmp(name, content, options = {}) {
+      const filePath = await writeFile(
+        tmpDir,
+        name,
+        content,
+        options.mode || "replace"
+      );
+      await updateStatusWithFiles("tmp", name);
+      return filePath;
+    },
+
+    /**
+     * Read an artifact file
+     * @param {string} name - File name
+     * @returns {string} File content
+     */
+    async readArtifact(name) {
+      return await readFile(artifactsDir, name);
+    },
+
+    /**
+     * Read a log file
+     * @param {string} name - File name
+     * @returns {string} File content
+     */
+    async readLog(name) {
+      return await readFile(logsDir, name);
+    },
+
+    /**
+     * Read a temporary file
+     * @param {string} name - File name
+     * @returns {string} File content
+     */
+    async readTmp(name) {
+      return await readFile(tmpDir, name);
+    },
+
+    /**
+     * Get the task directory path
+     * @returns {string} Task directory path
+     */
+    getTaskDir() {
+      return taskDir;
+    },
+
+    /**
+     * Get the current stage name
+     * @returns {string} Current stage name
+     */
+    getCurrentStage() {
+      return getStage();
+    },
+  };
+}
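A sketch of how a task might use the new file I/O helper; all paths and names here are illustrative, and statusPath must point at an existing tasks-status.json with a tasks[taskName] entry for file tracking to be recorded:

```js
import { createTaskFileIO } from "./file-io.js"; // import path is illustrative

const io = createTaskFileIO({
  workDir: "/srv/pipeline-data/current/job-123",
  taskName: "draft",
  getStage: () => "inference",
  statusPath: "/srv/pipeline-data/current/job-123/tasks-status.json",
});

await io.writeArtifact("draft.md", "# Draft\n");   // written to files/artifacts/draft.md
await io.writeLog("llm.log", "prompt sent\n");     // appends to files/logs/llm.log by default
const draft = await io.readArtifact("draft.md");   // reads it back as a string
```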
package/src/core/module-loader.js
ADDED
@@ -0,0 +1,157 @@
+import fsp from "node:fs/promises";
+import path from "node:path";
+import { fileURLToPath, pathToFileURL } from "node:url";
+
+const WORKSPACE_CACHE_DIR = path.join(process.cwd(), ".tmp-task-modules");
+
+/**
+ * Convert supported modulePath formats into a file:// URL.
+ * @param {string | URL} modulePath
+ * @returns {URL}
+ */
+function resolveToFileURL(modulePath) {
+  if (modulePath instanceof URL) {
+    return modulePath;
+  }
+
+  if (
+    modulePath &&
+    typeof modulePath === "object" &&
+    typeof modulePath.href === "string"
+  ) {
+    try {
+      return new URL(modulePath.href);
+    } catch {
+      // fall through to error below
+    }
+  }
+
+  if (typeof modulePath !== "string") {
+    throw new TypeError(
+      `Module path must be a string or URL. Received: ${typeof modulePath}`
+    );
+  }
+
+  if (modulePath.startsWith("file://")) {
+    return new URL(modulePath);
+  }
+
+  if (!path.isAbsolute(modulePath)) {
+    throw new Error(
+      `Module path must be absolute. Received: ${modulePath}\n` +
+        `Hint: resolve module paths before calling loadFreshModule().`
+    );
+  }
+
+  return pathToFileURL(modulePath);
+}
+
+/**
+ * Detect whether an error corresponds to a module-not-found condition.
+ * @param {unknown} error
+ * @returns {boolean}
+ */
+function isModuleNotFoundError(error) {
+  if (!error || typeof error !== "object") {
+    return false;
+  }
+
+  const err = /** @type {{ code?: string; message?: string }} */ (error);
+
+  if (err.code === "ERR_MODULE_NOT_FOUND") {
+    return true;
+  }
+
+  const message = err.message || "";
+  return (
+    message.includes("Cannot find module") ||
+    message.includes("Failed to load url")
+  );
+}
+
+/**
+ * Produce a clearer error when the underlying module file is missing.
+ * @param {string} modulePath
+ * @param {Error} originalError
+ */
+function createMissingModuleError(modulePath, originalError) {
+  const error = new Error(
+    `Module not found at "${modulePath}". Ensure the file exists before running the pipeline.`
+  );
+  error.name = originalError.name || "ERR_MODULE_NOT_FOUND";
+  if ("cause" in Error.prototype) {
+    error.cause = originalError;
+  } else {
+    error.originalError = originalError;
+  }
+  return error;
+}
+
+/**
+ * Copy a module file into a workspace-local cache directory so Vite/Vitest can load it.
+ * @param {string} sourcePath
+ * @returns {Promise<string>}
+ */
+async function copyModuleToWorkspaceCache(sourcePath) {
+  await fsp.mkdir(WORKSPACE_CACHE_DIR, { recursive: true });
+  const ext = path.extname(sourcePath) || ".js";
+  const base = path.basename(sourcePath, ext);
+  const uniqueSuffix = `${Date.now()}-${Math.random().toString(36).slice(2)}`;
+  const destFile = path.join(
+    WORKSPACE_CACHE_DIR,
+    `${base}.${uniqueSuffix}${ext}`
+  );
+  await fsp.copyFile(sourcePath, destFile);
+  return destFile;
+}
+
+/**
+ * Dynamically import a module with cache busting while remaining compatible with Node's file:/// resolution.
+ * Falls back to copying the module into a workspace-local cache when query parameters break filesystem resolution.
+ * @param {string | URL} modulePath
+ * @returns {Promise<any>} Module namespace object
+ */
+export async function loadFreshModule(modulePath) {
+  const fileUrl = resolveToFileURL(modulePath);
+  const cacheBustedUrl = `${fileUrl.href}?t=${Date.now()}`;
+
+  try {
+    return await import(cacheBustedUrl);
+  } catch (error) {
+    if (!isModuleNotFoundError(error) || fileUrl.protocol !== "file:") {
+      throw error;
+    }
+
+    const absolutePath = fileURLToPath(fileUrl);
+
+    try {
+      await fsp.access(absolutePath);
+    } catch {
+      throw createMissingModuleError(
+        absolutePath,
+        /** @type {Error} */ (error)
+      );
+    }
+
+    try {
+      const cacheCopy = await copyModuleToWorkspaceCache(absolutePath);
+      const cacheUrl = `${pathToFileURL(cacheCopy).href}?t=${Date.now()}`;
+      return await import(cacheUrl);
+    } catch (fallbackError) {
+      const messageLines = [
+        `Failed to load module "${absolutePath}" after attempting cache-busting import.`,
+        `Cache-busted URL: ${cacheBustedUrl}`,
+        `Original error: ${/** @type {Error} */ (error).message}`,
+        `Fallback error: ${/** @type {Error} */ (fallbackError).message}`,
+      ];
+      const combined = new Error(messageLines.join("\n"));
+      if ("cause" in Error.prototype) {
+        combined.cause = fallbackError;
+      } else {
+        combined.fallbackError = fallbackError;
+      }
+      combined.initialError = error;
+      throw combined;
+    }
+  }
+}
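A sketch of re-importing a task module without Node's ESM cache returning a stale copy; the task path and import path are illustrative, and the path must be absolute (or a file:// URL) per resolveToFileURL():

```js
import path from "node:path";
import { loadFreshModule } from "./module-loader.js"; // import path is illustrative

const taskPath = path.resolve("pipeline-config/content-gen/tasks/draft.js");
const firstLoad = await loadFreshModule(taskPath);
// ...edit the task file on disk...
const secondLoad = await loadFreshModule(taskPath); // fresh namespace, not the cached one
```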