@ryanfw/prompt-orchestration-pipeline 0.9.1 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -1
- package/src/api/index.js +38 -1
- package/src/components/DAGGrid.jsx +24 -7
- package/src/components/JobDetail.jsx +11 -0
- package/src/components/TaskDetailSidebar.jsx +27 -3
- package/src/components/UploadSeed.jsx +2 -2
- package/src/config/log-events.js +77 -0
- package/src/core/file-io.js +202 -7
- package/src/core/orchestrator.js +140 -4
- package/src/core/pipeline-runner.js +84 -6
- package/src/core/status-initializer.js +155 -0
- package/src/core/status-writer.js +151 -13
- package/src/core/symlink-utils.js +196 -0
- package/src/core/task-runner.js +37 -7
- package/src/ui/client/adapters/job-adapter.js +21 -2
- package/src/ui/client/hooks/useJobDetailWithUpdates.js +92 -0
- package/src/ui/dist/assets/{index-DqkbzXZ1.js → index-DeDzq-Kk.js} +129 -14
- package/src/ui/dist/assets/style-aBtD_Yrs.css +62 -0
- package/src/ui/dist/index.html +2 -2
- package/src/ui/server.js +201 -109
- package/src/ui/zip-utils.js +103 -0
- package/src/ui/dist/assets/style-DBF9NQGk.css +0 -62
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@ryanfw/prompt-orchestration-pipeline",
-  "version": "0.9.1",
+  "version": "0.11.0",
   "description": "A Prompt-orchestration pipeline (POP) is a framework for building, running, and experimenting with complex chains of LLM tasks.",
   "type": "module",
   "main": "src/ui/server.js",

@@ -43,6 +43,7 @@
   "chokidar": "^3.5.3",
   "commander": "^14.0.2",
   "dotenv": "^17.2.3",
+  "fflate": "^0.8.2",
   "lucide-react": "^0.544.0",
   "openai": "^5.23.1",
   "react": "^19.2.0",
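The only dependency change is the addition of fflate, a small in-memory zip/gzip library. Given the zip upload support added in UploadSeed.jsx below and the new zip-utils.js module in the file list above, it presumably backs the new zip seed-bundle path.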
package/src/api/index.js
CHANGED

@@ -12,6 +12,7 @@ import {
   getJobPipelinePath,
 } from "../config/paths.js";
 import { generateJobId } from "../utils/id-generator.js";
+import { initializeJobArtifacts } from "../core/status-writer.js";
 
 // Pure functional utilities
 const createPaths = (config) => {

@@ -100,17 +101,28 @@ export const submitJob = async (state, seed) => {
  * @param {Object} options - Options object
  * @param {string} options.dataDir - Base data directory
  * @param {Object} options.seedObject - Seed object to submit
+ * @param {Array} [options.uploadArtifacts] - Array of {filename, content} objects
  * @returns {Promise<Object>} Result object with success status
  */
-export const submitJobWithValidation = async ({ dataDir, seedObject }) => {
+export const submitJobWithValidation = async ({
+  dataDir,
+  seedObject,
+  uploadArtifacts = [],
+}) => {
   let partialFiles = [];
 
   try {
     // Validate the seed object
+    console.log("[DEBUG] submitJobWithValidation: validating seed", {
+      seedName: seedObject.name,
+      seedPipeline: seedObject.pipeline,
+      hasData: !!seedObject.data,
+    });
     const validatedSeed = await validateSeed(
       JSON.stringify(seedObject),
       dataDir
     );
+    console.log("[DEBUG] submitJobWithValidation: seed validation passed");
 
     // Generate a random job ID
     const jobId = generateJobId();

@@ -175,6 +187,24 @@ export const submitJobWithValidation = async ({ dataDir, seedObject }) => {
       JSON.stringify(pipelineSnapshot, null, 2)
     );
 
+    // Initialize job artifacts if any provided
+    if (uploadArtifacts.length > 0) {
+      console.log("[DEBUG] submitJobWithValidation: initializing artifacts", {
+        artifactCount: uploadArtifacts.length,
+        artifactNames: uploadArtifacts.map((a) => a.filename),
+        currentJobDir,
+      });
+      try {
+        await initializeJobArtifacts(currentJobDir, uploadArtifacts);
+        console.log(
+          "[DEBUG] submitJobWithValidation: artifacts initialized successfully"
+        );
+      } catch (artifactError) {
+        // Don't fail the upload if artifact initialization fails, just log the error
+        console.error("Failed to initialize job artifacts:", artifactError);
+      }
+    }
+
     return {
       success: true,
       jobId,

@@ -199,6 +229,13 @@ export const submitJobWithValidation = async ({ dataDir, seedObject }) => {
       errorMessage = "Required fields missing";
     }
 
+    console.error("[DEBUG] submitJobWithValidation: validation failed", {
+      errorMessage,
+      originalError: error.message,
+      seedName: seedObject.name,
+      seedPipeline: seedObject.pipeline,
+    });
+
     return {
       success: false,
       message: errorMessage,
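Taken together with the return shapes above ({ success: true, jobId } on success, { success: false, message } on failure), the new uploadArtifacts option implies a call along these lines. This is a minimal sketch: the import path, dataDir, and seed field values are illustrative, and only the option names and the { filename, content } artifact shape come from the JSDoc in the diff.

import { submitJobWithValidation } from "./src/api/index.js";

// Hypothetical values; only the object shapes are taken from the diff above.
const result = await submitJobWithValidation({
  dataDir: "./pipeline-data",
  seedObject: { name: "demo-job", pipeline: "content", data: {} },
  uploadArtifacts: [{ filename: "outline.md", content: "# Outline" }],
});

if (!result.success) {
  console.error(result.message);
}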
package/src/components/DAGGrid.jsx
CHANGED

@@ -87,6 +87,22 @@ const canShowRestart = (status) =>
   return status === TaskState.FAILED || status === TaskState.DONE;
 };
 
+// Custom comparison function for TaskCard memoization
+const areEqualTaskCardProps = (prevProps, nextProps) => {
+  return (
+    prevProps.item === nextProps.item &&
+    prevProps.idx === nextProps.idx &&
+    prevProps.status === nextProps.status &&
+    prevProps.isActive === nextProps.isActive &&
+    prevProps.canRestart === nextProps.canRestart &&
+    prevProps.isSubmitting === nextProps.isSubmitting &&
+    prevProps.disabledReason === nextProps.disabledReason &&
+    prevProps.onClick === nextProps.onClick &&
+    prevProps.onKeyDown === nextProps.onKeyDown &&
+    prevProps.handleRestartClick === nextProps.handleRestartClick
+  );
+};
+
 // Memoized card component to prevent unnecessary re-renders
 const TaskCard = memo(function TaskCard({
   item,

@@ -96,7 +112,7 @@ const TaskCard = memo(function TaskCard({
   isActive,
   canRestart,
   isSubmitting,
-
+  disabledReason,
   onClick,
   onKeyDown,
   handleRestartClick,

@@ -190,9 +206,7 @@
           disabled={!canRestart || isSubmitting}
           className="text-xs cursor-pointer disabled:cursor-not-allowed"
           title={
-            !canRestart
-              ? getRestartDisabledReason()
-              : `Restart job from ${item.id}`
+            !canRestart ? disabledReason : `Restart job from ${item.id}`
           }
         >
           Restart

@@ -202,7 +216,7 @@
       </div>
     </div>
   );
-});
+}, areEqualTaskCardProps);
 
 /**
  * DAGGrid component for visualizing pipeline tasks with connectors and slide-over details

@@ -222,6 +236,7 @@ function DAGGrid({
   activeIndex = 0,
   jobId,
   filesByTypeForItem = () => createEmptyTaskFiles(),
+  taskById = {},
 }) {
   const overlayRef = useRef(null);
   const gridRef = useRef(null);

@@ -441,7 +456,7 @@
         cancelAnimationFrame(rafRef.current);
       }
     };
-  }, [items, effectiveCols, visualOrder]);
+  }, [items.length, effectiveCols, visualOrder]);
 
   // Get status for a given item index with fallback to activeIndex
   const getStatus = (index) => {

@@ -679,6 +694,7 @@
         const status = getStatus(idx);
         const isActive = idx === activeIndex;
         const canRestart = isRestartEnabled();
+        const restartDisabledReason = getRestartDisabledReason();
 
         return (
           <TaskCard

@@ -689,7 +705,7 @@
             isActive={isActive}
             canRestart={canRestart}
             isSubmitting={isSubmitting}
-
+            disabledReason={restartDisabledReason}
             onClick={() => {
               setOpenIdx(idx);
             }}

@@ -715,6 +731,7 @@
         jobId={jobId}
         taskId={items[openIdx]?.id || `task-${openIdx}`}
         taskBody={items[openIdx]?.body || null}
+        taskError={taskById[items[openIdx]?.id]?.error || null}
         filesByTypeForItem={filesByTypeForItem}
         task={items[openIdx]}
         taskIndex={openIdx}
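Two notes on this file. The comparator is the standard second argument to React.memo: when it returns true, React reuses the previous render of the memoized component, so TaskCard now re-renders only when one of the listed props actually changes. And the dependency-array change from items to items.length means the layout effect re-runs only when the number of tasks changes, not whenever the items array gets a new identity. A minimal illustration of the memo mechanism, with generic names not taken from this package:

import { memo } from "react";

// areEqual returning true tells React to skip re-rendering Label entirely.
const areEqual = (prev, next) => prev.text === next.text;

const Label = memo(function Label({ text }) {
  return <span>{text}</span>;
}, areEqual);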
package/src/components/JobDetail.jsx
CHANGED

@@ -129,6 +129,16 @@ export default function JobDetail({ job, pipeline }) {
       return item;
     });
 
+    // Check if all entries were reused and lengths match
+    const allReused = newItems.every(
+      (item, index) => item === prevItems[index]
+    );
+
+    if (allReused && prevItems.length === newItems.length) {
+      // All items reused, preserve array reference
+      return prevItems;
+    }
+
     prevDagItemsRef.current = newItems;
     return newItems;
   }, [stableDagItems]);

@@ -156,6 +166,7 @@
         activeIndex={activeIndex}
         jobId={job.id}
         filesByTypeForItem={filesByTypeForItem}
+        taskById={taskById}
       />
     </div>
   );
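Returning prevItems when every element is reference-equal keeps the array's identity stable across renders, which is what lets the areEqualTaskCardProps comparison above (prevProps.item === nextProps.item) short-circuit. The extra length check matters because every() runs over newItems only: if newItems were a strict prefix of prevItems, the element-wise check alone would still pass.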
package/src/components/TaskDetailSidebar.jsx
CHANGED

@@ -23,6 +23,7 @@ export function TaskDetailSidebar({
   jobId,
   taskId,
   taskBody,
+  taskError,
   filesByTypeForItem = () => ({ artifacts: [], logs: [], tmp: [] }),
   task,
   onClose,

@@ -32,6 +33,7 @@ export function TaskDetailSidebar({
   const [filePaneType, setFilePaneType] = useState("artifacts");
   const [filePaneOpen, setFilePaneOpen] = useState(false);
   const [filePaneFilename, setFilePaneFilename] = useState(null);
+  const [showStack, setShowStack] = useState(false);
   const closeButtonRef = useRef(null);
 
   // Get CSS classes for card header based on status (mirrored from DAGGrid)

@@ -120,14 +122,36 @@ export function TaskDetailSidebar({
       </div>
 
       <div className="p-6 space-y-8 overflow-y-auto h-full">
-        {/* Error Callout - shown when task has error status
-        {status === TaskState.FAILED && taskBody && (
+        {/* Error Callout - shown when task has error status */}
+        {status === TaskState.FAILED && (taskError?.message || taskBody) && (
           <section aria-label="Error">
             <Callout.Root role="alert" aria-live="assertive">
               <Callout.Text className="whitespace-pre-wrap break-words">
-                {taskBody}
+                {taskError?.message || taskBody}
               </Callout.Text>
             </Callout.Root>
+
+            {/* Stack trace toggle */}
+            {taskError?.stack && (
+              <div className="mt-3">
+                <button
+                  onClick={() => setShowStack(!showStack)}
+                  className="text-sm text-blue-600 hover:text-blue-800 underline"
+                  aria-expanded={showStack}
+                  aria-controls="error-stack"
+                >
+                  {showStack ? "Hide stack" : "Show stack"}
+                </button>
+                {showStack && (
+                  <pre
+                    id="error-stack"
+                    className="mt-2 p-2 bg-gray-50 border rounded text-xs font-mono max-h-64 overflow-auto whitespace-pre-wrap"
+                  >
+                    {taskError.stack}
+                  </pre>
+                )}
+              </div>
+            )}
           </section>
         )}
 
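The taskError prop threads through from JobDetail's taskById map via DAGGrid (taskError={taskById[...]?.error}), so the sidebar assumes an error object shaped like { message, stack }, falling back to the older taskBody text when no structured error is present.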
package/src/components/UploadSeed.jsx
CHANGED

@@ -152,14 +152,14 @@ export default function UploadSeed({ onUploadSuccess }) {
           <span className="font-medium text-gray-900">Click to upload</span>{" "}
           or drag and drop
         </div>
-        <p className="text-xs text-gray-500">JSON files only</p>
+        <p className="text-xs text-gray-500">JSON or zip files only</p>
       </div>
     </div>
 
     <input
       ref={fileInputRef}
       type="file"
-      accept=".json"
+      accept=".json,.zip"
       className="hidden"
       onChange={handleFileChange}
       data-testid="file-input"
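The widened accept attribute pairs with the fflate dependency added in package.json and the new zip-utils.js module (not shown in this section). As a rough sketch of what unpacking a zipped seed bundle with fflate can look like — illustrative only, not the package's actual zip-utils implementation, and the seed.json entry-name convention is an assumption:

import { unzipSync, strFromU8 } from "fflate";

// Read a File from the <input> and unpack its entries in memory.
async function readSeedBundle(file) {
  const bytes = new Uint8Array(await file.arrayBuffer());
  const entries = unzipSync(bytes); // { [path]: Uint8Array }
  const seed = JSON.parse(strFromU8(entries["seed.json"])); // assumed entry name
  const uploadArtifacts = Object.entries(entries)
    .filter(([name]) => name !== "seed.json")
    .map(([filename, data]) => ({ filename, content: strFromU8(data) }));
  return { seed, uploadArtifacts };
}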
package/src/config/log-events.js
ADDED

@@ -0,0 +1,77 @@
+/**
+ * Canonical log event constants and file extensions for the prompt orchestration pipeline.
+ * This module serves as the single source of truth for all log-related naming conventions.
+ */
+
+// Log event types for different stages and events in the pipeline
+export const LogEvent = Object.freeze({
+  START: "start",
+  COMPLETE: "complete",
+  ERROR: "error",
+  CONTEXT: "context",
+  DEBUG: "debug",
+  METRICS: "metrics",
+  PIPELINE_START: "pipeline-start",
+  PIPELINE_COMPLETE: "pipeline-complete",
+  PIPELINE_ERROR: "pipeline-error",
+  EXECUTION_LOGS: "execution-logs",
+  FAILURE_DETAILS: "failure-details",
+});
+
+// File extensions for different log types
+export const LogFileExtension = Object.freeze({
+  TEXT: "log",
+  JSON: "json",
+});
+
+// Validation sets for ensuring consistency
+export const VALID_LOG_EVENTS = new Set(Object.values(LogEvent));
+export const VALID_LOG_FILE_EXTENSIONS = new Set(
+  Object.values(LogFileExtension)
+);
+
+/**
+ * Validates a log event string.
+ * @param {string} event - Log event to validate
+ * @returns {boolean} True if valid, false otherwise
+ */
+export function isValidLogEvent(event) {
+  return VALID_LOG_EVENTS.has(event);
+}
+
+/**
+ * Validates a log file extension string.
+ * @param {string} ext - File extension to validate
+ * @returns {boolean} True if valid, false otherwise
+ */
+export function isValidLogFileExtension(ext) {
+  return VALID_LOG_FILE_EXTENSIONS.has(ext);
+}
+
+/**
+ * Normalizes a log event string to canonical form.
+ * @param {string} event - Raw log event
+ * @returns {string|null} Canonical log event or null if invalid
+ */
+export function normalizeLogEvent(event) {
+  if (typeof event !== "string") {
+    return null;
+  }
+
+  const normalized = event.toLowerCase().trim();
+  return isValidLogEvent(normalized) ? normalized : null;
+}
+
+/**
+ * Normalizes a log file extension string to canonical form.
+ * @param {string} ext - Raw file extension
+ * @returns {string|null} Canonical file extension or null if invalid
+ */
+export function normalizeLogFileExtension(ext) {
+  if (typeof ext !== "string") {
+    return null;
+  }
+
+  const normalized = ext.toLowerCase().trim().replace(/^\./, "");
+  return isValidLogFileExtension(normalized) ? normalized : null;
+}
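Usage of the normalizers is straightforward (illustrative inputs, import path shown relative to src/config):

import { LogEvent, normalizeLogEvent, normalizeLogFileExtension } from "./log-events.js";

normalizeLogEvent("  ERROR ");      // "error" (=== LogEvent.ERROR)
normalizeLogEvent("unknown-event"); // null
normalizeLogFileExtension(".JSON"); // "json" (lowercased, leading dot stripped)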
package/src/core/file-io.js
CHANGED

@@ -1,6 +1,13 @@
 import fs from "node:fs/promises";
+import fsSync from "node:fs";
 import path from "node:path";
 import { writeJobStatus } from "./status-writer.js";
+import {
+  LogEvent,
+  LogFileExtension,
+  isValidLogEvent,
+  isValidLogFileExtension,
+} from "../config/log-events.js";
 
 /**
  * Creates a task-scoped file I/O interface that manages file operations

@@ -18,7 +25,17 @@ async function ensureDir(dir) {
   await fs.mkdir(dir, { recursive: true });
 }
 
-export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
+function ensureDirSync(dir) {
+  fsSync.mkdir(dir, { recursive: true });
+}
+
+export function createTaskFileIO({
+  workDir,
+  taskName,
+  getStage,
+  statusPath,
+  trackTaskFiles = true,
+}) {
   const taskDir = path.join(workDir, "tasks", taskName);
 
   // New directory structure: {workDir}/files/{type}

@@ -34,18 +51,21 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
     const jobDir = path.dirname(statusPath);
     await writeJobStatus(jobDir, (snapshot) => {
       snapshot.files ||= { artifacts: [], logs: [], tmp: [] };
-      snapshot.tasks ||= {};
-      snapshot.tasks[taskName] ||= {};
-      snapshot.tasks[taskName].files ||= { artifacts: [], logs: [], tmp: [] };
 
       const jobArray = snapshot.files[fileType];
       if (!jobArray.includes(fileName)) {
         jobArray.push(fileName);
       }
 
-      const taskArray = snapshot.tasks[taskName].files[fileType];
-      if (!taskArray.includes(fileName)) {
-        taskArray.push(fileName);
+      if (trackTaskFiles) {
+        snapshot.tasks ||= {};
+        snapshot.tasks[taskName] ||= {};
+        snapshot.tasks[taskName].files ||= { artifacts: [], logs: [], tmp: [] };
+
+        const taskArray = snapshot.tasks[taskName].files[fileType];
+        if (!taskArray.includes(fileName)) {
+          taskArray.push(fileName);
+        }
       }
 
       return snapshot;

@@ -61,6 +81,15 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
     await fs.rename(tmpPath, filePath);
   }
 
+  /**
+   * Synchronous atomic write helper
+   */
+  function atomicWriteSync(filePath, data) {
+    const tmpPath = filePath + ".tmp";
+    fsSync.writeFileSync(tmpPath, data);
+    fsSync.renameSync(tmpPath, filePath);
+  }
+
   /**
    * Generic write function that handles different modes
    */

@@ -85,6 +114,54 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
     return await fs.readFile(filePath, "utf8");
   }
 
+  /**
+   * Synchronous status writer for critical paths
+   * @param {string} jobDir - Directory containing tasks-status.json
+   * @param {Function} updater - Function that mutates and returns the snapshot
+   */
+  function writeJobStatusSync(jobDir, updater) {
+    const statusPath = path.join(jobDir, "tasks-status.json");
+    let snapshot;
+    try {
+      const raw = fsSync.readFileSync(statusPath, "utf8");
+      snapshot = JSON.parse(raw);
+    } catch {
+      snapshot = { files: { artifacts: [], logs: [], tmp: [] }, tasks: {} };
+    }
+    const updated = updater(snapshot);
+    fsSync.writeFileSync(statusPath, JSON.stringify(updated, null, 2));
+  }
+
+  /**
+   * Synchronous status update with file tracking and metadata
+   * @param {string} fileType - "logs", "artifacts", or "tmp"
+   * @param {string} fileName - Name of the file
+   */
+  function updateStatusWithFilesSync(fileType, fileName) {
+    const jobDir = path.dirname(statusPath);
+    writeJobStatusSync(jobDir, (snapshot) => {
+      snapshot.files ||= { artifacts: [], logs: [], tmp: [] };
+
+      const jobArray = snapshot.files[fileType];
+      if (!jobArray.includes(fileName)) {
+        jobArray.push(fileName);
+      }
+
+      if (trackTaskFiles) {
+        snapshot.tasks ||= {};
+        snapshot.tasks[taskName] ||= {};
+        snapshot.tasks[taskName].files ||= { artifacts: [], logs: [], tmp: [] };
+
+        const taskArray = snapshot.tasks[taskName].files[fileType];
+        if (!taskArray.includes(fileName)) {
+          taskArray.push(fileName);
+        }
+      }
+
+      return snapshot;
+    });
+  }
+
   // Return curried functions for each file type
   return {
     /**

@@ -113,6 +190,12 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
      * @param {string} options.mode - "append" (default) or "replace"
      */
     async writeLog(name, content, options = {}) {
+      if (!validateLogName(name)) {
+        throw new Error(
+          `Invalid log filename "${name}". Must follow format {taskName}-{stage}-{event}.{ext}`
+        );
+      }
+
       const filePath = await writeFile(
         logsDir,
         name,

@@ -176,6 +259,33 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
      return taskDir;
    },
 
+    /**
+     * Write a log file synchronously (critical path only)
+     * @param {string} name - File name
+     * @param {string} content - Log content
+     * @param {Object} options - Options object
+     * @param {string} options.mode - "replace" (default) or "append"
+     */
+    writeLogSync(name, content, options = {}) {
+      if (!validateLogName(name)) {
+        throw new Error(
+          `Invalid log filename "${name}". Must follow format {taskName}-{stage}-{event}.{ext}`
+        );
+      }
+
+      ensureDirSync(logsDir);
+      const filePath = path.join(logsDir, name);
+
+      if (options.mode === "append") {
+        fsSync.appendFileSync(filePath, content);
+      } else {
+        atomicWriteSync(filePath, content);
+      }
+
+      updateStatusWithFilesSync("logs", name);
+      return filePath;
+    },
+
     /**
      * Get the current stage name
      * @returns {string} Current stage name

@@ -185,3 +295,88 @@ export function createTaskFileIO({ workDir, taskName, getStage, statusPath }) {
     },
   };
 }
+
+/**
+ * Generates a standardized log filename following the convention {taskName}-{stage}-{event}.{ext}
+ * @param {string} taskName - Name of the task
+ * @param {string} stage - Stage name or identifier
+ * @param {string} event - Event type from LogEvent constants
+ * @param {string} ext - File extension from LogFileExtension constants
+ * @returns {string} Formatted log filename
+ */
+export function generateLogName(
+  taskName,
+  stage,
+  event,
+  ext = LogFileExtension.TEXT
+) {
+  if (!taskName || !stage || !event || !ext) {
+    throw new Error(
+      "All parameters (taskName, stage, event, ext) are required for generateLogName"
+    );
+  }
+  if (!isValidLogEvent(event)) {
+    throw new Error(
+      `Invalid log event "${event}". Use a value from LogEvent: ${Object.values(
+        LogEvent
+      ).join(", ")}`
+    );
+  }
+  if (!isValidLogFileExtension(ext)) {
+    throw new Error(
+      `Invalid log file extension "${ext}". Use a value from LogFileExtension: ${Object.values(
+        LogFileExtension
+      ).join(", ")}`
+    );
+  }
+  return `${taskName}-${stage}-${event}.${ext}`;
+}
+
+/**
+ * Parses a log filename to extract taskName, stage, event, and extension
+ * @param {string} fileName - Log filename to parse
+ * @returns {Object|null} Parsed components or null if invalid format
+ */
+export function parseLogName(fileName) {
+  if (typeof fileName !== "string") {
+    return null;
+  }
+
+  // Match pattern: taskName-stage-event.ext
+  // Split on first two hyphens: taskName-stage-event.ext
+  const match = fileName.match(
+    /^(?<taskName>[^-]+)-(?<stage>[^-]+)-(?<event>[^.]+)\.(?<ext>.+)$/
+  );
+  if (!match) {
+    return null;
+  }
+
+  const { taskName, stage, event, ext } = match.groups;
+  return { taskName, stage, event, ext };
+}
+
+/**
+ * Generates a glob pattern for matching log files with specific components
+ * @param {string} taskName - Task name (optional, use "*" for wildcard)
+ * @param {string} stage - Stage name (optional, use "*" for wildcard)
+ * @param {string} event - Event type (optional, use "*" for wildcard)
+ * @param {string} ext - File extension (optional, use "*" for wildcard)
+ * @returns {string} Glob pattern for file matching
+ */
+export function getLogPattern(
+  taskName = "*",
+  stage = "*",
+  event = "*",
+  ext = "*"
+) {
+  return `${taskName}-${stage}-${event}.${ext}`;
+}
+
+/**
+ * Validates that a log filename follows the standardized naming convention
+ * @param {string} fileName - Log filename to validate
+ * @returns {boolean} True if valid, false otherwise
+ */
+export function validateLogName(fileName) {
+  return parseLogName(fileName) !== null;
+}
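Combined, the naming helpers and the sync writer line up as follows — a sketch assuming a fileIO instance created by createTaskFileIO as above:

import { generateLogName, parseLogName } from "./file-io.js";
import { LogEvent, LogFileExtension } from "../config/log-events.js";

const name = generateLogName("summarize", "draft", LogEvent.ERROR, LogFileExtension.JSON);
// "summarize-draft-error.json" — passes validateLogName, so writeLog/writeLogSync accept it.

fileIO.writeLogSync(name, JSON.stringify({ message: "boom" })); // fileIO assumed, created earlier

parseLogName(name);
// => { taskName: "summarize", stage: "draft", event: "error", ext: "json" }

Note that the parse regex requires taskName and stage to be hyphen-free ([^-]+), so a hyphenated task name such as "my-task" would still validate but parse into the wrong components.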