@ryanfw/prompt-orchestration-pipeline 0.10.0 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@ryanfw/prompt-orchestration-pipeline",
3
- "version": "0.10.0",
3
+ "version": "0.11.0",
4
4
  "description": "A Prompt-orchestration pipeline (POP) is a framework for building, running, and experimenting with complex chains of LLM tasks.",
5
5
  "type": "module",
6
6
  "main": "src/ui/server.js",
@@ -43,6 +43,7 @@
43
43
  "chokidar": "^3.5.3",
44
44
  "commander": "^14.0.2",
45
45
  "dotenv": "^17.2.3",
46
+ "fflate": "^0.8.2",
46
47
  "lucide-react": "^0.544.0",
47
48
  "openai": "^5.23.1",
48
49
  "react": "^19.2.0",
package/src/api/index.js CHANGED
@@ -12,6 +12,7 @@ import {
12
12
  getJobPipelinePath,
13
13
  } from "../config/paths.js";
14
14
  import { generateJobId } from "../utils/id-generator.js";
15
+ import { initializeJobArtifacts } from "../core/status-writer.js";
15
16
 
16
17
  // Pure functional utilities
17
18
  const createPaths = (config) => {
@@ -100,17 +101,28 @@ export const submitJob = async (state, seed) => {
100
101
  * @param {Object} options - Options object
101
102
  * @param {string} options.dataDir - Base data directory
102
103
  * @param {Object} options.seedObject - Seed object to submit
104
+ * @param {Array} [options.uploadArtifacts] - Array of {filename, content} objects
103
105
  * @returns {Promise<Object>} Result object with success status
104
106
  */
105
- export const submitJobWithValidation = async ({ dataDir, seedObject }) => {
107
+ export const submitJobWithValidation = async ({
108
+ dataDir,
109
+ seedObject,
110
+ uploadArtifacts = [],
111
+ }) => {
106
112
  let partialFiles = [];
107
113
 
108
114
  try {
109
115
  // Validate the seed object
116
+ console.log("[DEBUG] submitJobWithValidation: validating seed", {
117
+ seedName: seedObject.name,
118
+ seedPipeline: seedObject.pipeline,
119
+ hasData: !!seedObject.data,
120
+ });
110
121
  const validatedSeed = await validateSeed(
111
122
  JSON.stringify(seedObject),
112
123
  dataDir
113
124
  );
125
+ console.log("[DEBUG] submitJobWithValidation: seed validation passed");
114
126
 
115
127
  // Generate a random job ID
116
128
  const jobId = generateJobId();
@@ -175,6 +187,24 @@ export const submitJobWithValidation = async ({ dataDir, seedObject }) => {
175
187
  JSON.stringify(pipelineSnapshot, null, 2)
176
188
  );
177
189
 
190
+ // Initialize job artifacts if any provided
191
+ if (uploadArtifacts.length > 0) {
192
+ console.log("[DEBUG] submitJobWithValidation: initializing artifacts", {
193
+ artifactCount: uploadArtifacts.length,
194
+ artifactNames: uploadArtifacts.map((a) => a.filename),
195
+ currentJobDir,
196
+ });
197
+ try {
198
+ await initializeJobArtifacts(currentJobDir, uploadArtifacts);
199
+ console.log(
200
+ "[DEBUG] submitJobWithValidation: artifacts initialized successfully"
201
+ );
202
+ } catch (artifactError) {
203
+ // Don't fail the upload if artifact initialization fails, just log the error
204
+ console.error("Failed to initialize job artifacts:", artifactError);
205
+ }
206
+ }
207
+
178
208
  return {
179
209
  success: true,
180
210
  jobId,
@@ -199,6 +229,13 @@ export const submitJobWithValidation = async ({ dataDir, seedObject }) => {
199
229
  errorMessage = "Required fields missing";
200
230
  }
201
231
 
232
+ console.error("[DEBUG] submitJobWithValidation: validation failed", {
233
+ errorMessage,
234
+ originalError: error.message,
235
+ seedName: seedObject.name,
236
+ seedPipeline: seedObject.pipeline,
237
+ });
238
+
202
239
  return {
203
240
  success: false,
204
241
  message: errorMessage,
@@ -87,6 +87,22 @@ const canShowRestart = (status) => {
87
87
  return status === TaskState.FAILED || status === TaskState.DONE;
88
88
  };
89
89
 
90
// Props that drive TaskCard rendering. memo() re-renders only when one of
// these changes under shallow (===) comparison.
const TASK_CARD_PROP_KEYS = [
  "item",
  "idx",
  "status",
  "isActive",
  "canRestart",
  "isSubmitting",
  "disabledReason",
  "onClick",
  "onKeyDown",
  "handleRestartClick",
];

// Custom comparison function for TaskCard memoization: true when every
// tracked prop is reference/value identical between renders.
const areEqualTaskCardProps = (prevProps, nextProps) =>
  TASK_CARD_PROP_KEYS.every((key) => prevProps[key] === nextProps[key]);
105
+
90
106
  // Memoized card component to prevent unnecessary re-renders
91
107
  const TaskCard = memo(function TaskCard({
92
108
  item,
@@ -96,7 +112,7 @@ const TaskCard = memo(function TaskCard({
96
112
  isActive,
97
113
  canRestart,
98
114
  isSubmitting,
99
- getRestartDisabledReason,
115
+ disabledReason,
100
116
  onClick,
101
117
  onKeyDown,
102
118
  handleRestartClick,
@@ -190,9 +206,7 @@ const TaskCard = memo(function TaskCard({
190
206
  disabled={!canRestart || isSubmitting}
191
207
  className="text-xs cursor-pointer disabled:cursor-not-allowed"
192
208
  title={
193
- !canRestart
194
- ? getRestartDisabledReason()
195
- : `Restart job from ${item.id}`
209
+ !canRestart ? disabledReason : `Restart job from ${item.id}`
196
210
  }
197
211
  >
198
212
  Restart
@@ -202,7 +216,7 @@ const TaskCard = memo(function TaskCard({
202
216
  </div>
203
217
  </div>
204
218
  );
205
- });
219
+ }, areEqualTaskCardProps);
206
220
 
207
221
  /**
208
222
  * DAGGrid component for visualizing pipeline tasks with connectors and slide-over details
@@ -222,6 +236,7 @@ function DAGGrid({
222
236
  activeIndex = 0,
223
237
  jobId,
224
238
  filesByTypeForItem = () => createEmptyTaskFiles(),
239
+ taskById = {},
225
240
  }) {
226
241
  const overlayRef = useRef(null);
227
242
  const gridRef = useRef(null);
@@ -441,7 +456,7 @@ function DAGGrid({
441
456
  cancelAnimationFrame(rafRef.current);
442
457
  }
443
458
  };
444
- }, [items, effectiveCols, visualOrder]);
459
+ }, [items.length, effectiveCols, visualOrder]);
445
460
 
446
461
  // Get status for a given item index with fallback to activeIndex
447
462
  const getStatus = (index) => {
@@ -679,6 +694,7 @@ function DAGGrid({
679
694
  const status = getStatus(idx);
680
695
  const isActive = idx === activeIndex;
681
696
  const canRestart = isRestartEnabled();
697
+ const restartDisabledReason = getRestartDisabledReason();
682
698
 
683
699
  return (
684
700
  <TaskCard
@@ -689,7 +705,7 @@ function DAGGrid({
689
705
  isActive={isActive}
690
706
  canRestart={canRestart}
691
707
  isSubmitting={isSubmitting}
692
- getRestartDisabledReason={getRestartDisabledReason}
708
+ disabledReason={restartDisabledReason}
693
709
  onClick={() => {
694
710
  setOpenIdx(idx);
695
711
  }}
@@ -715,6 +731,7 @@ function DAGGrid({
715
731
  jobId={jobId}
716
732
  taskId={items[openIdx]?.id || `task-${openIdx}`}
717
733
  taskBody={items[openIdx]?.body || null}
734
+ taskError={taskById[items[openIdx]?.id]?.error || null}
718
735
  filesByTypeForItem={filesByTypeForItem}
719
736
  task={items[openIdx]}
720
737
  taskIndex={openIdx}
@@ -129,6 +129,16 @@ export default function JobDetail({ job, pipeline }) {
129
129
  return item;
130
130
  });
131
131
 
132
+ // Check if all entries were reused and lengths match
133
+ const allReused = newItems.every(
134
+ (item, index) => item === prevItems[index]
135
+ );
136
+
137
+ if (allReused && prevItems.length === newItems.length) {
138
+ // All items reused, preserve array reference
139
+ return prevItems;
140
+ }
141
+
132
142
  prevDagItemsRef.current = newItems;
133
143
  return newItems;
134
144
  }, [stableDagItems]);
@@ -156,6 +166,7 @@ export default function JobDetail({ job, pipeline }) {
156
166
  activeIndex={activeIndex}
157
167
  jobId={job.id}
158
168
  filesByTypeForItem={filesByTypeForItem}
169
+ taskById={taskById}
159
170
  />
160
171
  </div>
161
172
  );
@@ -23,6 +23,7 @@ export function TaskDetailSidebar({
23
23
  jobId,
24
24
  taskId,
25
25
  taskBody,
26
+ taskError,
26
27
  filesByTypeForItem = () => ({ artifacts: [], logs: [], tmp: [] }),
27
28
  task,
28
29
  onClose,
@@ -32,6 +33,7 @@ export function TaskDetailSidebar({
32
33
  const [filePaneType, setFilePaneType] = useState("artifacts");
33
34
  const [filePaneOpen, setFilePaneOpen] = useState(false);
34
35
  const [filePaneFilename, setFilePaneFilename] = useState(null);
36
+ const [showStack, setShowStack] = useState(false);
35
37
  const closeButtonRef = useRef(null);
36
38
 
37
39
  // Get CSS classes for card header based on status (mirrored from DAGGrid)
@@ -120,14 +122,36 @@ export function TaskDetailSidebar({
120
122
  </div>
121
123
 
122
124
  <div className="p-6 space-y-8 overflow-y-auto h-full">
123
- {/* Error Callout - shown when task has error status and body */}
124
- {status === TaskState.FAILED && taskBody && (
125
+ {/* Error Callout - shown when task has error status */}
126
+ {status === TaskState.FAILED && (taskError?.message || taskBody) && (
125
127
  <section aria-label="Error">
126
128
  <Callout.Root role="alert" aria-live="assertive">
127
129
  <Callout.Text className="whitespace-pre-wrap break-words">
128
- {taskBody}
130
+ {taskError?.message || taskBody}
129
131
  </Callout.Text>
130
132
  </Callout.Root>
133
+
134
+ {/* Stack trace toggle */}
135
+ {taskError?.stack && (
136
+ <div className="mt-3">
137
+ <button
138
+ onClick={() => setShowStack(!showStack)}
139
+ className="text-sm text-blue-600 hover:text-blue-800 underline"
140
+ aria-expanded={showStack}
141
+ aria-controls="error-stack"
142
+ >
143
+ {showStack ? "Hide stack" : "Show stack"}
144
+ </button>
145
+ {showStack && (
146
+ <pre
147
+ id="error-stack"
148
+ className="mt-2 p-2 bg-gray-50 border rounded text-xs font-mono max-h-64 overflow-auto whitespace-pre-wrap"
149
+ >
150
+ {taskError.stack}
151
+ </pre>
152
+ )}
153
+ </div>
154
+ )}
131
155
  </section>
132
156
  )}
133
157
 
@@ -152,14 +152,14 @@ export default function UploadSeed({ onUploadSuccess }) {
152
152
  <span className="font-medium text-gray-900">Click to upload</span>{" "}
153
153
  or drag and drop
154
154
  </div>
155
- <p className="text-xs text-gray-500">JSON files only</p>
155
+ <p className="text-xs text-gray-500">JSON or zip files only</p>
156
156
  </div>
157
157
  </div>
158
158
 
159
159
  <input
160
160
  ref={fileInputRef}
161
161
  type="file"
162
- accept=".json"
162
+ accept=".json,.zip"
163
163
  className="hidden"
164
164
  onChange={handleFileChange}
165
165
  data-testid="file-input"
@@ -154,6 +154,38 @@ export async function startOrchestrator(opts) {
154
154
  tasks: {}, // Initialize empty tasks object for pipeline runner
155
155
  };
156
156
  await fs.writeFile(statusPath, JSON.stringify(status, null, 2));
157
+
158
+ // Initialize status from artifacts if any exist
159
+ try {
160
+ const { initializeStatusFromArtifacts } = await import(
161
+ "./status-initializer.js"
162
+ );
163
+ const pipelineConfig = getPipelineConfig(seed?.pipeline || "default");
164
+ const pipelineSnapshot = JSON.parse(
165
+ await fs.readFile(pipelineConfig.pipelineJsonPath, "utf8")
166
+ );
167
+
168
+ const applyArtifacts = await initializeStatusFromArtifacts({
169
+ jobDir: workDir,
170
+ pipeline: pipelineSnapshot,
171
+ });
172
+
173
+ // Apply artifact initialization to the status
174
+ const updatedStatus = applyArtifacts(status);
175
+ await fs.writeFile(statusPath, JSON.stringify(updatedStatus, null, 2));
176
+
177
+ logger.log("Initialized status from upload artifacts", {
178
+ jobId,
179
+ pipeline: seed?.pipeline,
180
+ artifactsCount: updatedStatus.files?.artifacts?.length || 0,
181
+ });
182
+ } catch (artifactError) {
183
+ // Don't fail job startup if artifact initialization fails, just log
184
+ logger.warn("Failed to initialize status from artifacts", {
185
+ jobId,
186
+ error: artifactError.message,
187
+ });
188
+ }
157
189
  }
158
190
  // Create fileIO for orchestrator-level logging
159
191
  const fileIO = createTaskFileIO({
@@ -0,0 +1,155 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+
4
/**
 * Initialize status snapshot from artifacts in the filesystem.
 *
 * Scans `<jobDir>/files/artifacts` for regular files and returns an `apply`
 * function that merges the discovered filenames into a status snapshot:
 * into the root-level `files.artifacts` list and into the pipeline's first
 * task, so pre-uploaded artifacts are attributed to the task that consumes
 * them. Directory reading is best-effort: when the directory is missing or
 * unreadable the returned function is a no-op passthrough.
 *
 * @param {Object} options - Options object
 * @param {string} options.jobDir - Job directory path
 * @param {Object} options.pipeline - Pipeline configuration object
 * @returns {Promise<Function>} Function that applies artifact initialization
 *   to a snapshot (mutates and returns the same snapshot object)
 * @throws {Error} When jobDir or pipeline has the wrong type
 */
export async function initializeStatusFromArtifacts({ jobDir, pipeline }) {
  if (!jobDir || typeof jobDir !== "string") {
    throw new Error("jobDir must be a non-empty string");
  }

  if (!pipeline || typeof pipeline !== "object") {
    throw new Error("pipeline must be an object");
  }

  const artifactsDir = path.join(jobDir, "files", "artifacts");
  let artifactFilenames = [];

  try {
    // Read artifacts directory; collect names of regular files only
    // (subdirectories and other entry types are ignored).
    const entries = await fs.readdir(artifactsDir, { withFileTypes: true });
    artifactFilenames = entries
      .filter((entry) => entry.isFile())
      .map((entry) => entry.name);

    console.log("[STATUS_INIT] Found artifacts in directory", {
      artifactsDir,
      artifactCount: artifactFilenames.length,
      artifactNames: artifactFilenames,
    });
  } catch (error) {
    if (error.code === "ENOENT") {
      // Directory doesn't exist, no artifacts to initialize
      console.log(
        "[STATUS_INIT] Artifacts directory does not exist, skipping initialization",
        {
          artifactsDir,
        }
      );
    } else {
      console.error("[STATUS_INIT] Failed to read artifacts directory", {
        artifactsDir,
        error: error.message,
      });
    }
    // Return a no-op function for non-existent or unreadable directory
    return (snapshot) => snapshot;
  }

  // Determine first task ID from pipeline
  const firstTaskId = Array.isArray(pipeline.tasks) ? pipeline.tasks[0] : null;
  console.log("[STATUS_INIT] Determined first task", {
    firstTaskId,
    hasTasks: Array.isArray(pipeline.tasks),
    taskCount: pipeline.tasks?.length || 0,
  });

  // Guarantee `owner[key]` is a {artifacts, logs, tmp} bucket whose members
  // are arrays, repairing partially-formed objects in place. Shared by the
  // root-level and task-level merge paths below (previously duplicated).
  const ensureFileBuckets = (owner, key) => {
    if (!owner[key] || typeof owner[key] !== "object") {
      owner[key] = { artifacts: [], logs: [], tmp: [] };
    } else {
      for (const type of ["artifacts", "logs", "tmp"]) {
        if (!Array.isArray(owner[key][type])) {
          owner[key][type] = [];
        }
      }
    }
    return owner[key];
  };

  // Append names to target, skipping any already present (deduplicate).
  const appendUnique = (target, names) => {
    const seen = new Set(target);
    for (const name of names) {
      if (!seen.has(name)) {
        target.push(name);
        seen.add(name);
      }
    }
  };

  // Return function that applies the artifact initialization to a snapshot
  return function apply(snapshot) {
    console.log("[STATUS_INIT] Applying artifact initialization to snapshot", {
      existingArtifacts: snapshot.files?.artifacts?.length || 0,
      newArtifacts: artifactFilenames.length,
      firstTaskId,
    });

    // Record artifacts at the job (root) level.
    appendUnique(
      ensureFileBuckets(snapshot, "files").artifacts,
      artifactFilenames
    );

    // Attribute the same artifacts to the first task, if the pipeline has one.
    if (firstTaskId) {
      if (!snapshot.tasks || typeof snapshot.tasks !== "object") {
        snapshot.tasks = {};
      }
      if (!snapshot.tasks[firstTaskId]) {
        snapshot.tasks[firstTaskId] = {};
      }

      appendUnique(
        ensureFileBuckets(snapshot.tasks[firstTaskId], "files").artifacts,
        artifactFilenames
      );

      console.log("[STATUS_INIT] Added artifacts to first task", {
        firstTaskId,
        taskArtifactCount: snapshot.tasks[firstTaskId].files.artifacts.length,
        artifactNames: artifactFilenames,
      });
    }

    console.log("[STATUS_INIT] Final snapshot state", {
      rootArtifacts: snapshot.files.artifacts.length,
      rootArtifactNames: snapshot.files.artifacts,
      firstTaskArtifacts: firstTaskId
        ? snapshot.tasks[firstTaskId].files.artifacts.length
        : 0,
      firstTaskArtifactNames: firstTaskId
        ? snapshot.tasks[firstTaskId].files.artifacts
        : [],
    });

    return snapshot;
  };
}
@@ -281,22 +281,71 @@ export async function updateTaskStatus(jobDir, taskId, taskUpdateFn) {
281
281
  throw new Error("taskUpdateFn must be a function");
282
282
  }
283
283
 
284
- return writeJobStatus(jobDir, (snapshot) => {
285
- // Ensure task exists
286
- if (!snapshot.tasks[taskId]) {
287
- snapshot.tasks[taskId] = {};
288
- }
284
+ const jobId = path.basename(jobDir);
285
+ const logger = createJobLogger("StatusWriter", jobId);
289
286
 
290
- const task = snapshot.tasks[taskId];
287
+ // Get or create the write queue for this job directory
288
+ const prev = writeQueues.get(jobDir) || Promise.resolve();
289
+ let resultSnapshot;
291
290
 
292
- // Apply task updates
293
- const result = taskUpdateFn(task);
294
- if (result !== undefined) {
295
- snapshot.tasks[taskId] = result;
296
- }
291
+ const next = prev
292
+ .then(async () => {
293
+ logger.group("Task Status Update Operation");
294
+ logger.log(`Updating task ${taskId} for job: ${jobId}`);
297
295
 
298
- return snapshot;
299
- });
296
+ const statusPath = path.join(jobDir, "tasks-status.json");
297
+
298
+ // Read existing status or create default
299
+ const current = await readStatusFile(statusPath, jobId);
300
+ const validated = validateStatusSnapshot(current);
301
+
302
+ // Ensure task exists
303
+ if (!validated.tasks[taskId]) {
304
+ validated.tasks[taskId] = {};
305
+ }
306
+
307
+ const task = validated.tasks[taskId];
308
+
309
+ // Apply task updates
310
+ const result = taskUpdateFn(task);
311
+ if (result !== undefined) {
312
+ validated.tasks[taskId] = result;
313
+ }
314
+
315
+ validated.lastUpdated = new Date().toISOString();
316
+
317
+ // Atomic write
318
+ await atomicWrite(statusPath, validated);
319
+ logger.log("Task status file written successfully");
320
+
321
+ // Emit task:updated SSE event after successful write
322
+ try {
323
+ const eventData = {
324
+ jobId,
325
+ taskId,
326
+ task: validated.tasks[taskId],
327
+ };
328
+ await logger.sse("task:updated", eventData);
329
+ logger.log("task:updated SSE event broadcasted successfully");
330
+ } catch (error) {
331
+ // Don't fail the write if SSE emission fails
332
+ logger.error("Failed to emit task:updated SSE event:", error);
333
+ }
334
+
335
+ logger.groupEnd();
336
+ resultSnapshot = validated;
337
+ })
338
+ .catch((e) => {
339
+ throw e;
340
+ });
341
+
342
+ // Store the promise chain and set up cleanup
343
+ writeQueues.set(
344
+ jobDir,
345
+ next.finally(() => {})
346
+ );
347
+
348
+ return next.then(() => resultSnapshot);
300
349
  }
301
350
 
302
351
  /**
@@ -435,3 +484,92 @@ export async function resetJobToCleanSlate(
435
484
  return snapshot;
436
485
  });
437
486
  }
487
+
488
+ /**
489
+ * Consolidated path jail security validation with generic error messages
490
+ * @param {string} filename - Filename to validate
491
+ * @returns {Object|null} Validation result or null if valid
492
+ */
493
+ function validateFilePath(filename) {
494
+ // Check for path traversal patterns
495
+ if (filename.includes("..")) {
496
+ console.error("Path security: path traversal detected", { filename });
497
+ return {
498
+ allowed: false,
499
+ message: "Path validation failed",
500
+ };
501
+ }
502
+
503
+ // Check for absolute paths (POSIX, Windows, backslashes, ~)
504
+ if (
505
+ path.isAbsolute(filename) ||
506
+ /^[a-zA-Z]:/.test(filename) ||
507
+ filename.includes("\\") ||
508
+ filename.startsWith("~")
509
+ ) {
510
+ console.error("Path security: absolute path detected", { filename });
511
+ return {
512
+ allowed: false,
513
+ message: "Path validation failed",
514
+ };
515
+ }
516
+
517
+ // Check for empty filename
518
+ if (!filename || filename.trim() === "") {
519
+ console.error("Path security: empty filename detected");
520
+ return {
521
+ allowed: false,
522
+ message: "Path validation failed",
523
+ };
524
+ }
525
+
526
+ // Path is valid
527
+ return null;
528
+ }
529
+
530
+ /**
531
+ * Initialize job-level artifact index and copy artifacts to job directory
532
+ * @param {string} jobDir - Job directory path
533
+ * @param {Array} uploadArtifacts - Array of {filename, content} objects
534
+ * @returns {Promise<void>}
535
+ */
536
+ export async function initializeJobArtifacts(jobDir, uploadArtifacts = []) {
537
+ if (!jobDir || typeof jobDir !== "string") {
538
+ throw new Error("jobDir must be a non-empty string");
539
+ }
540
+
541
+ if (!Array.isArray(uploadArtifacts)) {
542
+ throw new Error("uploadArtifacts must be an array");
543
+ }
544
+
545
+ if (uploadArtifacts.length === 0) {
546
+ return;
547
+ }
548
+
549
+ const jobFilesDir = path.join(jobDir, "files");
550
+ const jobArtifactsDir = path.join(jobFilesDir, "artifacts");
551
+
552
+ await fs.mkdir(jobFilesDir, { recursive: true });
553
+ await fs.mkdir(jobArtifactsDir, { recursive: true });
554
+
555
+ for (const artifact of uploadArtifacts) {
556
+ const { filename, content } = artifact || {};
557
+
558
+ if (!filename || typeof filename !== "string") {
559
+ continue; // Skip invalid entries rather than throwing
560
+ }
561
+
562
+ // Validate filename using the consolidated function
563
+ const validation = validateFilePath(filename);
564
+ if (validation) {
565
+ console.error("Path security: skipping invalid artifact", {
566
+ filename,
567
+ reason: validation.message,
568
+ });
569
+ continue; // Skip invalid filenames rather than throwing
570
+ }
571
+
572
+ const artifactPath = path.join(jobArtifactsDir, filename);
573
+ await fs.writeFile(artifactPath, content);
574
+ }
575
+ }