@ryanfw/prompt-orchestration-pipeline 0.10.0 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/package.json +3 -1
  2. package/src/api/index.js +38 -1
  3. package/src/components/DAGGrid.jsx +180 -53
  4. package/src/components/JobDetail.jsx +11 -0
  5. package/src/components/TaskDetailSidebar.jsx +27 -3
  6. package/src/components/UploadSeed.jsx +2 -2
  7. package/src/components/ui/RestartJobModal.jsx +26 -6
  8. package/src/components/ui/StopJobModal.jsx +183 -0
  9. package/src/core/config.js +7 -3
  10. package/src/core/lifecycle-policy.js +62 -0
  11. package/src/core/orchestrator.js +32 -0
  12. package/src/core/pipeline-runner.js +312 -217
  13. package/src/core/status-initializer.js +155 -0
  14. package/src/core/status-writer.js +235 -13
  15. package/src/pages/Code.jsx +8 -1
  16. package/src/pages/PipelineDetail.jsx +85 -3
  17. package/src/pages/PromptPipelineDashboard.jsx +10 -11
  18. package/src/ui/client/adapters/job-adapter.js +81 -2
  19. package/src/ui/client/api.js +233 -8
  20. package/src/ui/client/hooks/useJobDetailWithUpdates.js +92 -0
  21. package/src/ui/client/hooks/useJobList.js +14 -1
  22. package/src/ui/dist/app.js +262 -0
  23. package/src/ui/dist/assets/{index-DqkbzXZ1.js → index-B320avRx.js} +5051 -2186
  24. package/src/ui/dist/assets/index-B320avRx.js.map +1 -0
  25. package/src/ui/dist/assets/style-BYCoLBnK.css +62 -0
  26. package/src/ui/dist/favicon.svg +12 -0
  27. package/src/ui/dist/index.html +2 -2
  28. package/src/ui/endpoints/file-endpoints.js +330 -0
  29. package/src/ui/endpoints/job-control-endpoints.js +1001 -0
  30. package/src/ui/endpoints/job-endpoints.js +62 -0
  31. package/src/ui/endpoints/sse-endpoints.js +223 -0
  32. package/src/ui/endpoints/state-endpoint.js +85 -0
  33. package/src/ui/endpoints/upload-endpoints.js +406 -0
  34. package/src/ui/express-app.js +182 -0
  35. package/src/ui/server.js +38 -1788
  36. package/src/ui/sse-broadcast.js +93 -0
  37. package/src/ui/utils/http-utils.js +139 -0
  38. package/src/ui/utils/mime-types.js +196 -0
  39. package/src/ui/vite.config.js +22 -0
  40. package/src/ui/zip-utils.js +103 -0
  41. package/src/utils/jobs.js +39 -0
  42. package/src/ui/dist/assets/style-DBF9NQGk.css +0 -62

package/src/core/status-initializer.js
@@ -0,0 +1,155 @@
+ import fs from "node:fs/promises";
+ import path from "node:path";
+
+ /**
+  * Initialize status snapshot from artifacts in the filesystem
+  * @param {Object} options - Options object
+  * @param {string} options.jobDir - Job directory path
+  * @param {Object} options.pipeline - Pipeline configuration object
+  * @returns {Promise<Function>} Function that applies artifact initialization to a snapshot
+  */
+ export async function initializeStatusFromArtifacts({ jobDir, pipeline }) {
+   if (!jobDir || typeof jobDir !== "string") {
+     throw new Error("jobDir must be a non-empty string");
+   }
+
+   if (!pipeline || typeof pipeline !== "object") {
+     throw new Error("pipeline must be an object");
+   }
+
+   const artifactsDir = path.join(jobDir, "files", "artifacts");
+   let artifactFilenames = [];
+
+   try {
+     // Read artifacts directory
+     const entries = await fs.readdir(artifactsDir, { withFileTypes: true });
+
+     // Collect filenames for regular files only
+     artifactFilenames = entries
+       .filter((entry) => entry.isFile())
+       .map((entry) => entry.name);
+
+     console.log("[STATUS_INIT] Found artifacts in directory", {
+       artifactsDir,
+       artifactCount: artifactFilenames.length,
+       artifactNames: artifactFilenames,
+     });
+   } catch (error) {
+     if (error.code === "ENOENT") {
+       // Directory doesn't exist, no artifacts to initialize
+       console.log(
+         "[STATUS_INIT] Artifacts directory does not exist, skipping initialization",
+         {
+           artifactsDir,
+         }
+       );
+     } else {
+       console.error("[STATUS_INIT] Failed to read artifacts directory", {
+         artifactsDir,
+         error: error.message,
+       });
+     }
+     // Return a no-op function for non-existent or unreadable directory
+     return (snapshot) => snapshot;
+   }
+
+   // Determine first task ID from pipeline
+   const firstTaskId = Array.isArray(pipeline.tasks) ? pipeline.tasks[0] : null;
+   console.log("[STATUS_INIT] Determined first task", {
+     firstTaskId,
+     hasTasks: Array.isArray(pipeline.tasks),
+     taskCount: pipeline.tasks?.length || 0,
+   });
+
+   // Return function that applies the artifact initialization to a snapshot
+   return function apply(snapshot) {
+     console.log("[STATUS_INIT] Applying artifact initialization to snapshot", {
+       existingArtifacts: snapshot.files?.artifacts?.length || 0,
+       newArtifacts: artifactFilenames.length,
+       firstTaskId,
+     });
+
+     // Ensure files object exists with proper structure
+     if (!snapshot.files || typeof snapshot.files !== "object") {
+       snapshot.files = { artifacts: [], logs: [], tmp: [] };
+     } else {
+       // Ensure each files array exists
+       for (const type of ["artifacts", "logs", "tmp"]) {
+         if (!Array.isArray(snapshot.files[type])) {
+           snapshot.files[type] = [];
+         }
+       }
+     }
+
+     // Add artifact filenames to root level (deduplicate)
+     const existingArtifacts = new Set(snapshot.files.artifacts || []);
+     for (const filename of artifactFilenames) {
+       if (!existingArtifacts.has(filename)) {
+         snapshot.files.artifacts.push(filename);
+         existingArtifacts.add(filename);
+       }
+     }
+
+     // Add artifact filenames to first task (if it exists)
+     if (firstTaskId) {
+       // Ensure tasks object exists
+       if (!snapshot.tasks || typeof snapshot.tasks !== "object") {
+         snapshot.tasks = {};
+       }
+
+       // Ensure first task exists
+       if (!snapshot.tasks[firstTaskId]) {
+         snapshot.tasks[firstTaskId] = {};
+       }
+
+       // Ensure task files object exists with proper structure
+       if (
+         !snapshot.tasks[firstTaskId].files ||
+         typeof snapshot.tasks[firstTaskId].files !== "object"
+       ) {
+         snapshot.tasks[firstTaskId].files = {
+           artifacts: [],
+           logs: [],
+           tmp: [],
+         };
+       } else {
+         // Ensure each task files array exists
+         for (const type of ["artifacts", "logs", "tmp"]) {
+           if (!Array.isArray(snapshot.tasks[firstTaskId].files[type])) {
+             snapshot.tasks[firstTaskId].files[type] = [];
+           }
+         }
+       }
+
+       // Add artifact filenames to first task (deduplicate)
+       const existingTaskArtifacts = new Set(
+         snapshot.tasks[firstTaskId].files.artifacts || []
+       );
+       for (const filename of artifactFilenames) {
+         if (!existingTaskArtifacts.has(filename)) {
+           snapshot.tasks[firstTaskId].files.artifacts.push(filename);
+           existingTaskArtifacts.add(filename);
+         }
+       }
+
+       console.log("[STATUS_INIT] Added artifacts to first task", {
+         firstTaskId,
+         taskArtifactCount: snapshot.tasks[firstTaskId].files.artifacts.length,
+         artifactNames: artifactFilenames,
+       });
+     }
+
+     console.log("[STATUS_INIT] Final snapshot state", {
+       rootArtifacts: snapshot.files.artifacts.length,
+       rootArtifactNames: snapshot.files.artifacts,
+       firstTaskArtifacts: firstTaskId
+         ? snapshot.tasks[firstTaskId].files.artifacts.length
+         : 0,
+       firstTaskArtifactNames: firstTaskId
+         ? snapshot.tasks[firstTaskId].files.artifacts
+         : [],
+     });
+
+     return snapshot;
+   };
+ }
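
initializeStatusFromArtifacts returns an apply(snapshot) closure rather than writing anything itself, so callers can run it through the serialized status writer. A minimal usage sketch, assuming a caller sitting next to these modules in src/core; the wiring below is illustrative, not the actual call site in this release:

// Hypothetical wiring; only the exported function signatures come from this diff.
import { initializeStatusFromArtifacts } from "./status-initializer.js";
import { writeJobStatus } from "./status-writer.js";

async function seedStatusFromDisk(jobDir, pipeline) {
  // Build the apply() closure from whatever artifacts already exist on disk,
  // then funnel it through the queued, atomic status writer.
  const apply = await initializeStatusFromArtifacts({ jobDir, pipeline });
  return writeJobStatus(jobDir, (snapshot) => apply(snapshot));
}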

package/src/core/status-writer.js
@@ -208,6 +208,26 @@ export async function writeJobStatus(jobDir, updateFn) {
          logger.error("Failed to emit SSE event:", error);
        }

+       // Emit lifecycle_block event if update contains lifecycle block reason
+       if (snapshot.lifecycleBlockReason) {
+         try {
+           const lifecycleEventData = {
+             jobId,
+             taskId: snapshot.lifecycleBlockTaskId,
+             op: snapshot.lifecycleBlockOp,
+             reason: snapshot.lifecycleBlockReason,
+           };
+           await logger.sse("lifecycle_block", lifecycleEventData);
+           logger.log(
+             "lifecycle_block SSE event broadcasted successfully",
+             lifecycleEventData
+           );
+         } catch (error) {
+           // Don't fail the write if SSE emission fails
+           logger.error("Failed to emit lifecycle_block SSE event:", error);
+         }
+       }
+
        logger.groupEnd();
        resultSnapshot = snapshot;
      })
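
The lifecycle_block broadcast reaches the browser through the new SSE plumbing (src/ui/endpoints/sse-endpoints.js and src/ui/sse-broadcast.js, both added in this release). A hypothetical client-side listener; the event name and payload fields are taken from the hunk above, but the endpoint path is an assumption:

// Hypothetical listener; "/api/events" is an assumed SSE path, not confirmed by this diff.
const events = new EventSource("/api/events");
events.addEventListener("lifecycle_block", (e) => {
  const { jobId, taskId, op, reason } = JSON.parse(e.data);
  console.warn(`Job ${jobId}: ${op} blocked on ${taskId}: ${reason}`);
});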

@@ -281,22 +301,71 @@ export async function updateTaskStatus(jobDir, taskId, taskUpdateFn) {
      throw new Error("taskUpdateFn must be a function");
    }

-   return writeJobStatus(jobDir, (snapshot) => {
-     // Ensure task exists
-     if (!snapshot.tasks[taskId]) {
-       snapshot.tasks[taskId] = {};
-     }
+   const jobId = path.basename(jobDir);
+   const logger = createJobLogger("StatusWriter", jobId);

-     const task = snapshot.tasks[taskId];
+   // Get or create the write queue for this job directory
+   const prev = writeQueues.get(jobDir) || Promise.resolve();
+   let resultSnapshot;

-     // Apply task updates
-     const result = taskUpdateFn(task);
-     if (result !== undefined) {
-       snapshot.tasks[taskId] = result;
-     }
+   const next = prev
+     .then(async () => {
+       logger.group("Task Status Update Operation");
+       logger.log(`Updating task ${taskId} for job: ${jobId}`);

-     return snapshot;
-   });
+       const statusPath = path.join(jobDir, "tasks-status.json");
+
+       // Read existing status or create default
+       const current = await readStatusFile(statusPath, jobId);
+       const validated = validateStatusSnapshot(current);
+
+       // Ensure task exists
+       if (!validated.tasks[taskId]) {
+         validated.tasks[taskId] = {};
+       }
+
+       const task = validated.tasks[taskId];
+
+       // Apply task updates
+       const result = taskUpdateFn(task);
+       if (result !== undefined) {
+         validated.tasks[taskId] = result;
+       }
+
+       validated.lastUpdated = new Date().toISOString();
+
+       // Atomic write
+       await atomicWrite(statusPath, validated);
+       logger.log("Task status file written successfully");
+
+       // Emit task:updated SSE event after successful write
+       try {
+         const eventData = {
+           jobId,
+           taskId,
+           task: validated.tasks[taskId],
+         };
+         await logger.sse("task:updated", eventData);
+         logger.log("task:updated SSE event broadcasted successfully");
+       } catch (error) {
+         // Don't fail the write if SSE emission fails
+         logger.error("Failed to emit task:updated SSE event:", error);
+       }
+
+       logger.groupEnd();
+       resultSnapshot = validated;
+     })
+     .catch((e) => {
+       throw e;
+     });
+
+   // Store the promise chain and set up cleanup
+   writeQueues.set(
+     jobDir,
+     next.finally(() => {})
+   );
+
+   return next.then(() => resultSnapshot);
  }

  /**
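
Callers never touch the queue or the status file directly; they pass a callback that either mutates the task in place or returns a replacement object. A sketch of a typical call, with the task id and the extra field chosen purely for illustration:

// Illustrative only; "analyze" and startedAt are made up, the callback contract is from the code above.
import { updateTaskStatus } from "./status-writer.js";

const snapshot = await updateTaskStatus(jobDir, "analyze", (task) => {
  task.state = "running"; // mutate in place, or return a new object instead
  task.startedAt = new Date().toISOString();
});
console.log(snapshot.tasks["analyze"].state); // "running"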

@@ -435,3 +504,156 @@ export async function resetJobToCleanSlate(
      return snapshot;
    });
  }
+
+ /**
+  * Reset a single task to pending state without affecting other tasks
+  *
+  * @param {string} jobDir - Job directory path containing tasks-status.json
+  * @param {string} taskId - Task identifier to reset
+  * @param {Object} options - Reset options
+  * @param {boolean} [options.clearTokenUsage=true] - Whether to clear token usage arrays
+  * @returns {Promise<Object>} The updated status snapshot
+  */
+ export async function resetSingleTask(
+   jobDir,
+   taskId,
+   { clearTokenUsage = true } = {}
+ ) {
+   if (!jobDir || typeof jobDir !== "string") {
+     throw new Error("jobDir must be a non-empty string");
+   }
+
+   if (!taskId || typeof taskId !== "string") {
+     throw new Error("taskId must be a non-empty string");
+   }
+
+   return writeJobStatus(jobDir, (snapshot) => {
+     // Ensure tasks object exists
+     if (!snapshot.tasks || typeof snapshot.tasks !== "object") {
+       snapshot.tasks = {};
+     }
+
+     // Ensure the target task exists
+     if (!snapshot.tasks[taskId]) {
+       snapshot.tasks[taskId] = {};
+     }
+
+     const task = snapshot.tasks[taskId];
+
+     // Reset only the target task state and metadata
+     task.state = TaskState.PENDING;
+     task.currentStage = null;
+
+     // Remove error-related fields
+     delete task.failedStage;
+     delete task.error;
+
+     // Reset counters
+     task.attempts = 0;
+     task.refinementAttempts = 0;
+
+     // Clear token usage if requested
+     if (clearTokenUsage) {
+       task.tokenUsage = [];
+     }
+
+     // Update lastUpdated timestamp
+     snapshot.lastUpdated = new Date().toISOString();
+
+     // Do not modify:
+     // - Any other tasks within snapshot.tasks
+     // - snapshot.files.artifacts|logs|tmp
+     // - Root-level fields other than lastUpdated
+
+     return snapshot;
+   });
+ }
+
+ /**
+  * Consolidated path jail security validation with generic error messages
+  * @param {string} filename - Filename to validate
+  * @returns {Object|null} Validation result or null if valid
+  */
+ function validateFilePath(filename) {
+   // Check for path traversal patterns
+   if (filename.includes("..")) {
+     console.error("Path security: path traversal detected", { filename });
+     return {
+       allowed: false,
+       message: "Path validation failed",
+     };
+   }
+
+   // Check for absolute paths (POSIX, Windows, backslashes, ~)
+   if (
+     path.isAbsolute(filename) ||
+     /^[a-zA-Z]:/.test(filename) ||
+     filename.includes("\\") ||
+     filename.startsWith("~")
+   ) {
+     console.error("Path security: absolute path detected", { filename });
+     return {
+       allowed: false,
+       message: "Path validation failed",
+     };
+   }
+
+   // Check for empty filename
+   if (!filename || filename.trim() === "") {
+     console.error("Path security: empty filename detected");
+     return {
+       allowed: false,
+       message: "Path validation failed",
+     };
+   }
+
+   // Path is valid
+   return null;
+ }
+
+ /**
+  * Initialize job-level artifact index and copy artifacts to job directory
+  * @param {string} jobDir - Job directory path
+  * @param {Array} uploadArtifacts - Array of {filename, content} objects
+  * @returns {Promise<void>}
+  */
+ export async function initializeJobArtifacts(jobDir, uploadArtifacts = []) {
+   if (!jobDir || typeof jobDir !== "string") {
+     throw new Error("jobDir must be a non-empty string");
+   }
+
+   if (!Array.isArray(uploadArtifacts)) {
+     throw new Error("uploadArtifacts must be an array");
+   }
+
+   if (uploadArtifacts.length === 0) {
+     return;
+   }
+
+   const jobFilesDir = path.join(jobDir, "files");
+   const jobArtifactsDir = path.join(jobFilesDir, "artifacts");
+
+   await fs.mkdir(jobFilesDir, { recursive: true });
+   await fs.mkdir(jobArtifactsDir, { recursive: true });
+
+   for (const artifact of uploadArtifacts) {
+     const { filename, content } = artifact || {};
+
+     if (!filename || typeof filename !== "string") {
+       continue; // Skip invalid entries rather than throwing
+     }
+
+     // Validate filename using the consolidated function
+     const validation = validateFilePath(filename);
+     if (validation) {
+       console.error("Path security: skipping invalid artifact", {
+         filename,
+         reason: validation.message,
+       });
+       continue; // Skip invalid filenames rather than throwing
+     }
+
+     const artifactPath = path.join(jobArtifactsDir, filename);
+     await fs.writeFile(artifactPath, content);
+   }
+ }
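
A hypothetical caller showing the { filename, content } shape initializeJobArtifacts expects and how resetSingleTask could fit a retry flow; the filenames, task id, and the assumption that an upload handler is the caller are all illustrative:

// Hypothetical usage; only the exported signatures and the skip-on-invalid-path behavior come from the diff.
import { initializeJobArtifacts, resetSingleTask } from "./status-writer.js";

await initializeJobArtifacts(jobDir, [
  { filename: "seed.json", content: JSON.stringify(seed, null, 2) },
  { filename: "../escape.txt", content: "ignored" }, // fails validateFilePath and is skipped
]);

// Put a single task back to pending (e.g. before a retry) without disturbing the rest:
await resetSingleTask(jobDir, "analyze", { clearTokenUsage: false });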

package/src/pages/Code.jsx
@@ -86,7 +86,14 @@ export default function CodePage() {
    useEffect(() => {
      fetch("/api/llm/functions")
        .then((res) => res.json())
-       .then(setLlmFunctions)
+       .then(({ ok, data }) => {
+         if (!ok || typeof data !== "object" || data === null) {
+           throw new Error(
+             "Invalid /api/llm/functions response: expected { ok:true, data:Object }"
+           );
+         }
+         setLlmFunctions(data);
+       })
        .catch(console.error);
    }, []);


package/src/pages/PipelineDetail.jsx
@@ -1,6 +1,6 @@
- import React from "react";
+ import React, { useState } from "react";
  import { data, useParams } from "react-router-dom";
- import { Box, Flex, Text } from "@radix-ui/themes";
+ import { Box, Flex, Text, Button } from "@radix-ui/themes";
  import * as Tooltip from "@radix-ui/react-tooltip";
  import JobDetail from "../components/JobDetail.jsx";
  import { useJobDetailWithUpdates } from "../ui/client/hooks/useJobDetailWithUpdates.js";
@@ -8,9 +8,15 @@ import Layout from "../components/Layout.jsx";
  import PageSubheader from "../components/PageSubheader.jsx";
  import { statusBadge } from "../utils/ui.jsx";
  import { formatCurrency4, formatTokensCompact } from "../utils/formatters.js";
+ import { rescanJob } from "../ui/client/api.js";
+ import StopJobModal from "../components/ui/StopJobModal.jsx";
+ import { stopJob } from "../ui/client/api.js";

  export default function PipelineDetail() {
    const { jobId } = useParams();
+   const [isRescanning, setIsRescanning] = useState(false);
+   const [isStopModalOpen, setIsStopModalOpen] = useState(false);
+   const [isStopping, setIsStopping] = useState(false);

    // Handle missing job ID (undefined/null)
    if (jobId === undefined || jobId === null) {
@@ -143,6 +149,12 @@
      ...(job.name ? [{ label: job.name }] : []),
    ];

+   // Determine if job is currently running
+   const isRunning =
+     job?.status === "running" ||
+     (job?.tasks &&
+       Object.values(job.tasks).some((task) => task?.state === "running"));
+
    // Derive cost data from job object with safe fallbacks
    const totalCost = job?.totalCost || job?.costs?.summary?.totalCost || 0;
    const totalTokens = job?.totalTokens || job?.costs?.summary?.totalTokens || 0;
@@ -193,13 +205,75 @@
      costIndicator
    );

-   // Right side content for PageSubheader: job ID, cost indicator, and status badge
+   const handleRescan = async () => {
+     setIsRescanning(true);
+     try {
+       const result = await rescanJob(jobId);
+       if (result.ok) {
+         const addedCount = result.added ? result.added.length : 0;
+         const removedCount = result.removed ? result.removed.length : 0;
+         let message = "Rescan complete.";
+         if (addedCount > 0 && removedCount > 0) {
+           message += ` Added ${addedCount} task${addedCount > 1 ? "s" : ""}: ${JSON.stringify(result.added)}. Removed ${removedCount} task${removedCount > 1 ? "s" : ""}: ${JSON.stringify(result.removed)}.`;
+         } else if (addedCount > 0) {
+           message += ` Added ${addedCount} task${addedCount > 1 ? "s" : ""}: ${JSON.stringify(result.added)}.`;
+         } else if (removedCount > 0) {
+           message += ` Removed ${removedCount} task${removedCount > 1 ? "s" : ""}: ${JSON.stringify(result.removed)}.`;
+         } else {
+           message += " No changes found.";
+         }
+         console.log(message);
+       }
+     } catch (err) {
+       console.error("Rescan failed:", err);
+       alert("Rescan failed: " + err.message);
+     } finally {
+       setIsRescanning(false);
+     }
+   };
+
+   const openStopModal = () => setIsStopModalOpen(true);
+   const closeStopModal = () => setIsStopModalOpen(false);
+
+   const handleStopConfirm = async () => {
+     setIsStopping(true);
+     try {
+       await stopJob(jobId);
+       closeStopModal();
+     } catch (error) {
+       console.warn("Failed to stop job:", error);
+       closeStopModal();
+     } finally {
+       setIsStopping(false);
+     }
+   };
+
+   // Right side content for PageSubheader: job ID, cost indicator, status badge, and Stop control
    const subheaderRightContent = (
      <Flex align="center" gap="3" className="shrink-0 flex-wrap">
        <Text size="2" color="gray">
          ID: {job.id || jobId}
        </Text>
        {costIndicatorWithTooltip}
+       {isRunning && (
+         <Button
+           size="1"
+           variant="solid"
+           color="red"
+           disabled={isStopping}
+           onClick={openStopModal}
+         >
+           {isStopping ? "Stopping..." : "Stop"}
+         </Button>
+       )}
+       <Button
+         size="1"
+         variant="soft"
+         disabled={isRescanning}
+         onClick={handleRescan}
+       >
+         {isRescanning ? "Rescanning..." : "Rescan"}
+       </Button>
        {statusBadge(job.status)}
      </Flex>
    );
@@ -215,6 +289,14 @@
        )}
      </PageSubheader>
      <JobDetail job={job} pipeline={pipeline} />
+     <StopJobModal
+       isOpen={isStopModalOpen}
+       onClose={closeStopModal}
+       onConfirm={handleStopConfirm}
+       runningJobs={[{ id: job.id, name: job.name, progress: job.progress }]}
+       defaultJobId={job.id}
+       isSubmitting={isStopping}
+     />
    </Layout>
  );
  }
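
rescanJob and stopJob come from src/ui/client/api.js (+233 lines, not shown here) and are ultimately served by the new job-control-endpoints.js. A hypothetical sketch of what such helpers could look like; the routes and HTTP method are assumptions, only the { ok, added, removed } result shape read by handleRescan above is grounded in this diff:

// Hypothetical client helpers; endpoint paths are assumed, not confirmed by this diff.
export async function rescanJob(jobId) {
  const res = await fetch(`/api/jobs/${encodeURIComponent(jobId)}/rescan`, { method: "POST" });
  return res.json(); // handleRescan expects { ok, added?, removed? }
}

export async function stopJob(jobId) {
  const res = await fetch(`/api/jobs/${encodeURIComponent(jobId)}/stop`, { method: "POST" });
  if (!res.ok) throw new Error(`Stop failed: ${res.status}`);
  return res.json();
}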

package/src/pages/PromptPipelineDashboard.jsx
@@ -7,24 +7,23 @@ import { Box, Flex, Text, Tabs } from "@radix-ui/themes";
  import { Progress } from "../components/ui/progress";
  import { useJobListWithUpdates } from "../ui/client/hooks/useJobListWithUpdates";
  import { adaptJobSummary } from "../ui/client/adapters/job-adapter";
- import { TaskState, JobStatus } from "../config/statuses.js";

  // Referenced components — leave these alone
  import JobTable from "../components/JobTable";
  import Layout from "../components/Layout.jsx";

- export default function PromptPipelineDashboard({ isConnected }) {
+ export default function PromptPipelineDashboard() {
    const navigate = useNavigate();
    const hookResult = useJobListWithUpdates();

    if (
+     /* eslint-disable-next-line no-undef */
      process.env.NODE_ENV === "test" &&
      (hookResult === undefined ||
        hookResult === null ||
        typeof hookResult !== "object" ||
        Array.isArray(hookResult))
    ) {
-     // eslint-disable-next-line no-console
      console.error(
        "[PromptPipelineDashboard] useJobListWithUpdates returned unexpected value",
        {
@@ -39,7 +38,7 @@ export default function PromptPipelineDashboard({ isConnected }) {
      );
    }

-   const { data: apiJobs, loading, error, connectionStatus } = hookResult;
+   const { data: apiJobs, error } = hookResult;

    const jobs = useMemo(() => {
      const src = Array.isArray(apiJobs) ? apiJobs : [];
@@ -57,26 +56,26 @@ export default function PromptPipelineDashboard({ isConnected }) {
    // Shared ticker for live duration updates - removed useTicker

    const errorCount = useMemo(
-     () => jobs.filter((j) => j.status === TaskState.FAILED).length,
+     () => jobs.filter((j) => j.displayCategory === "errors").length,
      [jobs]
    );
    const currentCount = useMemo(
-     () => jobs.filter((j) => j.status === TaskState.RUNNING).length,
+     () => jobs.filter((j) => j.displayCategory === "current").length,
      [jobs]
    );
    const completedCount = useMemo(
-     () => jobs.filter((j) => j.status === JobStatus.COMPLETE).length,
+     () => jobs.filter((j) => j.displayCategory === "complete").length,
      [jobs]
    );

    const filteredJobs = useMemo(() => {
      switch (activeTab) {
        case "current":
-         return jobs.filter((j) => j.status === TaskState.RUNNING);
+         return jobs.filter((j) => j.displayCategory === "current");
        case "errors":
-         return jobs.filter((j) => j.status === TaskState.FAILED);
+         return jobs.filter((j) => j.displayCategory === "errors");
        case "complete":
-         return jobs.filter((j) => j.status === JobStatus.COMPLETE);
+         return jobs.filter((j) => j.displayCategory === "complete");
        default:
          return [];
      }
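
The tab counts and filters now key off a precomputed displayCategory supplied by adaptJobSummary (src/ui/client/adapters/job-adapter.js, +81 lines) instead of comparing raw TaskState/JobStatus values. A hypothetical sketch of that mapping, inferred only from the three category strings used above; the real adapter logic is not part of this hunk:

// Hypothetical mapping; adaptJobSummary may use different rules.
function deriveDisplayCategory(job) {
  if (job.status === "complete") return "complete";
  if (job.status === "failed" || job.error) return "errors";
  return "current"; // anything still pending or running shows in the Current tab
}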

@@ -86,7 +85,7 @@ export default function PromptPipelineDashboard({ isConnected }) {

    // Aggregate progress for currently running jobs (for a subtle top progress bar)
    const runningJobs = useMemo(
-     () => jobs.filter((j) => j.status === TaskState.RUNNING),
+     () => jobs.filter((j) => j.displayCategory === "current"),
      [jobs]
    );
    const aggregateProgress = useMemo(() => {