@ryanfw/prompt-orchestration-pipeline 0.5.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/package.json +1 -1
  2. package/src/components/JobCard.jsx +1 -1
  3. package/src/components/JobDetail.jsx +45 -12
  4. package/src/components/JobTable.jsx +40 -1
  5. package/src/components/Layout.jsx +146 -22
  6. package/src/components/PageSubheader.jsx +75 -0
  7. package/src/components/UploadSeed.jsx +0 -70
  8. package/src/components/ui/Logo.jsx +16 -0
  9. package/src/core/config.js +145 -13
  10. package/src/core/file-io.js +12 -27
  11. package/src/core/pipeline-runner.js +13 -6
  12. package/src/core/status-writer.js +63 -52
  13. package/src/core/task-runner.js +61 -1
  14. package/src/llm/index.js +97 -40
  15. package/src/pages/Code.jsx +297 -0
  16. package/src/pages/PipelineDetail.jsx +47 -8
  17. package/src/pages/PromptPipelineDashboard.jsx +6 -53
  18. package/src/providers/deepseek.js +17 -1
  19. package/src/providers/openai.js +1 -1
  20. package/src/ui/client/adapters/job-adapter.js +26 -2
  21. package/src/ui/client/hooks/useJobDetailWithUpdates.js +0 -1
  22. package/src/ui/client/index.css +6 -0
  23. package/src/ui/client/index.html +1 -1
  24. package/src/ui/client/main.jsx +2 -0
  25. package/src/ui/dist/assets/{index-CxcrauYR.js → index-WgJUlSmE.js} +716 -307
  26. package/src/ui/dist/assets/style-x0V-5m8e.css +62 -0
  27. package/src/ui/dist/index.html +3 -3
  28. package/src/ui/job-reader.js +0 -108
  29. package/src/ui/server.js +54 -0
  30. package/src/ui/sse-enhancer.js +0 -1
  31. package/src/ui/transformers/list-transformer.js +32 -12
  32. package/src/ui/transformers/status-transformer.js +11 -11
  33. package/src/utils/token-cost-calculator.js +297 -0
  34. package/src/utils/ui.jsx +4 -4
  35. package/src/ui/dist/assets/style-D6K_oQ12.css +0 -62
package/src/llm/index.js CHANGED
@@ -18,8 +18,12 @@ export function registerMockProvider(provider) {
18
18
 
19
19
  // Auto-register mock provider in test mode when default provider is "mock"
20
20
  function autoRegisterMockProvider() {
21
- const config = getConfig();
22
- if (config.llm.defaultProvider === "mock" && !mockProviderInstance) {
21
+ // Skip config check in tests to avoid PO_ROOT requirement
22
+ const isTest =
23
+ process.env.NODE_ENV === "test" || process.env.VITEST === "true";
24
+ const defaultProvider = isTest ? "mock" : getConfig().llm.defaultProvider;
25
+
26
+ if (defaultProvider === "mock" && !mockProviderInstance) {
23
27
  // Auto-register a basic mock provider for testing
24
28
  mockProviderInstance = {
25
29
  chat: async () => ({
@@ -92,6 +96,11 @@ export async function chat(options) {
92
96
  temperature,
93
97
  maxTokens,
94
98
  metadata = {},
99
+ topP,
100
+ frequencyPenalty,
101
+ presencePenalty,
102
+ stop,
103
+ responseFormat,
95
104
  ...rest
96
105
  } = options;
97
106
 
@@ -157,52 +166,90 @@ export async function chat(options) {
157
166
  totalTokens: result.usage.total_tokens,
158
167
  };
159
168
  } else if (provider === "openai") {
160
- const result = await openaiChat({
169
+ const openaiArgs = {
161
170
  messages,
162
171
  model: model || "gpt-5-chat-latest",
163
- maxTokens,
164
172
  temperature,
173
+ maxTokens,
165
174
  ...rest,
166
- });
175
+ };
176
+ if (responseFormat !== undefined)
177
+ openaiArgs.responseFormat = responseFormat;
178
+ if (topP !== undefined) openaiArgs.topP = topP;
179
+ if (frequencyPenalty !== undefined)
180
+ openaiArgs.frequencyPenalty = frequencyPenalty;
181
+ if (presencePenalty !== undefined)
182
+ openaiArgs.presencePenalty = presencePenalty;
183
+ if (stop !== undefined) openaiArgs.stop = stop;
184
+
185
+ const result = await openaiChat(openaiArgs);
167
186
 
168
187
  response = {
169
- content: typeof result === "string" ? result : JSON.stringify(result),
170
- raw: result,
188
+ content:
189
+ result?.content ??
190
+ (typeof result === "string" ? result : String(result)),
191
+ raw: result?.raw ?? result,
171
192
  };
172
193
 
173
- // Estimate tokens since GPT-5 responses API might not return usage
174
- const promptTokens = estimateTokens(systemMsg + userMsg);
175
- const completionTokens = estimateTokens(response.content);
176
- usage = {
177
- promptTokens,
178
- completionTokens,
179
- totalTokens: promptTokens + completionTokens,
180
- };
194
+ // Use provider usage if available; otherwise estimate tokens
195
+ if (result?.usage) {
196
+ const { prompt_tokens, completion_tokens, total_tokens } = result.usage;
197
+ usage = {
198
+ promptTokens: prompt_tokens,
199
+ completionTokens: completion_tokens,
200
+ totalTokens: total_tokens,
201
+ };
202
+ } else {
203
+ const promptTokens = estimateTokens(systemMsg + userMsg);
204
+ const completionTokens = estimateTokens(response.content);
205
+ usage = {
206
+ promptTokens,
207
+ completionTokens,
208
+ totalTokens: promptTokens + completionTokens,
209
+ };
210
+ }
181
211
  } else if (provider === "deepseek") {
182
- const result = await deepseekChat(
183
- {
184
- messages,
185
- model: "deepseek-chat",
186
- }
187
-
188
- // systemMsg,
189
- // userMsg,
190
- // model || "deepseek-reasoner"
191
- );
212
+ const deepseekArgs = {
213
+ messages,
214
+ model: model || "deepseek-reasoner",
215
+ temperature,
216
+ maxTokens,
217
+ ...rest,
218
+ };
219
+ if (topP !== undefined) deepseekArgs.topP = topP;
220
+ if (frequencyPenalty !== undefined)
221
+ deepseekArgs.frequencyPenalty = frequencyPenalty;
222
+ if (presencePenalty !== undefined)
223
+ deepseekArgs.presencePenalty = presencePenalty;
224
+ if (stop !== undefined) deepseekArgs.stop = stop;
225
+ if (responseFormat !== undefined)
226
+ deepseekArgs.responseFormat = responseFormat;
227
+
228
+ const result = await deepseekChat(deepseekArgs);
192
229
 
193
230
  response = {
194
231
  content: result.content,
195
232
  };
196
233
 
197
- const promptTokens = estimateTokens(systemMsg + userMsg);
198
- const completionTokens = estimateTokens(
199
- typeof result === "string" ? result : JSON.stringify(result)
200
- );
201
- usage = {
202
- promptTokens,
203
- completionTokens,
204
- totalTokens: promptTokens + completionTokens,
205
- };
234
+ // Use actual usage from deepseek API if available; otherwise estimate
235
+ if (result?.usage) {
236
+ const { prompt_tokens, completion_tokens, total_tokens } = result.usage;
237
+ usage = {
238
+ promptTokens: prompt_tokens,
239
+ completionTokens: completion_tokens,
240
+ totalTokens: total_tokens,
241
+ };
242
+ } else {
243
+ const promptTokens = estimateTokens(systemMsg + userMsg);
244
+ const completionTokens = estimateTokens(
245
+ typeof result === "string" ? result : JSON.stringify(result)
246
+ );
247
+ usage = {
248
+ promptTokens,
249
+ completionTokens,
250
+ totalTokens: promptTokens + completionTokens,
251
+ };
252
+ }
206
253
  } else {
207
254
  throw new Error(`Provider ${provider} not yet implemented`);
208
255
  }
@@ -222,8 +269,11 @@ export async function chat(options) {
222
269
  timestamp: new Date().toISOString(),
223
270
  });
224
271
 
225
- // Return clean response - no metrics attached!
226
- return response;
272
+ // Return clean response with usage - no metrics attached!
273
+ return {
274
+ ...response,
275
+ usage,
276
+ };
227
277
  } catch (error) {
228
278
  const duration = Date.now() - startTime;
229
279
 
@@ -299,8 +349,11 @@ function buildProviderFunctions(models) {
299
349
 
300
350
  // Helper function for single prompt completion
301
351
  export async function complete(prompt, options = {}) {
302
- const config = getConfig();
303
- const defaultProvider = options.provider || config.llm.defaultProvider;
352
+ // Skip config check in tests to avoid PO_ROOT requirement
353
+ const isTest =
354
+ process.env.NODE_ENV === "test" || process.env.VITEST === "true";
355
+ const defaultProvider =
356
+ options.provider || (isTest ? "openai" : getConfig().llm.defaultProvider);
304
357
 
305
358
  return chat({
306
359
  provider: defaultProvider,
@@ -417,8 +470,12 @@ export function createLLM() {
417
470
 
418
471
  // Separate function for high-level LLM interface (used by llm.test.js)
419
472
  export function createHighLevelLLM(options = {}) {
420
- const config = getConfig();
421
- const defaultProvider = options.defaultProvider || config.llm.defaultProvider;
473
+ // Skip config check in tests to avoid PO_ROOT requirement
474
+ const isTest =
475
+ process.env.NODE_ENV === "test" || process.env.VITEST === "true";
476
+ const config = isTest ? { llm: { models: {} } } : getConfig();
477
+ const defaultProvider =
478
+ options.defaultProvider || (isTest ? "openai" : config.llm.defaultProvider);
422
479
 
423
480
  // Build functions from registry
424
481
  const providerFunctions = buildProviderFunctions(config.llm.models);
@@ -0,0 +1,297 @@
1
+ import React, { useState, useEffect } from "react";
2
+ import { Box, Heading, Table, Code, Text } from "@radix-ui/themes";
3
+ import Layout from "../components/Layout.jsx";
4
+ import PageSubheader from "../components/PageSubheader.jsx";
5
+ import { Button } from "../components/ui/button.jsx";
6
+
7
+ const ioFunctions = [
8
+ {
9
+ name: "writeArtifact",
10
+ description: "Write an artifact file",
11
+ params:
12
+ 'name: string, content: string, options?: { mode?: "replace"|"append"=replace }',
13
+ returns: "Promise<string>",
14
+ notes: "Writes to {workDir}/files/artifacts; updates tasks-status.json",
15
+ },
16
+ {
17
+ name: "writeLog",
18
+ description: "Write a log file",
19
+ params:
20
+ 'name: string, content: string, options?: { mode?: "append"|"replace"=append }',
21
+ returns: "Promise<string>",
22
+ notes:
23
+ "Writes to {workDir}/files/logs; default append; updates tasks-status.json",
24
+ },
25
+ {
26
+ name: "writeTmp",
27
+ description: "Write a temporary file",
28
+ params:
29
+ 'name: string, content: string, options?: { mode?: "replace"|"append"=replace }',
30
+ returns: "Promise<string>",
31
+ notes: "Writes to {workDir}/files/tmp; updates tasks-status.json",
32
+ },
33
+ {
34
+ name: "readArtifact",
35
+ description: "Read an artifact file",
36
+ params: "name: string",
37
+ returns: "Promise<string>",
38
+ notes: "Reads from {workDir}/files/artifacts",
39
+ },
40
+ {
41
+ name: "readLog",
42
+ description: "Read a log file",
43
+ params: "name: string",
44
+ returns: "Promise<string>",
45
+ notes: "Reads from {workDir}/files/logs",
46
+ },
47
+ {
48
+ name: "readTmp",
49
+ description: "Read a temporary file",
50
+ params: "name: string",
51
+ returns: "Promise<string>",
52
+ notes: "Reads from {workDir}/files/tmp",
53
+ },
54
+ {
55
+ name: "getTaskDir",
56
+ description: "Get the task directory path",
57
+ params: "",
58
+ returns: "string",
59
+ notes: "Returns {workDir}/tasks/{taskName}",
60
+ },
61
+ {
62
+ name: "getCurrentStage",
63
+ description: "Get the current stage name",
64
+ params: "",
65
+ returns: "string",
66
+ notes: "Calls injected getStage()",
67
+ },
68
+ ];
69
+
70
+ const sampleSeed = {
71
+ name: "some-name",
72
+ pipeline: "content-generation",
73
+ data: {
74
+ type: "some-type",
75
+ contentType: "blog-post",
76
+ targetAudience: "software-developers",
77
+ tone: "professional-yet-accessible",
78
+ length: "1500-2000 words",
79
+ outputFormat: "blog-post",
80
+ },
81
+ };
82
+
83
+ export default function CodePage() {
84
+ const [llmFunctions, setLlmFunctions] = useState(null);
85
+
86
+ useEffect(() => {
87
+ fetch("/api/llm/functions")
88
+ .then((res) => res.json())
89
+ .then(setLlmFunctions)
90
+ .catch(console.error);
91
+ }, []);
92
+
93
+ const breadcrumbs = [{ label: "Home", href: "/" }, { label: "Code" }];
94
+
95
+ const handleCopySeed = () => {
96
+ navigator.clipboard.writeText(JSON.stringify(sampleSeed, null, 2));
97
+ };
98
+
99
+ return (
100
+ <Layout>
101
+ <PageSubheader breadcrumbs={breadcrumbs} />
102
+ <Box>
103
+ {/* Seed File Example Section */}
104
+ <Box mb="8">
105
+ <Heading size="6" mb="4">
106
+ Seed File Example
107
+ </Heading>
108
+ <Text as="p" mb="3" size="2">
109
+ A seed file is a JSON object used to start a new pipeline job. It
110
+ defines the job name, the pipeline to run, and any contextual data
111
+ the pipeline requires to begin.
112
+ </Text>
113
+ <Text as="p" mb="3" size="2" weight="bold">
114
+ Required fields:
115
+ </Text>
116
+ <ul className="list-disc list-inside mb-4 space-y-1">
117
+ <li className="text-sm text-gray-700">
118
+ <Text as="span" weight="bold">
119
+ name
120
+ </Text>{" "}
121
+ (string): Human-friendly title; non-empty, printable only, ≤120
122
+ chars; must be unique.
123
+ </li>
124
+ <li className="text-sm text-gray-700">
125
+ <Text as="span" weight="bold">
126
+ pipeline
127
+ </Text>{" "}
128
+ (string): Pipeline slug defined in your registry (e.g.,
129
+ content-generation).
130
+ </li>
131
+ <li className="text-sm text-gray-700">
132
+ <Text as="span" weight="bold">
133
+ data
134
+ </Text>{" "}
135
+ (object): Required but flexible; include any arbitrary keys your
136
+ pipeline tasks expect.
137
+ </li>
138
+ </ul>
139
+ <Box mb="3">
140
+ <Button
141
+ size="1"
142
+ onClick={handleCopySeed}
143
+ data-testid="copy-seed-example"
144
+ >
145
+ Copy
146
+ </Button>
147
+ </Box>
148
+ <pre className="text-xs bg-gray-50 p-3 rounded overflow-auto max-h-60 border border-gray-200">
149
+ {JSON.stringify(sampleSeed, null, 2)}
150
+ </pre>
151
+ </Box>
152
+
153
+ <Heading size="6" mb="4">
154
+ Pipeline Task IO API
155
+ </Heading>
156
+ <Box overflowX="auto">
157
+ <Table.Root>
158
+ <Table.Header>
159
+ <Table.Row>
160
+ <Table.ColumnHeaderCell>Function</Table.ColumnHeaderCell>
161
+ <Table.ColumnHeaderCell>Parameters</Table.ColumnHeaderCell>
162
+ <Table.ColumnHeaderCell>Returns</Table.ColumnHeaderCell>
163
+ <Table.ColumnHeaderCell>Notes</Table.ColumnHeaderCell>
164
+ </Table.Row>
165
+ </Table.Header>
166
+ <Table.Body>
167
+ {ioFunctions.map((fn) => (
168
+ <Table.Row key={fn.name}>
169
+ <Table.RowHeaderCell>
170
+ <Code size="3">io.{fn.name}</Code>
171
+ </Table.RowHeaderCell>
172
+ <Table.Cell>
173
+ <Code size="3">{fn.params || "—"}</Code>
174
+ </Table.Cell>
175
+ <Table.Cell>
176
+ <Code size="3">{fn.returns}</Code>
177
+ </Table.Cell>
178
+ <Table.Cell>
179
+ {fn.description}
180
+ <br />
181
+ {fn.notes}
182
+ </Table.Cell>
183
+ </Table.Row>
184
+ ))}
185
+ </Table.Body>
186
+ </Table.Root>
187
+ </Box>
188
+
189
+ <Heading size="6" mt="8" mb="4">
190
+ Pipeline Task LLM API
191
+ </Heading>
192
+ <Box mb="4">
193
+ <Heading size="4" mb="2">
194
+ Arguments
195
+ </Heading>
196
+ <Code size="3" mb="4">
197
+ {`{
198
+ messages: Array<{role: "system"|"user"|"assistant", content: string }>,
199
+ temperature?: number,
200
+ maxTokens?: number,
201
+ responseFormat?: "json" | { type: "json_object" | { type: "json_schema", name: string, json_schema: object } },
202
+ stop?: string | string[],
203
+ topP?: number,
204
+ frequencyPenalty?: number,
205
+ presencePenalty?: number,
206
+ tools?: Array<{type: "function", function: object}>,
207
+ toolChoice?: "auto" | "required" | { type: "function", function: { name: string } },
208
+ seed?: number,
209
+ provider?: string,
210
+ model?: string,
211
+ metadata?: object,
212
+ maxRetries?: number
213
+ }`}
214
+ </Code>
215
+ <Heading size="4" mb="2">
216
+ Returns
217
+ </Heading>
218
+ <Code size="3">{`Promise<{ content: any, usage?: object, raw?: any }>`}</Code>
219
+ </Box>
220
+
221
+ {llmFunctions && (
222
+ <Box overflowX="auto">
223
+ <Table.Root>
224
+ <Table.Header>
225
+ <Table.Row>
226
+ <Table.ColumnHeaderCell>Function</Table.ColumnHeaderCell>
227
+ <Table.ColumnHeaderCell>Model</Table.ColumnHeaderCell>
228
+ </Table.Row>
229
+ </Table.Header>
230
+ <Table.Body>
231
+ {Object.entries(llmFunctions).map(([provider, functions]) =>
232
+ functions.map((fn) => (
233
+ <Table.Row key={fn.fullPath}>
234
+ <Table.RowHeaderCell>
235
+ <Code size="3">{fn.fullPath}</Code>
236
+ </Table.RowHeaderCell>
237
+ <Table.Cell>
238
+ <Code size="3">{fn.model}</Code>
239
+ </Table.Cell>
240
+ </Table.Row>
241
+ ))
242
+ )}
243
+ </Table.Body>
244
+ </Table.Root>
245
+ </Box>
246
+ )}
247
+
248
+ <Heading size="6" mt="8" mb="4">
249
+ Environment Configuration
250
+ </Heading>
251
+ <Box mb="4">
252
+ <Heading size="4" mb="2">
253
+ Example .env Configuration
254
+ </Heading>
255
+ <Box overflowX="auto">
256
+ <Table.Root>
257
+ <Table.Header>
258
+ <Table.Row>
259
+ <Table.ColumnHeaderCell>
260
+ Environment Variable
261
+ </Table.ColumnHeaderCell>
262
+ </Table.Row>
263
+ </Table.Header>
264
+ <Table.Body>
265
+ <Table.Row>
266
+ <Table.RowHeaderCell>
267
+ <Code size="3">OPENAI_API_KEY=</Code>
268
+ </Table.RowHeaderCell>
269
+ </Table.Row>
270
+ <Table.Row>
271
+ <Table.RowHeaderCell>
272
+ <Code size="3">DEEPSEEK_API_KEY=</Code>
273
+ </Table.RowHeaderCell>
274
+ </Table.Row>
275
+ <Table.Row>
276
+ <Table.RowHeaderCell>
277
+ <Code size="3">GEMINI_API_KEY=</Code>
278
+ </Table.RowHeaderCell>
279
+ </Table.Row>
280
+ <Table.Row>
281
+ <Table.RowHeaderCell>
282
+ <Code size="3">ANTHROPIC_API_KEY=</Code>
283
+ </Table.RowHeaderCell>
284
+ </Table.Row>
285
+ <Table.Row>
286
+ <Table.RowHeaderCell>
287
+ <Code size="3">Z_API_KEY=</Code>
288
+ </Table.RowHeaderCell>
289
+ </Table.Row>
290
+ </Table.Body>
291
+ </Table.Root>
292
+ </Box>
293
+ </Box>
294
+ </Box>
295
+ </Layout>
296
+ );
297
+ }
@@ -4,6 +4,7 @@ import { Box, Flex, Text } from "@radix-ui/themes";
4
4
  import JobDetail from "../components/JobDetail.jsx";
5
5
  import { useJobDetailWithUpdates } from "../ui/client/hooks/useJobDetailWithUpdates.js";
6
6
  import Layout from "../components/Layout.jsx";
7
+ import PageSubheader from "../components/PageSubheader.jsx";
7
8
  import { statusBadge } from "../utils/ui.jsx";
8
9
 
9
10
  export default function PipelineDetail() {
@@ -12,7 +13,14 @@ export default function PipelineDetail() {
12
13
  // Handle missing job ID (undefined/null)
13
14
  if (jobId === undefined || jobId === null) {
14
15
  return (
15
- <Layout title="Pipeline Details" showBackButton={true}>
16
+ <Layout
17
+ pageTitle="Pipeline Details"
18
+ breadcrumbs={[
19
+ { label: "Home", href: "/" },
20
+ { label: "Pipeline Details" },
21
+ ]}
22
+ showBackButton={true}
23
+ >
16
24
  <Flex align="center" justify="center" className="min-h-64">
17
25
  <Box className="text-center">
18
26
  <Text size="5" weight="medium" color="red" className="mb-2">
@@ -28,7 +36,14 @@ export default function PipelineDetail() {
28
36
 
29
37
  if (loading) {
30
38
  return (
31
- <Layout title="Pipeline Details" showBackButton={true}>
39
+ <Layout
40
+ pageTitle="Pipeline Details"
41
+ breadcrumbs={[
42
+ { label: "Home", href: "/" },
43
+ { label: "Pipeline Details" },
44
+ ]}
45
+ showBackButton={true}
46
+ >
32
47
  <Flex align="center" justify="center" className="min-h-64">
33
48
  <Box className="text-center">
34
49
  <Text size="5" weight="medium" className="mb-2">
@@ -42,7 +57,14 @@ export default function PipelineDetail() {
42
57
 
43
58
  if (error) {
44
59
  return (
45
- <Layout title="Pipeline Details" showBackButton={true}>
60
+ <Layout
61
+ pageTitle="Pipeline Details"
62
+ breadcrumbs={[
63
+ { label: "Home", href: "/" },
64
+ { label: "Pipeline Details" },
65
+ ]}
66
+ showBackButton={true}
67
+ >
46
68
  <Flex align="center" justify="center" className="min-h-64">
47
69
  <Box className="text-center">
48
70
  <Text size="5" weight="medium" color="red" className="mb-2">
@@ -59,7 +81,14 @@ export default function PipelineDetail() {
59
81
 
60
82
  if (!job) {
61
83
  return (
62
- <Layout title="Pipeline Details" showBackButton={true}>
84
+ <Layout
85
+ pageTitle="Pipeline Details"
86
+ breadcrumbs={[
87
+ { label: "Home", href: "/" },
88
+ { label: "Pipeline Details" },
89
+ ]}
90
+ showBackButton={true}
91
+ >
63
92
  <Flex align="center" justify="center" className="min-h-64">
64
93
  <Box className="text-center">
65
94
  <Text size="5" weight="medium" className="mb-2">
@@ -89,8 +118,15 @@ export default function PipelineDetail() {
89
118
  return { tasks: pipelineTasks };
90
119
  })();
91
120
 
92
- // Header actions: job ID and status badge
93
- const headerActions = (
121
+ const pageTitle = job.name || "Pipeline Details";
122
+ const breadcrumbs = [
123
+ { label: "Home", href: "/" },
124
+ { label: "Pipeline Details" },
125
+ ...(job.name ? [{ label: job.name }] : []),
126
+ ];
127
+
128
+ // Right side content for PageSubheader: job ID and status badge
129
+ const subheaderRightContent = (
94
130
  <Flex align="center" gap="3" className="shrink-0">
95
131
  <Text size="2" color="gray">
96
132
  ID: {job.id || jobId}
@@ -101,10 +137,13 @@ export default function PipelineDetail() {
101
137
 
102
138
  return (
103
139
  <Layout
104
- title={job.name || "Pipeline Details"}
140
+ pageTitle={pageTitle}
141
+ breadcrumbs={breadcrumbs}
105
142
  showBackButton={true}
106
- actions={headerActions}
107
143
  >
144
+ <PageSubheader breadcrumbs={breadcrumbs} maxWidth="max-w-7xl">
145
+ {subheaderRightContent}
146
+ </PageSubheader>
108
147
  <JobDetail job={job} pipeline={pipeline} />
109
148
  </Layout>
110
149
  );